diff --git a/crates/ruff/tests/integration_test.rs b/crates/ruff/tests/integration_test.rs index 688f4fb11c62f..ae5d850da32d2 100644 --- a/crates/ruff/tests/integration_test.rs +++ b/crates/ruff/tests/integration_test.rs @@ -1054,6 +1054,52 @@ def mvce(keys, values): "#); } +#[test] +fn show_statistics_syntax_errors() { + let mut cmd = RuffCheck::default() + .args(["--statistics", "--target-version=py39", "--preview"]) + .build(); + + // ParseError + assert_cmd_snapshot!( + cmd.pass_stdin("x ="), + @r" + success: false + exit_code: 1 + ----- stdout ----- + 1 syntax-error + Found 1 error. + + ----- stderr ----- + "); + + // match before 3.10, UnsupportedSyntaxError + assert_cmd_snapshot!( + cmd.pass_stdin("match 2:\n case 1: ..."), + @r" + success: false + exit_code: 1 + ----- stdout ----- + 1 syntax-error + Found 1 error. + + ----- stderr ----- + "); + + // rebound comprehension variable, SemanticSyntaxError + assert_cmd_snapshot!( + cmd.pass_stdin("[x := 1 for x in range(0)]"), + @r" + success: false + exit_code: 1 + ----- stdout ----- + 1 syntax-error + Found 1 error. + + ----- stderr ----- + "); +} + #[test] fn preview_enabled_prefix() { // All the RUF9XX test rules should be triggered diff --git a/crates/ruff/tests/lint.rs b/crates/ruff/tests/lint.rs index 1fdead18d5834..b95b99d93487d 100644 --- a/crates/ruff/tests/lint.rs +++ b/crates/ruff/tests/lint.rs @@ -5491,3 +5491,68 @@ fn cookiecutter_globbing_no_project_root() -> Result<()> { Ok(()) } + +/// Test that semantic syntax errors (1) are emitted, (2) are not cached, (3) don't affect the +/// reporting of normal diagnostics, and (4) are not suppressed by `select = []` (or otherwise +/// disabling all AST-based rules). +#[test] +fn semantic_syntax_errors() -> Result<()> { + let tempdir = TempDir::new()?; + let contents = "[(x := 1) for x in foo]"; + fs::write(tempdir.path().join("main.py"), contents)?; + + let mut cmd = Command::new(get_cargo_bin(BIN_NAME)); + // inline STDIN_BASE_OPTIONS to remove --no-cache + cmd.args(["check", "--output-format", "concise"]) + .arg("--preview") + .arg("--quiet") // suppress `debug build without --no-cache` warnings + .current_dir(&tempdir); + + assert_cmd_snapshot!( + cmd, + @r" + success: false + exit_code: 1 + ----- stdout ----- + main.py:1:3: SyntaxError: assignment expression cannot rebind comprehension variable + main.py:1:20: F821 Undefined name `foo` + + ----- stderr ----- + " + ); + + // this should *not* be cached, like normal parse errors + assert_cmd_snapshot!( + cmd, + @r" + success: false + exit_code: 1 + ----- stdout ----- + main.py:1:3: SyntaxError: assignment expression cannot rebind comprehension variable + main.py:1:20: F821 Undefined name `foo` + + ----- stderr ----- + " + ); + + // ensure semantic errors are caught even without AST-based rules selected + assert_cmd_snapshot!( + Command::new(get_cargo_bin(BIN_NAME)) + .args(STDIN_BASE_OPTIONS) + .args(["--config", "lint.select = []"]) + .arg("--preview") + .arg("-") + .pass_stdin(contents), + @r" + success: false + exit_code: 1 + ----- stdout ----- + -:1:3: SyntaxError: assignment expression cannot rebind comprehension variable + Found 1 error. 
+ + ----- stderr ----- + " + ); + + Ok(()) +} diff --git a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs index 7b4aaa0a7e88e..3d770544d047b 100644 --- a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs +++ b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs @@ -853,14 +853,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { if checker.enabled(Rule::FutureFeatureNotDefined) { pyflakes::rules::future_feature_not_defined(checker, alias); } - if checker.enabled(Rule::LateFutureImport) { - if checker.semantic.seen_futures_boundary() { - checker.report_diagnostic(Diagnostic::new( - pyflakes::rules::LateFutureImport, - stmt.range(), - )); - } - } } else if &alias.name == "*" { if checker.enabled(Rule::UndefinedLocalWithNestedImportStarUsage) { if !matches!(checker.semantic.current_scope().kind, ScopeKind::Module) { diff --git a/crates/ruff_linter/src/checkers/ast/mod.rs b/crates/ruff_linter/src/checkers/ast/mod.rs index fc72c1d1a3871..9f4d63a484666 100644 --- a/crates/ruff_linter/src/checkers/ast/mod.rs +++ b/crates/ruff_linter/src/checkers/ast/mod.rs @@ -26,6 +26,9 @@ use std::path::Path; use itertools::Itertools; use log::debug; +use ruff_python_parser::semantic_errors::{ + SemanticSyntaxChecker, SemanticSyntaxContext, SemanticSyntaxError, SemanticSyntaxErrorKind, +}; use rustc_hash::{FxHashMap, FxHashSet}; use ruff_diagnostics::{Diagnostic, IsolationLevel}; @@ -63,6 +66,7 @@ use crate::importer::Importer; use crate::noqa::NoqaMapping; use crate::package::PackageRoot; use crate::registry::Rule; +use crate::rules::pyflakes::rules::LateFutureImport; use crate::rules::{flake8_pyi, flake8_type_checking, pyflakes, pyupgrade}; use crate::settings::{flags, LinterSettings}; use crate::{docstrings, noqa, Locator}; @@ -223,8 +227,13 @@ pub(crate) struct Checker<'a> { last_stmt_end: TextSize, /// A state describing if a docstring is expected or not. docstring_state: DocstringState, - /// The target [`PythonVersion`] for version-dependent checks + /// The target [`PythonVersion`] for version-dependent checks. target_version: PythonVersion, + /// Helper visitor for detecting semantic syntax errors. + #[allow(clippy::struct_field_names)] + semantic_checker: SemanticSyntaxChecker, + /// Errors collected by the `semantic_checker`. 
+    semantic_errors: RefCell<Vec<SemanticSyntaxError>>,
 }
 
 impl<'a> Checker<'a> {
@@ -272,6 +281,8 @@ impl<'a> Checker<'a> {
             last_stmt_end: TextSize::default(),
             docstring_state: DocstringState::default(),
             target_version,
+            semantic_checker: SemanticSyntaxChecker::new(),
+            semantic_errors: RefCell::default(),
         }
     }
 }
@@ -512,10 +523,44 @@ impl<'a> Checker<'a> {
     pub(crate) const fn target_version(&self) -> PythonVersion {
         self.target_version
     }
+
+    fn with_semantic_checker(&mut self, f: impl FnOnce(&mut SemanticSyntaxChecker, &Checker)) {
+        let mut checker = std::mem::take(&mut self.semantic_checker);
+        f(&mut checker, self);
+        self.semantic_checker = checker;
+    }
+}
+
+impl SemanticSyntaxContext for Checker<'_> {
+    fn seen_docstring_boundary(&self) -> bool {
+        self.semantic.seen_module_docstring_boundary()
+    }
+
+    fn python_version(&self) -> PythonVersion {
+        self.target_version
+    }
+
+    fn report_semantic_error(&self, error: SemanticSyntaxError) {
+        match error.kind {
+            SemanticSyntaxErrorKind::LateFutureImport => {
+                if self.settings.rules.enabled(Rule::LateFutureImport) {
+                    self.report_diagnostic(Diagnostic::new(LateFutureImport, error.range));
+                }
+            }
+            SemanticSyntaxErrorKind::ReboundComprehensionVariable
+                if self.settings.preview.is_enabled() =>
+            {
+                self.semantic_errors.borrow_mut().push(error);
+            }
+            SemanticSyntaxErrorKind::ReboundComprehensionVariable => {}
+        }
+    }
 }
 
 impl<'a> Visitor<'a> for Checker<'a> {
     fn visit_stmt(&mut self, stmt: &'a Stmt) {
+        self.with_semantic_checker(|semantic, context| semantic.visit_stmt(stmt, context));
+
         // Step 0: Pre-processing
         self.semantic.push_node(stmt);
 
@@ -550,17 +595,13 @@ impl<'a> Visitor<'a> for Checker<'a> {
                     {
                         self.semantic.flags |= SemanticModelFlags::FUTURE_ANNOTATIONS;
                     }
-                } else {
-                    self.semantic.flags |= SemanticModelFlags::FUTURES_BOUNDARY;
                 }
             }
             Stmt::Import(_) => {
                 self.semantic.flags |= SemanticModelFlags::MODULE_DOCSTRING_BOUNDARY;
-                self.semantic.flags |= SemanticModelFlags::FUTURES_BOUNDARY;
             }
             _ => {
                 self.semantic.flags |= SemanticModelFlags::MODULE_DOCSTRING_BOUNDARY;
-                self.semantic.flags |= SemanticModelFlags::FUTURES_BOUNDARY;
                 if !(self.semantic.seen_import_boundary()
                     || stmt.is_ipy_escape_command_stmt()
                     || helpers::is_assignment_to_a_dunder(stmt)
@@ -1131,6 +1172,8 @@ impl<'a> Visitor<'a> for Checker<'a> {
     }
 
     fn visit_expr(&mut self, expr: &'a Expr) {
+        self.with_semantic_checker(|semantic, context| semantic.visit_expr(expr, context));
+
         // Step 0: Pre-processing
         if self.source_type.is_stub()
             && self.semantic.in_class_base()
@@ -2674,7 +2717,7 @@ pub(crate) fn check_ast(
     cell_offsets: Option<&CellOffsets>,
     notebook_index: Option<&NotebookIndex>,
     target_version: PythonVersion,
-) -> Vec<Diagnostic> {
+) -> (Vec<Diagnostic>, Vec<SemanticSyntaxError>) {
     let module_path = package
         .map(PackageRoot::path)
         .and_then(|package| to_module_path(package, path));
@@ -2739,5 +2782,11 @@ pub(crate) fn check_ast(
     checker.analyze.scopes.push(ScopeId::global());
     analyze::deferred_scopes(&checker);
 
-    checker.diagnostics.take()
+    let Checker {
+        diagnostics,
+        semantic_errors,
+        ..
+ } = checker; + + (diagnostics.into_inner(), semantic_errors.into_inner()) } diff --git a/crates/ruff_linter/src/linter.rs b/crates/ruff_linter/src/linter.rs index ca365fc57889e..24add1e6d8768 100644 --- a/crates/ruff_linter/src/linter.rs +++ b/crates/ruff_linter/src/linter.rs @@ -6,6 +6,7 @@ use std::path::Path; use anyhow::{anyhow, Result}; use colored::Colorize; use itertools::Itertools; +use ruff_python_parser::semantic_errors::SemanticSyntaxError; use rustc_hash::FxHashMap; use ruff_diagnostics::Diagnostic; @@ -43,9 +44,9 @@ pub struct LinterResult { /// Flag indicating that the parsed source code does not contain any /// [`ParseError`]s has_valid_syntax: bool, - /// Flag indicating that the parsed source code does not contain any - /// [`UnsupportedSyntaxError`]s - has_no_unsupported_syntax_errors: bool, + /// Flag indicating that the parsed source code does not contain any [`ParseError`]s, + /// [`UnsupportedSyntaxError`]s, or [`SemanticSyntaxError`]s. + has_no_syntax_errors: bool, } impl LinterResult { @@ -62,7 +63,7 @@ impl LinterResult { /// /// See [`LinterResult::has_valid_syntax`] for a version specific to [`ParseError`]s. pub fn has_no_syntax_errors(&self) -> bool { - self.has_valid_syntax() && self.has_no_unsupported_syntax_errors + self.has_valid_syntax() && self.has_no_syntax_errors } /// Returns `true` if the parsed source code is valid i.e., it has no [`ParseError`]s. @@ -114,6 +115,9 @@ pub fn check_path( // Aggregate all diagnostics. let mut diagnostics = vec![]; + // Aggregate all semantic syntax errors. + let mut semantic_syntax_errors = vec![]; + let tokens = parsed.tokens(); let comment_ranges = indexer.comment_ranges(); @@ -172,35 +176,33 @@ pub fn check_path( // Run the AST-based rules only if there are no syntax errors. 
if parsed.has_valid_syntax() { - let use_ast = settings - .rules - .iter_enabled() - .any(|rule_code| rule_code.lint_source().is_ast()); + let cell_offsets = source_kind.as_ipy_notebook().map(Notebook::cell_offsets); + let notebook_index = source_kind.as_ipy_notebook().map(Notebook::index); + + let (new_diagnostics, new_semantic_syntax_errors) = check_ast( + parsed, + locator, + stylist, + indexer, + &directives.noqa_line_for, + settings, + noqa, + path, + package, + source_type, + cell_offsets, + notebook_index, + target_version, + ); + diagnostics.extend(new_diagnostics); + semantic_syntax_errors.extend(new_semantic_syntax_errors); + let use_imports = !directives.isort.skip_file && settings .rules .iter_enabled() .any(|rule_code| rule_code.lint_source().is_imports()); - if use_ast || use_imports || use_doc_lines { - let cell_offsets = source_kind.as_ipy_notebook().map(Notebook::cell_offsets); - let notebook_index = source_kind.as_ipy_notebook().map(Notebook::index); - if use_ast { - diagnostics.extend(check_ast( - parsed, - locator, - stylist, - indexer, - &directives.noqa_line_for, - settings, - noqa, - path, - package, - source_type, - cell_offsets, - notebook_index, - target_version, - )); - } + if use_imports || use_doc_lines { if use_imports { let import_diagnostics = check_imports( parsed, @@ -368,6 +370,7 @@ pub fn check_path( diagnostics, parsed.errors(), syntax_errors, + &semantic_syntax_errors, path, locator, directives, @@ -482,9 +485,9 @@ pub fn lint_only( ); LinterResult { - messages, has_valid_syntax: parsed.has_valid_syntax(), - has_no_unsupported_syntax_errors: parsed.unsupported_syntax_errors().is_empty(), + has_no_syntax_errors: !messages.iter().any(Message::is_syntax_error), + messages, } } @@ -493,6 +496,7 @@ fn diagnostics_to_messages( diagnostics: Vec, parse_errors: &[ParseError], unsupported_syntax_errors: &[UnsupportedSyntaxError], + semantic_syntax_errors: &[SemanticSyntaxError], path: &Path, locator: &Locator, directives: &Directives, @@ -514,6 +518,11 @@ fn diagnostics_to_messages( .chain(unsupported_syntax_errors.iter().map(|syntax_error| { Message::from_unsupported_syntax_error(syntax_error, file.deref().clone()) })) + .chain( + semantic_syntax_errors + .iter() + .map(|error| Message::from_semantic_syntax_error(error, file.deref().clone())), + ) .chain(diagnostics.into_iter().map(|diagnostic| { let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start()); Message::from_diagnostic(diagnostic, file.deref().clone(), noqa_offset) @@ -545,7 +554,7 @@ pub fn lint_fix<'a>( let mut has_valid_syntax = false; // Track whether the _initial_ source code has no unsupported syntax errors. - let mut has_no_unsupported_syntax_errors = false; + let mut has_no_syntax_errors = false; let target_version = settings.resolve_target_version(path); @@ -589,12 +598,12 @@ pub fn lint_fix<'a>( if iterations == 0 { has_valid_syntax = parsed.has_valid_syntax(); - has_no_unsupported_syntax_errors = parsed.unsupported_syntax_errors().is_empty(); + has_no_syntax_errors = !messages.iter().any(Message::is_syntax_error); } else { // If the source code had no syntax errors on the first pass, but // does on a subsequent pass, then we've introduced a // syntax error. Return the original code. 
- if has_valid_syntax && has_no_unsupported_syntax_errors { + if has_valid_syntax && has_no_syntax_errors { if let Some(error) = parsed.errors().first() { report_fix_syntax_error( path, @@ -636,7 +645,7 @@ pub fn lint_fix<'a>( result: LinterResult { messages, has_valid_syntax, - has_no_unsupported_syntax_errors, + has_no_syntax_errors, }, transformed, fixed, diff --git a/crates/ruff_linter/src/message/mod.rs b/crates/ruff_linter/src/message/mod.rs index 8e1b1ffad9de1..7f16eeac54df2 100644 --- a/crates/ruff_linter/src/message/mod.rs +++ b/crates/ruff_linter/src/message/mod.rs @@ -3,6 +3,7 @@ use std::collections::BTreeMap; use std::io::Write; use std::ops::Deref; +use ruff_python_parser::semantic_errors::SemanticSyntaxError; use rustc_hash::FxHashMap; pub use azure::AzureEmitter; @@ -135,6 +136,18 @@ impl Message { }) } + /// Create a [`Message`] from the given [`SemanticSyntaxError`]. + pub fn from_semantic_syntax_error( + semantic_syntax_error: &SemanticSyntaxError, + file: SourceFile, + ) -> Message { + Message::SyntaxError(SyntaxErrorMessage { + message: format!("SyntaxError: {semantic_syntax_error}"), + range: semantic_syntax_error.range, + file, + }) + } + pub const fn as_diagnostic_message(&self) -> Option<&DiagnosticMessage> { match self { Message::Diagnostic(m) => Some(m), diff --git a/crates/ruff_python_parser/resources/inline/err/rebound_comprehension_variable.py b/crates/ruff_python_parser/resources/inline/err/rebound_comprehension_variable.py new file mode 100644 index 0000000000000..6717c22333db5 --- /dev/null +++ b/crates/ruff_python_parser/resources/inline/err/rebound_comprehension_variable.py @@ -0,0 +1,9 @@ +[(a := 0) for a in range(0)] +{(a := 0) for a in range(0)} +{(a := 0): val for a in range(0)} +{key: (a := 0) for a in range(0)} +((a := 0) for a in range(0)) +[[(a := 0)] for a in range(0)] +[(a := 0) for b in range (0) for a in range(0)] +[(a := 0) for a in range (0) for b in range(0)] +[((a := 0), (b := 1)) for a in range (0) for b in range(0)] diff --git a/crates/ruff_python_parser/resources/inline/ok/non_rebound_comprehension_variable.py b/crates/ruff_python_parser/resources/inline/ok/non_rebound_comprehension_variable.py new file mode 100644 index 0000000000000..a8300b13df286 --- /dev/null +++ b/crates/ruff_python_parser/resources/inline/ok/non_rebound_comprehension_variable.py @@ -0,0 +1 @@ +[a := 0 for x in range(0)] diff --git a/crates/ruff_python_parser/resources/valid/expressions/arguments.py b/crates/ruff_python_parser/resources/valid/expressions/arguments.py index 012db5016c98d..5cd5c0922c2d4 100644 --- a/crates/ruff_python_parser/resources/valid/expressions/arguments.py +++ b/crates/ruff_python_parser/resources/valid/expressions/arguments.py @@ -32,7 +32,7 @@ # Named expression call(x := 1) -call(x := 1 for x in iter) +call(x := 1 for i in iter) # Starred expressions call(*x and y) diff --git a/crates/ruff_python_parser/src/lib.rs b/crates/ruff_python_parser/src/lib.rs index 260375ece2b3a..c2a60f544edb2 100644 --- a/crates/ruff_python_parser/src/lib.rs +++ b/crates/ruff_python_parser/src/lib.rs @@ -85,6 +85,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize}; mod error; pub mod lexer; mod parser; +pub mod semantic_errors; mod string; mod token; mod token_set; diff --git a/crates/ruff_python_parser/src/semantic_errors.rs b/crates/ruff_python_parser/src/semantic_errors.rs new file mode 100644 index 0000000000000..185cf7e9a2877 --- /dev/null +++ b/crates/ruff_python_parser/src/semantic_errors.rs @@ -0,0 +1,273 @@ +//! 
[`SemanticSyntaxChecker`] for AST-based syntax errors.
+//!
+//! This checker is not responsible for traversing the AST itself. Instead, its
+//! [`SemanticSyntaxChecker::visit_stmt`] and [`SemanticSyntaxChecker::visit_expr`] methods should
+//! be called in a parent `Visitor`'s `visit_stmt` and `visit_expr` methods, respectively.
+
+use std::fmt::Display;
+
+use ruff_python_ast::{
+    self as ast,
+    visitor::{walk_expr, Visitor},
+    Expr, PythonVersion, Stmt, StmtExpr, StmtImportFrom,
+};
+use ruff_text_size::TextRange;
+
+#[derive(Debug)]
+pub struct SemanticSyntaxChecker {
+    /// The checker has traversed past the `__future__` import boundary.
+    ///
+    /// For example, the checker could be visiting `x` in:
+    ///
+    /// ```python
+    /// from __future__ import annotations
+    ///
+    /// import os
+    ///
+    /// x: int = 1
+    /// ```
+    ///
+    /// Python considers it a syntax error to import from `__future__` after any other
+    /// non-`__future__`-importing statements.
+    seen_futures_boundary: bool,
+}
+
+impl SemanticSyntaxChecker {
+    pub fn new() -> Self {
+        Self {
+            seen_futures_boundary: false,
+        }
+    }
+}
+
+impl SemanticSyntaxChecker {
+    fn add_error<Ctx: SemanticSyntaxContext>(
+        context: &Ctx,
+        kind: SemanticSyntaxErrorKind,
+        range: TextRange,
+    ) {
+        context.report_semantic_error(SemanticSyntaxError {
+            kind,
+            range,
+            python_version: context.python_version(),
+        });
+    }
+
+    fn check_stmt<Ctx: SemanticSyntaxContext>(&self, stmt: &ast::Stmt, ctx: &Ctx) {
+        if let Stmt::ImportFrom(StmtImportFrom { range, module, .. }) = stmt {
+            if self.seen_futures_boundary && matches!(module.as_deref(), Some("__future__")) {
+                Self::add_error(ctx, SemanticSyntaxErrorKind::LateFutureImport, *range);
+            }
+        }
+    }
+
+    pub fn visit_stmt<Ctx: SemanticSyntaxContext>(&mut self, stmt: &ast::Stmt, ctx: &Ctx) {
+        // update internal state
+        match stmt {
+            Stmt::Expr(StmtExpr { value, .. })
+                if !ctx.seen_docstring_boundary() && value.is_string_literal_expr() => {}
+            Stmt::ImportFrom(StmtImportFrom { module, .. }) => {
+                // Allow __future__ imports until we see a non-__future__ import.
+                if !matches!(module.as_deref(), Some("__future__")) {
+                    self.seen_futures_boundary = true;
+                }
+            }
+            _ => {
+                self.seen_futures_boundary = true;
+            }
+        }
+
+        // check for errors
+        self.check_stmt(stmt, ctx);
+    }
+
+    pub fn visit_expr<Ctx: SemanticSyntaxContext>(&mut self, expr: &Expr, ctx: &Ctx) {
+        match expr {
+            Expr::ListComp(ast::ExprListComp {
+                elt, generators, ..
+            })
+            | Expr::SetComp(ast::ExprSetComp {
+                elt, generators, ..
+            })
+            | Expr::Generator(ast::ExprGenerator {
+                elt, generators, ..
+            }) => Self::check_generator_expr(elt, generators, ctx),
+            Expr::DictComp(ast::ExprDictComp {
+                key,
+                value,
+                generators,
+                ..
+            }) => {
+                Self::check_generator_expr(key, generators, ctx);
+                Self::check_generator_expr(value, generators, ctx);
+            }
+            _ => {}
+        }
+    }
+
+    /// Add a [`SemanticSyntaxErrorKind::ReboundComprehensionVariable`] if `expr` rebinds an iteration
+    /// variable in `generators`.
+    fn check_generator_expr<Ctx: SemanticSyntaxContext>(
+        expr: &Expr,
+        comprehensions: &[ast::Comprehension],
+        ctx: &Ctx,
+    ) {
+        let rebound_variables = {
+            let mut visitor = ReboundComprehensionVisitor {
+                comprehensions,
+                rebound_variables: Vec::new(),
+            };
+            visitor.visit_expr(expr);
+            visitor.rebound_variables
+        };
+
+        // TODO(brent) with multiple diagnostic ranges, we could mark both the named expr (current)
+        // and the name expr being rebound
+        for range in rebound_variables {
+            // test_err rebound_comprehension_variable
+            // [(a := 0) for a in range(0)]
+            // {(a := 0) for a in range(0)}
+            // {(a := 0): val for a in range(0)}
+            // {key: (a := 0) for a in range(0)}
+            // ((a := 0) for a in range(0))
+            // [[(a := 0)] for a in range(0)]
+            // [(a := 0) for b in range (0) for a in range(0)]
+            // [(a := 0) for a in range (0) for b in range(0)]
+            // [((a := 0), (b := 1)) for a in range (0) for b in range(0)]
+
+            // test_ok non_rebound_comprehension_variable
+            // [a := 0 for x in range(0)]
+            Self::add_error(
+                ctx,
+                SemanticSyntaxErrorKind::ReboundComprehensionVariable,
+                range,
+            );
+        }
+    }
+}
+
+impl Default for SemanticSyntaxChecker {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SemanticSyntaxError {
+    pub kind: SemanticSyntaxErrorKind,
+    pub range: TextRange,
+    pub python_version: PythonVersion,
+}
+
+impl Display for SemanticSyntaxError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self.kind {
+            SemanticSyntaxErrorKind::LateFutureImport => {
+                f.write_str("__future__ imports must be at the top of the file")
+            }
+            SemanticSyntaxErrorKind::ReboundComprehensionVariable => {
+                f.write_str("assignment expression cannot rebind comprehension variable")
+            }
+        }
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum SemanticSyntaxErrorKind {
+    /// Represents the use of a `__future__` import after the beginning of a file.
+    ///
+    /// ## Examples
+    ///
+    /// ```python
+    /// from pathlib import Path
+    ///
+    /// from __future__ import annotations
+    /// ```
+    ///
+    /// This corresponds to the [`late-future-import`] (`F404`) rule in ruff.
+    ///
+    /// [`late-future-import`]: https://docs.astral.sh/ruff/rules/late-future-import/
+    LateFutureImport,
+
+    /// Represents the rebinding of the iteration variable of a list, set, or dict comprehension or
+    /// a generator expression.
+    ///
+    /// ## Examples
+    ///
+    /// ```python
+    /// [(a := 0) for a in range(0)]
+    /// {(a := 0) for a in range(0)}
+    /// {(a := 0): val for a in range(0)}
+    /// {key: (a := 0) for a in range(0)}
+    /// ((a := 0) for a in range(0))
+    /// ```
+    ReboundComprehensionVariable,
+}
+
+/// Searches for the first named expression (`x := y`) rebinding one of the `iteration_variables` in
+/// a comprehension or generator expression.
+struct ReboundComprehensionVisitor<'a> {
+    comprehensions: &'a [ast::Comprehension],
+    rebound_variables: Vec<TextRange>,
+}
+
+impl Visitor<'_> for ReboundComprehensionVisitor<'_> {
+    fn visit_expr(&mut self, expr: &Expr) {
+        if let Expr::Named(ast::ExprNamed { target, .. }) = expr {
+            if let Expr::Name(ast::ExprName { id, range, .. }) = &**target {
+                if self.comprehensions.iter().any(|comp| {
+                    comp.target
+                        .as_name_expr()
+                        .is_some_and(|name| name.id == *id)
+                }) {
+                    self.rebound_variables.push(*range);
+                }
+            };
+        }
+        walk_expr(self, expr);
+    }
+}
+
+pub trait SemanticSyntaxContext {
+    /// Returns `true` if a module's docstring boundary has been passed.
+    fn seen_docstring_boundary(&self) -> bool;
+
+    /// The target Python version for detecting backwards-incompatible syntax changes.
+    fn python_version(&self) -> PythonVersion;
+
+    fn report_semantic_error(&self, error: SemanticSyntaxError);
+}
+
+#[derive(Default)]
+pub struct SemanticSyntaxCheckerVisitor<Ctx> {
+    checker: SemanticSyntaxChecker,
+    context: Ctx,
+}
+
+impl<Ctx> SemanticSyntaxCheckerVisitor<Ctx> {
+    pub fn new(context: Ctx) -> Self {
+        Self {
+            checker: SemanticSyntaxChecker::new(),
+            context,
+        }
+    }
+
+    pub fn into_context(self) -> Ctx {
+        self.context
+    }
+}
+
+impl<Ctx> Visitor<'_> for SemanticSyntaxCheckerVisitor<Ctx>
+where
+    Ctx: SemanticSyntaxContext,
+{
+    fn visit_stmt(&mut self, stmt: &'_ Stmt) {
+        self.checker.visit_stmt(stmt, &self.context);
+        ruff_python_ast::visitor::walk_stmt(self, stmt);
+    }
+
+    fn visit_expr(&mut self, expr: &'_ Expr) {
+        self.checker.visit_expr(expr, &self.context);
+        ruff_python_ast::visitor::walk_expr(self, expr);
+    }
+}
diff --git a/crates/ruff_python_parser/tests/fixtures.rs b/crates/ruff_python_parser/tests/fixtures.rs
index 5ce0f741db6b3..6163c80b1c7c3 100644
--- a/crates/ruff_python_parser/tests/fixtures.rs
+++ b/crates/ruff_python_parser/tests/fixtures.rs
@@ -1,3 +1,4 @@
+use std::cell::RefCell;
 use std::cmp::Ordering;
 use std::fmt::{Formatter, Write};
 use std::fs;
@@ -5,7 +6,11 @@ use std::path::Path;
 
 use ruff_annotate_snippets::{Level, Renderer, Snippet};
 use ruff_python_ast::visitor::source_order::{walk_module, SourceOrderVisitor, TraversalSignal};
+use ruff_python_ast::visitor::Visitor;
 use ruff_python_ast::{AnyNodeRef, Mod, PythonVersion};
+use ruff_python_parser::semantic_errors::{
+    SemanticSyntaxCheckerVisitor, SemanticSyntaxContext, SemanticSyntaxError,
+};
 use ruff_python_parser::{parse_unchecked, Mode, ParseErrorType, ParseOptions, Token};
 use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
@@ -81,6 +86,38 @@ fn test_valid_syntax(input_path: &Path) {
     writeln!(&mut output, "## AST").unwrap();
     writeln!(&mut output, "\n```\n{:#?}\n```", parsed.syntax()).unwrap();
 
+    let parsed = parsed.try_into_module().expect("Parsed with Mode::Module");
+
+    let mut visitor = SemanticSyntaxCheckerVisitor::new(TestContext::default());
+
+    for stmt in parsed.suite() {
+        visitor.visit_stmt(stmt);
+    }
+
+    let semantic_syntax_errors = visitor.into_context().diagnostics.into_inner();
+
+    if !semantic_syntax_errors.is_empty() {
+        let mut message = "Expected no semantic syntax errors for a valid program:\n".to_string();
+
+        let line_index = LineIndex::from_source_text(&source);
+        let source_code = SourceCode::new(&source, &line_index);
+
+        for error in semantic_syntax_errors {
+            writeln!(
+                &mut message,
+                "{}\n",
+                CodeFrame {
+                    range: error.range,
+                    error: &ParseErrorType::OtherError(error.to_string()),
+                    source_code: &source_code,
+                }
+            )
+            .unwrap();
+        }
+
+        panic!("{input_path:?}: {message}");
+    }
+
     insta::with_settings!({
         omit_expression => true,
         input_file => input_path,
@@ -99,11 +136,6 @@ fn test_invalid_syntax(input_path: &Path) {
     });
     let parsed = parse_unchecked(&source, options);
 
-    assert!(
-        parsed.has_syntax_errors(),
-        "{input_path:?}: Expected parser to generate at least one syntax error for a program containing syntax errors."
- ); - validate_tokens(parsed.tokens(), source.text_len(), input_path); validate_ast(parsed.syntax(), source.text_len(), input_path); @@ -148,6 +180,38 @@ fn test_invalid_syntax(input_path: &Path) { .unwrap(); } + let parsed = parsed.try_into_module().expect("Parsed with Mode::Module"); + + let mut visitor = SemanticSyntaxCheckerVisitor::new(TestContext::default()); + + for stmt in parsed.suite() { + visitor.visit_stmt(stmt); + } + + let semantic_syntax_errors = visitor.into_context().diagnostics.into_inner(); + + assert!( + parsed.has_syntax_errors() || !semantic_syntax_errors.is_empty(), + "{input_path:?}: Expected parser to generate at least one syntax error for a program containing syntax errors." + ); + + if !semantic_syntax_errors.is_empty() { + writeln!(&mut output, "## Semantic Syntax Errors\n").unwrap(); + } + + for error in semantic_syntax_errors { + writeln!( + &mut output, + "{}\n", + CodeFrame { + range: error.range, + error: &ParseErrorType::OtherError(error.to_string()), + source_code: &source_code, + } + ) + .unwrap(); + } + insta::with_settings!({ omit_expression => true, input_file => input_path, @@ -393,3 +457,22 @@ impl<'ast> SourceOrderVisitor<'ast> for ValidateAstVisitor<'ast> { self.previous = Some(node); } } + +#[derive(Debug, Default)] +struct TestContext { + diagnostics: RefCell>, +} + +impl SemanticSyntaxContext for TestContext { + fn seen_docstring_boundary(&self) -> bool { + false + } + + fn python_version(&self) -> PythonVersion { + PythonVersion::default() + } + + fn report_semantic_error(&self, error: SemanticSyntaxError) { + self.diagnostics.borrow_mut().push(error); + } +} diff --git a/crates/ruff_python_parser/tests/generate_inline_tests.rs b/crates/ruff_python_parser/tests/generate_inline_tests.rs index 7f71d74717e3b..908f7c6337a5d 100644 --- a/crates/ruff_python_parser/tests/generate_inline_tests.rs +++ b/crates/ruff_python_parser/tests/generate_inline_tests.rs @@ -22,7 +22,7 @@ fn project_root() -> PathBuf { #[test] fn generate_inline_tests() -> Result<()> { - let parser_dir = project_root().join("crates/ruff_python_parser/src/parser"); + let parser_dir = project_root().join("crates/ruff_python_parser/src/"); let tests = TestCollection::try_from(parser_dir.as_path())?; let mut test_files = TestFiles::default(); diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@rebound_comprehension_variable.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@rebound_comprehension_variable.py.snap new file mode 100644 index 0000000000000..d4093c88394d3 --- /dev/null +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@rebound_comprehension_variable.py.snap @@ -0,0 +1,903 @@ +--- +source: crates/ruff_python_parser/tests/fixtures.rs +input_file: crates/ruff_python_parser/resources/inline/err/rebound_comprehension_variable.py +--- +## AST + +``` +Module( + ModModule { + range: 0..342, + body: [ + Expr( + StmtExpr { + range: 0..28, + value: ListComp( + ExprListComp { + range: 0..28, + elt: Named( + ExprNamed { + range: 2..8, + target: Name( + ExprName { + range: 2..3, + id: Name("a"), + ctx: Store, + }, + ), + value: NumberLiteral( + ExprNumberLiteral { + range: 7..8, + value: Int( + 0, + ), + }, + ), + }, + ), + generators: [ + Comprehension { + range: 10..27, + target: Name( + ExprName { + range: 14..15, + id: Name("a"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 19..27, + func: Name( + ExprName { + range: 19..24, + id: Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 24..27, + args: [ + 
NumberLiteral( + ExprNumberLiteral { + range: 25..26, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + ], + }, + ), + }, + ), + Expr( + StmtExpr { + range: 29..57, + value: SetComp( + ExprSetComp { + range: 29..57, + elt: Named( + ExprNamed { + range: 31..37, + target: Name( + ExprName { + range: 31..32, + id: Name("a"), + ctx: Store, + }, + ), + value: NumberLiteral( + ExprNumberLiteral { + range: 36..37, + value: Int( + 0, + ), + }, + ), + }, + ), + generators: [ + Comprehension { + range: 39..56, + target: Name( + ExprName { + range: 43..44, + id: Name("a"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 48..56, + func: Name( + ExprName { + range: 48..53, + id: Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 53..56, + args: [ + NumberLiteral( + ExprNumberLiteral { + range: 54..55, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + ], + }, + ), + }, + ), + Expr( + StmtExpr { + range: 58..91, + value: DictComp( + ExprDictComp { + range: 58..91, + key: Named( + ExprNamed { + range: 60..66, + target: Name( + ExprName { + range: 60..61, + id: Name("a"), + ctx: Store, + }, + ), + value: NumberLiteral( + ExprNumberLiteral { + range: 65..66, + value: Int( + 0, + ), + }, + ), + }, + ), + value: Name( + ExprName { + range: 69..72, + id: Name("val"), + ctx: Load, + }, + ), + generators: [ + Comprehension { + range: 73..90, + target: Name( + ExprName { + range: 77..78, + id: Name("a"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 82..90, + func: Name( + ExprName { + range: 82..87, + id: Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 87..90, + args: [ + NumberLiteral( + ExprNumberLiteral { + range: 88..89, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + ], + }, + ), + }, + ), + Expr( + StmtExpr { + range: 92..125, + value: DictComp( + ExprDictComp { + range: 92..125, + key: Name( + ExprName { + range: 93..96, + id: Name("key"), + ctx: Load, + }, + ), + value: Named( + ExprNamed { + range: 99..105, + target: Name( + ExprName { + range: 99..100, + id: Name("a"), + ctx: Store, + }, + ), + value: NumberLiteral( + ExprNumberLiteral { + range: 104..105, + value: Int( + 0, + ), + }, + ), + }, + ), + generators: [ + Comprehension { + range: 107..124, + target: Name( + ExprName { + range: 111..112, + id: Name("a"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 116..124, + func: Name( + ExprName { + range: 116..121, + id: Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 121..124, + args: [ + NumberLiteral( + ExprNumberLiteral { + range: 122..123, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + ], + }, + ), + }, + ), + Expr( + StmtExpr { + range: 126..154, + value: Generator( + ExprGenerator { + range: 126..154, + elt: Named( + ExprNamed { + range: 128..134, + target: Name( + ExprName { + range: 128..129, + id: Name("a"), + ctx: Store, + }, + ), + value: NumberLiteral( + ExprNumberLiteral { + range: 133..134, + value: Int( + 0, + ), + }, + ), + }, + ), + generators: [ + Comprehension { + range: 136..153, + target: Name( + ExprName { + range: 140..141, + id: Name("a"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 145..153, + func: Name( + ExprName { + range: 145..150, + id: Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 150..153, + 
args: [ + NumberLiteral( + ExprNumberLiteral { + range: 151..152, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + ], + parenthesized: true, + }, + ), + }, + ), + Expr( + StmtExpr { + range: 155..185, + value: ListComp( + ExprListComp { + range: 155..185, + elt: List( + ExprList { + range: 156..166, + elts: [ + Named( + ExprNamed { + range: 158..164, + target: Name( + ExprName { + range: 158..159, + id: Name("a"), + ctx: Store, + }, + ), + value: NumberLiteral( + ExprNumberLiteral { + range: 163..164, + value: Int( + 0, + ), + }, + ), + }, + ), + ], + ctx: Load, + }, + ), + generators: [ + Comprehension { + range: 167..184, + target: Name( + ExprName { + range: 171..172, + id: Name("a"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 176..184, + func: Name( + ExprName { + range: 176..181, + id: Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 181..184, + args: [ + NumberLiteral( + ExprNumberLiteral { + range: 182..183, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + ], + }, + ), + }, + ), + Expr( + StmtExpr { + range: 186..233, + value: ListComp( + ExprListComp { + range: 186..233, + elt: Named( + ExprNamed { + range: 188..194, + target: Name( + ExprName { + range: 188..189, + id: Name("a"), + ctx: Store, + }, + ), + value: NumberLiteral( + ExprNumberLiteral { + range: 193..194, + value: Int( + 0, + ), + }, + ), + }, + ), + generators: [ + Comprehension { + range: 196..214, + target: Name( + ExprName { + range: 200..201, + id: Name("b"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 205..214, + func: Name( + ExprName { + range: 205..210, + id: Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 211..214, + args: [ + NumberLiteral( + ExprNumberLiteral { + range: 212..213, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + Comprehension { + range: 215..232, + target: Name( + ExprName { + range: 219..220, + id: Name("a"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 224..232, + func: Name( + ExprName { + range: 224..229, + id: Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 229..232, + args: [ + NumberLiteral( + ExprNumberLiteral { + range: 230..231, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + ], + }, + ), + }, + ), + Expr( + StmtExpr { + range: 234..281, + value: ListComp( + ExprListComp { + range: 234..281, + elt: Named( + ExprNamed { + range: 236..242, + target: Name( + ExprName { + range: 236..237, + id: Name("a"), + ctx: Store, + }, + ), + value: NumberLiteral( + ExprNumberLiteral { + range: 241..242, + value: Int( + 0, + ), + }, + ), + }, + ), + generators: [ + Comprehension { + range: 244..262, + target: Name( + ExprName { + range: 248..249, + id: Name("a"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 253..262, + func: Name( + ExprName { + range: 253..258, + id: Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 259..262, + args: [ + NumberLiteral( + ExprNumberLiteral { + range: 260..261, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + Comprehension { + range: 263..280, + target: Name( + ExprName { + range: 267..268, + id: Name("b"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 272..280, + func: Name( + ExprName { + range: 272..277, + id: 
Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 277..280, + args: [ + NumberLiteral( + ExprNumberLiteral { + range: 278..279, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + ], + }, + ), + }, + ), + Expr( + StmtExpr { + range: 282..341, + value: ListComp( + ExprListComp { + range: 282..341, + elt: Tuple( + ExprTuple { + range: 283..303, + elts: [ + Named( + ExprNamed { + range: 285..291, + target: Name( + ExprName { + range: 285..286, + id: Name("a"), + ctx: Store, + }, + ), + value: NumberLiteral( + ExprNumberLiteral { + range: 290..291, + value: Int( + 0, + ), + }, + ), + }, + ), + Named( + ExprNamed { + range: 295..301, + target: Name( + ExprName { + range: 295..296, + id: Name("b"), + ctx: Store, + }, + ), + value: NumberLiteral( + ExprNumberLiteral { + range: 300..301, + value: Int( + 1, + ), + }, + ), + }, + ), + ], + ctx: Load, + parenthesized: true, + }, + ), + generators: [ + Comprehension { + range: 304..322, + target: Name( + ExprName { + range: 308..309, + id: Name("a"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 313..322, + func: Name( + ExprName { + range: 313..318, + id: Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 319..322, + args: [ + NumberLiteral( + ExprNumberLiteral { + range: 320..321, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + Comprehension { + range: 323..340, + target: Name( + ExprName { + range: 327..328, + id: Name("b"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 332..340, + func: Name( + ExprName { + range: 332..337, + id: Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 337..340, + args: [ + NumberLiteral( + ExprNumberLiteral { + range: 338..339, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + ], + }, + ), + }, + ), + ], + }, +) +``` +## Semantic Syntax Errors + + | +1 | [(a := 0) for a in range(0)] + | ^ Syntax Error: assignment expression cannot rebind comprehension variable +2 | {(a := 0) for a in range(0)} +3 | {(a := 0): val for a in range(0)} + | + + + | +1 | [(a := 0) for a in range(0)] +2 | {(a := 0) for a in range(0)} + | ^ Syntax Error: assignment expression cannot rebind comprehension variable +3 | {(a := 0): val for a in range(0)} +4 | {key: (a := 0) for a in range(0)} + | + + + | +1 | [(a := 0) for a in range(0)] +2 | {(a := 0) for a in range(0)} +3 | {(a := 0): val for a in range(0)} + | ^ Syntax Error: assignment expression cannot rebind comprehension variable +4 | {key: (a := 0) for a in range(0)} +5 | ((a := 0) for a in range(0)) + | + + + | +2 | {(a := 0) for a in range(0)} +3 | {(a := 0): val for a in range(0)} +4 | {key: (a := 0) for a in range(0)} + | ^ Syntax Error: assignment expression cannot rebind comprehension variable +5 | ((a := 0) for a in range(0)) +6 | [[(a := 0)] for a in range(0)] + | + + + | +3 | {(a := 0): val for a in range(0)} +4 | {key: (a := 0) for a in range(0)} +5 | ((a := 0) for a in range(0)) + | ^ Syntax Error: assignment expression cannot rebind comprehension variable +6 | [[(a := 0)] for a in range(0)] +7 | [(a := 0) for b in range (0) for a in range(0)] + | + + + | +4 | {key: (a := 0) for a in range(0)} +5 | ((a := 0) for a in range(0)) +6 | [[(a := 0)] for a in range(0)] + | ^ Syntax Error: assignment expression cannot rebind comprehension variable +7 | [(a := 0) for b in range (0) for a in range(0)] +8 | [(a := 0) for a in range 
(0) for b in range(0)] + | + + + | +5 | ((a := 0) for a in range(0)) +6 | [[(a := 0)] for a in range(0)] +7 | [(a := 0) for b in range (0) for a in range(0)] + | ^ Syntax Error: assignment expression cannot rebind comprehension variable +8 | [(a := 0) for a in range (0) for b in range(0)] +9 | [((a := 0), (b := 1)) for a in range (0) for b in range(0)] + | + + + | +6 | [[(a := 0)] for a in range(0)] +7 | [(a := 0) for b in range (0) for a in range(0)] +8 | [(a := 0) for a in range (0) for b in range(0)] + | ^ Syntax Error: assignment expression cannot rebind comprehension variable +9 | [((a := 0), (b := 1)) for a in range (0) for b in range(0)] + | + + + | +7 | [(a := 0) for b in range (0) for a in range(0)] +8 | [(a := 0) for a in range (0) for b in range(0)] +9 | [((a := 0), (b := 1)) for a in range (0) for b in range(0)] + | ^ Syntax Error: assignment expression cannot rebind comprehension variable + | + + + | +7 | [(a := 0) for b in range (0) for a in range(0)] +8 | [(a := 0) for a in range (0) for b in range(0)] +9 | [((a := 0), (b := 1)) for a in range (0) for b in range(0)] + | ^ Syntax Error: assignment expression cannot rebind comprehension variable + | diff --git a/crates/ruff_python_parser/tests/snapshots/valid_syntax@expressions__arguments.py.snap b/crates/ruff_python_parser/tests/snapshots/valid_syntax@expressions__arguments.py.snap index bf901c1519203..a7b3283c41e66 100644 --- a/crates/ruff_python_parser/tests/snapshots/valid_syntax@expressions__arguments.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/valid_syntax@expressions__arguments.py.snap @@ -1,7 +1,6 @@ --- source: crates/ruff_python_parser/tests/fixtures.rs input_file: crates/ruff_python_parser/resources/valid/expressions/arguments.py -snapshot_kind: text --- ## AST @@ -1159,7 +1158,7 @@ Module( target: Name( ExprName { range: 562..563, - id: Name("x"), + id: Name("i"), ctx: Store, }, ), diff --git a/crates/ruff_python_parser/tests/snapshots/valid_syntax@non_rebound_comprehension_variable.py.snap b/crates/ruff_python_parser/tests/snapshots/valid_syntax@non_rebound_comprehension_variable.py.snap new file mode 100644 index 0000000000000..8c2d4b83431f4 --- /dev/null +++ b/crates/ruff_python_parser/tests/snapshots/valid_syntax@non_rebound_comprehension_variable.py.snap @@ -0,0 +1,85 @@ +--- +source: crates/ruff_python_parser/tests/fixtures.rs +input_file: crates/ruff_python_parser/resources/inline/ok/non_rebound_comprehension_variable.py +--- +## AST + +``` +Module( + ModModule { + range: 0..27, + body: [ + Expr( + StmtExpr { + range: 0..26, + value: ListComp( + ExprListComp { + range: 0..26, + elt: Named( + ExprNamed { + range: 1..7, + target: Name( + ExprName { + range: 1..2, + id: Name("a"), + ctx: Store, + }, + ), + value: NumberLiteral( + ExprNumberLiteral { + range: 6..7, + value: Int( + 0, + ), + }, + ), + }, + ), + generators: [ + Comprehension { + range: 8..25, + target: Name( + ExprName { + range: 12..13, + id: Name("x"), + ctx: Store, + }, + ), + iter: Call( + ExprCall { + range: 17..25, + func: Name( + ExprName { + range: 17..22, + id: Name("range"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 22..25, + args: [ + NumberLiteral( + ExprNumberLiteral { + range: 23..24, + value: Int( + 0, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ifs: [], + is_async: false, + }, + ], + }, + ), + }, + ), + ], + }, +) +``` diff --git a/crates/ruff_python_semantic/src/model.rs b/crates/ruff_python_semantic/src/model.rs index e1b25670d53a7..d24060b7c41ca 100644 --- 
a/crates/ruff_python_semantic/src/model.rs +++ b/crates/ruff_python_semantic/src/model.rs @@ -1965,11 +1965,6 @@ impl<'a> SemanticModel<'a> { self.flags.intersects(SemanticModelFlags::IMPORT_BOUNDARY) } - /// Return `true` if the model has traverse past the `__future__` import boundary. - pub const fn seen_futures_boundary(&self) -> bool { - self.flags.intersects(SemanticModelFlags::FUTURES_BOUNDARY) - } - /// Return `true` if the model has traversed past the module docstring boundary. pub const fn seen_module_docstring_boundary(&self) -> bool { self.flags @@ -2338,21 +2333,6 @@ bitflags! { /// ``` const IMPORT_BOUNDARY = 1 << 13; - /// The model has traversed past the `__future__` import boundary. - /// - /// For example, the model could be visiting `x` in: - /// ```python - /// from __future__ import annotations - /// - /// import os - /// - /// x: int = 1 - /// ``` - /// - /// Python considers it a syntax error to import from `__future__` after - /// any other non-`__future__`-importing statements. - const FUTURES_BOUNDARY = 1 << 14; - /// The model is in a file that has `from __future__ import annotations` /// at the top of the module. /// @@ -2364,10 +2344,10 @@ bitflags! { /// def f(x: int) -> int: /// ... /// ``` - const FUTURE_ANNOTATIONS = 1 << 15; + const FUTURE_ANNOTATIONS = 1 << 14; /// The model is in a Python stub file (i.e., a `.pyi` file). - const STUB_FILE = 1 << 16; + const STUB_FILE = 1 << 15; /// `__future__`-style type annotations are enabled in this model. /// That could be because it's a stub file, @@ -2383,7 +2363,7 @@ bitflags! { /// /// x: int = 1 /// ``` - const MODULE_DOCSTRING_BOUNDARY = 1 << 17; + const MODULE_DOCSTRING_BOUNDARY = 1 << 16; /// The model is in a (deferred) [type parameter definition]. /// @@ -2407,7 +2387,7 @@ bitflags! { /// not when we "pass by" it when initially traversing the source tree. /// /// [type parameter definition]: https://docs.python.org/3/reference/executionmodel.html#annotation-scopes - const TYPE_PARAM_DEFINITION = 1 << 18; + const TYPE_PARAM_DEFINITION = 1 << 17; /// The model is in a named expression assignment. /// @@ -2415,7 +2395,7 @@ bitflags! { /// ```python /// if (x := 1): ... /// ``` - const NAMED_EXPRESSION_ASSIGNMENT = 1 << 19; + const NAMED_EXPRESSION_ASSIGNMENT = 1 << 18; /// The model is in a docstring as described in [PEP 257]. /// @@ -2436,7 +2416,7 @@ bitflags! { /// ``` /// /// [PEP 257]: https://peps.python.org/pep-0257/#what-is-a-docstring - const PEP_257_DOCSTRING = 1 << 20; + const PEP_257_DOCSTRING = 1 << 19; /// The model is visiting the r.h.s. of a module-level `__all__` definition. /// @@ -2448,7 +2428,7 @@ bitflags! { /// __all__ = ("bar",) /// __all__ += ("baz,") /// ``` - const DUNDER_ALL_DEFINITION = 1 << 21; + const DUNDER_ALL_DEFINITION = 1 << 20; /// The model is in an f-string replacement field. /// @@ -2457,7 +2437,7 @@ bitflags! { /// ```python /// f"first {x} second {y}" /// ``` - const F_STRING_REPLACEMENT_FIELD = 1 << 22; + const F_STRING_REPLACEMENT_FIELD = 1 << 21; /// The model is visiting the bases tuple of a class. /// @@ -2467,11 +2447,11 @@ bitflags! { /// class Baz(Foo, Bar): /// pass /// ``` - const CLASS_BASE = 1 << 23; + const CLASS_BASE = 1 << 22; /// The model is visiting a class base that was initially deferred /// while traversing the AST. (This only happens in stub files.) - const DEFERRED_CLASS_BASE = 1 << 24; + const DEFERRED_CLASS_BASE = 1 << 23; /// The model is in an attribute docstring. /// @@ -2496,7 +2476,7 @@ bitflags! { /// static-analysis tools. 
/// /// [PEP 257]: https://peps.python.org/pep-0257/#what-is-a-docstring - const ATTRIBUTE_DOCSTRING = 1 << 25; + const ATTRIBUTE_DOCSTRING = 1 << 24; /// The model is in the value expression of a [PEP 613] explicit type alias. /// @@ -2508,7 +2488,7 @@ bitflags! { /// ``` /// /// [PEP 613]: https://peps.python.org/pep-0613/ - const ANNOTATED_TYPE_ALIAS = 1 << 27; + const ANNOTATED_TYPE_ALIAS = 1 << 25; /// The model is in the value expression of a [PEP 695] type statement. /// @@ -2518,7 +2498,7 @@ bitflags! { /// ``` /// /// [PEP 695]: https://peps.python.org/pep-0695/#generic-type-alias - const DEFERRED_TYPE_ALIAS = 1 << 28; + const DEFERRED_TYPE_ALIAS = 1 << 26; /// The model is visiting an `assert` statement. /// @@ -2526,7 +2506,7 @@ bitflags! { /// ```python /// assert (y := x**2) > 42, y /// ``` - const ASSERT_STATEMENT = 1 << 29; + const ASSERT_STATEMENT = 1 << 27; /// The model is in a [`@no_type_check`] context. /// @@ -2543,7 +2523,7 @@ bitflags! { /// /// [no_type_check]: https://docs.python.org/3/library/typing.html#typing.no_type_check /// [#13824]: https://github.com/astral-sh/ruff/issues/13824 - const NO_TYPE_CHECK = 1 << 30; + const NO_TYPE_CHECK = 1 << 28; /// The context is in any type annotation. const ANNOTATION = Self::TYPING_ONLY_ANNOTATION.bits() | Self::RUNTIME_EVALUATED_ANNOTATION.bits() | Self::RUNTIME_REQUIRED_ANNOTATION.bits();
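
A minimal sketch of how a downstream consumer might wire the new `semantic_errors` API together, mirroring the `TestContext` added to the parser fixtures in this PR: implement `SemanticSyntaxContext` to decide what happens to each reported error, then drive `SemanticSyntaxCheckerVisitor` over the parsed module. The `Diagnostics` type and `collect_semantic_errors` helper are illustrative names only (not part of this PR), and the sketch assumes `ruff_python_parser` and `ruff_python_ast` are available as dependencies.

```rust
use std::cell::RefCell;

use ruff_python_ast::visitor::Visitor;
use ruff_python_ast::PythonVersion;
use ruff_python_parser::semantic_errors::{
    SemanticSyntaxCheckerVisitor, SemanticSyntaxContext, SemanticSyntaxError,
};
use ruff_python_parser::{parse_unchecked, Mode, ParseOptions};

/// Hypothetical context that collects every reported error unconditionally.
#[derive(Debug, Default)]
struct Diagnostics {
    errors: RefCell<Vec<SemanticSyntaxError>>,
}

impl SemanticSyntaxContext for Diagnostics {
    fn seen_docstring_boundary(&self) -> bool {
        // A real consumer would track this while visiting; `false` keeps the sketch simple.
        false
    }

    fn python_version(&self) -> PythonVersion {
        PythonVersion::default()
    }

    fn report_semantic_error(&self, error: SemanticSyntaxError) {
        self.errors.borrow_mut().push(error);
    }
}

/// Parse `source` and return every semantic syntax error the checker reports.
fn collect_semantic_errors(source: &str) -> Vec<SemanticSyntaxError> {
    let parsed = parse_unchecked(source, ParseOptions::from(Mode::Module))
        .try_into_module()
        .expect("Mode::Module always parses to a module");

    let mut visitor = SemanticSyntaxCheckerVisitor::new(Diagnostics::default());
    for stmt in parsed.suite() {
        visitor.visit_stmt(stmt);
    }
    visitor.into_context().errors.into_inner()
}

fn main() {
    let errors = collect_semantic_errors("[(a := 0) for a in range(0)]");
    // The Display impl prints "assignment expression cannot rebind comprehension variable".
    for error in &errors {
        println!("{error} at {:?}", error.range);
    }
    assert_eq!(errors.len(), 1);
}
```

The linter's `Checker` follows the same shape, except that its `report_semantic_error` gates each `SemanticSyntaxErrorKind` behind the corresponding rule or preview check instead of collecting everything unconditionally.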