Skip to content

Commit

Permalink
Remove lexing for colon-matching use cases
Browse files — browse the repository at this point in the history
  • Loading branch information
charliermarsh committed Aug 23, 2023
1 parent 5f5de52 commit a8de234
Show file tree
Hide file tree
Showing 3 changed files with 25 additions and 65 deletions.
20 changes: 9 additions & 11 deletions crates/ruff/src/rules/flake8_simplify/rules/ast_if.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@ use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::comparable::{ComparableConstant, ComparableExpr, ComparableStmt};
use ruff_python_ast::helpers::{any_over_expr, contains_effect};
use ruff_python_ast::stmt_if::{if_elif_branches, IfElifBranch};
use ruff_python_parser::first_colon_range;
use ruff_python_semantic::SemanticModel;
use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
use ruff_source_file::{Locator, UniversalNewlines};

use crate::checkers::ast::Checker;
Expand Down Expand Up @@ -373,12 +373,6 @@ pub(crate) fn nested_if_statements(
return;
};

let colon = first_colon_range(
TextRange::new(test.end(), first_stmt.start()),
checker.locator().contents(),
checker.source_type.is_jupyter(),
);

// Check if the parent is already emitting a larger diagnostic including this if statement
if let Some(Stmt::If(stmt_if)) = parent {
if let Some((body, _range, _is_elif)) = nested_if_body(stmt_if) {
Expand All @@ -392,10 +386,14 @@ pub(crate) fn nested_if_statements(
}
}

let mut diagnostic = Diagnostic::new(
CollapsibleIf,
colon.map_or(range, |colon| TextRange::new(range.start(), colon.end())),
);
let Some(colon) = SimpleTokenizer::starts_at(test.end(), checker.locator().contents())
.skip_trivia()
.find(|token| token.kind == SimpleTokenKind::Colon)
else {
return;
};

let mut diagnostic = Diagnostic::new(CollapsibleIf, TextRange::new(range.start(), colon.end()));
if checker.patch(diagnostic.kind.rule()) {
// The fixer preserves comments in the nested body, but removes comments between
// the outer and inner if statements.
Expand Down
30 changes: 11 additions & 19 deletions crates/ruff/src/rules/flake8_simplify/rules/ast_with.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
use log::error;
use ruff_python_ast::{self as ast, Ranged, Stmt, WithItem};
use ruff_text_size::TextRange;

use ruff_diagnostics::{AutofixKind, Violation};
use ruff_diagnostics::{Diagnostic, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_parser::first_colon_range;
use ruff_python_ast::{self as ast, Ranged, Stmt, WithItem};
use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
use ruff_source_file::UniversalNewlines;
use ruff_text_size::TextRange;

use crate::checkers::ast::Checker;
use crate::line_width::LineWidth;
Expand Down Expand Up @@ -113,25 +113,17 @@ pub(crate) fn multiple_with_statements(
return;
}

let last_item = items.last().expect("Expected items to be non-empty");
let colon = first_colon_range(
TextRange::new(
last_item
.optional_vars
.as_ref()
.map_or(last_item.context_expr.end(), |v| v.end()),
body.first().expect("Expected body to be non-empty").start(),
),
checker.locator().contents(),
checker.source_type.is_jupyter(),
);
let Some(colon) = items.last().and_then(|item| {
SimpleTokenizer::starts_at(item.end(), checker.locator().contents())
.skip_trivia()
.find(|token| token.kind == SimpleTokenKind::Colon)
}) else {
return;
};

let mut diagnostic = Diagnostic::new(
MultipleWithStatements,
colon.map_or_else(
|| with_stmt.range(),
|colon| TextRange::new(with_stmt.start(), colon.end()),
),
TextRange::new(with_stmt.start(), colon.end()),
);
if checker.patch(diagnostic.kind.rule()) {
if !checker
Expand Down
40 changes: 5 additions & 35 deletions crates/ruff_python_parser/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,6 @@
//! [parsing]: https://en.wikipedia.org/wiki/Parsing
//! [lexer]: crate::lexer

use crate::lexer::LexResult;
pub use parser::{
parse, parse_expression, parse_expression_starts_at, parse_program, parse_starts_at,
parse_suite, parse_tokens, ParseError, ParseErrorType,
Expand All @@ -119,6 +118,8 @@ use ruff_text_size::{TextRange, TextSize};
pub use string::FStringErrorType;
pub use token::{StringKind, Tok, TokenKind};

use crate::lexer::LexResult;

mod function;
// Skip flattening lexer to distinguish from full ruff_python_parser
mod context;
Expand Down Expand Up @@ -159,25 +160,6 @@ pub fn parse_program_tokens(
}
}

/// Return the `Range` of the first `Tok::Colon` token in a `Range`.
pub fn first_colon_range(
range: TextRange,
source: &str,
is_jupyter_notebook: bool,
) -> Option<TextRange> {
let contents = &source[range];
let mode = if is_jupyter_notebook {
Mode::Jupyter
} else {
Mode::Module
};
let range = lexer::lex_starts_at(contents, mode, range.start())
.flatten()
.find(|(tok, _)| tok.is_colon())
.map(|(_, range)| range);
range
}

/// Extract all [`CmpOp`] operators from an expression snippet, with appropriate
/// ranges.
///
Expand Down Expand Up @@ -373,24 +355,12 @@ mod python {

#[cfg(test)]
mod tests {
use crate::{first_colon_range, locate_cmp_ops, parse_expression, LocatedCmpOp};
use anyhow::Result;
use ruff_python_ast::CmpOp;

use ruff_text_size::{TextLen, TextRange, TextSize};
use ruff_python_ast::CmpOp;
use ruff_text_size::TextSize;

#[test]
fn extract_first_colon_range() {
let contents = "with a: pass";
let range = first_colon_range(
TextRange::new(TextSize::from(0), contents.text_len()),
contents,
false,
)
.unwrap();
assert_eq!(&contents[range], ":");
assert_eq!(range, TextRange::new(TextSize::from(6), TextSize::from(7)));
}
use crate::{locate_cmp_ops, parse_expression, LocatedCmpOp};

#[test]
fn extract_cmp_op_location() -> Result<()> {
Expand Down

0 comments on commit a8de234

Please sign in to comment.