Rearrange tests in pytest style
takluyver committed Jan 15, 2025
1 parent 42ce358 commit 2ea1d47
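
The diff below applies the standard unittest-to-pytest conversion: the TestLexers class and its setUp method go away, and each group of assertions becomes a plain module-level function that builds its own lexer. A minimal sketch of the pattern, using names from this file (test_something is illustrative; "lexers" refers to the module imported at the top of the test file, elided in this diff view):

# Before: unittest style, with state shared via setUp
from unittest import TestCase

class TestLexers(TestCase):
    def setUp(self):
        self.lexer = lexers.IPythonLexer()

    def test_something(self):
        assert list(self.lexer.get_tokens("x\n"))

# After: pytest style, with local setup in each plain function
def test_something():
    lexer = lexers.IPythonLexer()
    assert list(lexer.get_tokens("x\n"))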
Showing 1 changed file with 175 additions and 169 deletions.

test_ipython_pygments_lexers.py
@@ -3,7 +3,6 @@
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

from unittest import TestCase
from pygments import __version__ as pygments_version
from pygments.token import Token
from pygments.lexers import BashLexer
@@ -15,171 +14,178 @@
TOKEN_WS = Token.Text.Whitespace if pyg214 else Token.Text


class TestLexers(TestCase):
"""Collection of lexers tests"""

def setUp(self):
self.lexer = lexers.IPythonLexer()
self.bash_lexer = BashLexer()

def testIPythonLexer(self):
fragment = "!echo $HOME\n"
bash_tokens = [
(Token.Operator, "!"),
]
bash_tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
ipylex_token = list(self.lexer.get_tokens(fragment))
assert bash_tokens[:-1] == ipylex_token[:-1]

fragment_2 = "!" + fragment
tokens_2 = [
(Token.Operator, "!!"),
] + bash_tokens[1:]
assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

fragment_2 = "\t %%!\n" + fragment[1:]
tokens_2 = [
(Token.Text, "\t "),
(Token.Operator, "%%!"),
(Token.Text, "\n"),
] + bash_tokens[1:]
assert tokens_2 == list(self.lexer.get_tokens(fragment_2))

fragment_2 = "x = " + fragment
tokens_2 = [
(Token.Name, "x"),
(Token.Text, " "),
(Token.Operator, "="),
(Token.Text, " "),
] + bash_tokens
assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

fragment_2 = "x, = " + fragment
tokens_2 = [
(Token.Name, "x"),
(Token.Punctuation, ","),
(Token.Text, " "),
(Token.Operator, "="),
(Token.Text, " "),
] + bash_tokens
assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

fragment_2 = "x, = %sx " + fragment[1:]
tokens_2 = [
(Token.Name, "x"),
(Token.Punctuation, ","),
(Token.Text, " "),
(Token.Operator, "="),
(Token.Text, " "),
(Token.Operator, "%"),
(Token.Keyword, "sx"),
(TOKEN_WS, " "),
] + bash_tokens[1:]
assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

fragment_2 = "f = %R function () {}\n"
tokens_2 = [
(Token.Name, "f"),
(Token.Text, " "),
(Token.Operator, "="),
(Token.Text, " "),
(Token.Operator, "%"),
(Token.Keyword, "R"),
(Token.Text, " function () {}\n"),
]
assert tokens_2 == list(self.lexer.get_tokens(fragment_2))

fragment_2 = "\t%%xyz\n$foo\n"
tokens_2 = [
(Token.Text, "\t"),
(Token.Operator, "%%"),
(Token.Keyword, "xyz"),
(Token.Text, "\n$foo\n"),
]
assert tokens_2 == list(self.lexer.get_tokens(fragment_2))

fragment_2 = "%system?\n"
tokens_2 = [
(Token.Operator, "%"),
(Token.Keyword, "system"),
(Token.Operator, "?"),
(Token.Text, "\n"),
]
assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

fragment_2 = "x != y\n"
tokens_2 = [
(Token.Name, "x"),
(Token.Text, " "),
(Token.Operator, "!="),
(Token.Text, " "),
(Token.Name, "y"),
(Token.Text, "\n"),
]
assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

fragment_2 = " ?math.sin\n"
tokens_2 = [
(Token.Text, " "),
(Token.Operator, "?"),
(Token.Text, "math.sin"),
(Token.Text, "\n"),
]
assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

fragment = " *int*?\n"
tokens = [
(Token.Text, " *int*"),
(Token.Operator, "?"),
(Token.Text, "\n"),
]
assert tokens == list(self.lexer.get_tokens(fragment))

fragment = "%%writefile -a foo.py\nif a == b:\n pass"
tokens = [
(Token.Operator, "%%writefile"),
(Token.Text, " -a foo.py\n"),
(Token.Keyword, "if"),
(Token.Text, " "),
(Token.Name, "a"),
(Token.Text, " "),
(Token.Operator, "=="),
(Token.Text, " "),
(Token.Name, "b"),
(Token.Punctuation, ":"),
(TOKEN_WS, "\n"),
(Token.Text, " "),
(Token.Keyword, "pass"),
(TOKEN_WS, "\n"),
]
assert tokens == list(self.lexer.get_tokens(fragment))

fragment = "%%timeit\nmath.sin(0)"
tokens = [
(Token.Operator, "%%timeit"),
(Token.Text, "\n"),
(Token.Name, "math"),
(Token.Operator, "."),
(Token.Name, "sin"),
(Token.Punctuation, "("),
(Token.Literal.Number.Integer, "0"),
(Token.Punctuation, ")"),
(TOKEN_WS, "\n"),
]
assert tokens == list(self.lexer.get_tokens(fragment))

fragment = "%%HTML\n<div>foo</div>"
tokens = [
(Token.Operator, "%%HTML"),
(Token.Text, "\n"),
(Token.Punctuation, "<"),
(Token.Name.Tag, "div"),
(Token.Punctuation, ">"),
(Token.Text, "foo"),
(Token.Punctuation, "<"),
(Token.Punctuation, "/"),
(Token.Name.Tag, "div"),
(Token.Punctuation, ">"),
(Token.Text, "\n"),
]
assert tokens == list(self.lexer.get_tokens(fragment))


def test_plain_python():
lexer = lexers.IPythonLexer()
fragment_2 = "x != y\n"
tokens_2 = [
(Token.Name, "x"),
(Token.Text, " "),
(Token.Operator, "!="),
(Token.Text, " "),
(Token.Name, "y"),
(Token.Text, "\n"),
]
assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]


def test_shell_commands():
lexer = lexers.IPythonLexer()
bash_lexer = BashLexer()
fragment = "!echo $HOME\n"
bash_tokens = [
(Token.Operator, "!"),
]
bash_tokens.extend(bash_lexer.get_tokens(fragment[1:]))
ipylex_token = list(lexer.get_tokens(fragment))
assert bash_tokens[:-1] == ipylex_token[:-1]

fragment_2 = "!" + fragment
tokens_2 = [
(Token.Operator, "!!"),
] + bash_tokens[1:]
assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]

fragment_2 = "\t %%!\n" + fragment[1:]
tokens_2 = [
(Token.Text, "\t "),
(Token.Operator, "%%!"),
(Token.Text, "\n"),
] + bash_tokens[1:]
assert tokens_2 == list(lexer.get_tokens(fragment_2))

fragment_2 = "x = " + fragment
tokens_2 = [
(Token.Name, "x"),
(Token.Text, " "),
(Token.Operator, "="),
(Token.Text, " "),
] + bash_tokens
assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]

fragment_2 = "x, = " + fragment
tokens_2 = [
(Token.Name, "x"),
(Token.Punctuation, ","),
(Token.Text, " "),
(Token.Operator, "="),
(Token.Text, " "),
] + bash_tokens
assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]

fragment_2 = "x, = %sx " + fragment[1:]
tokens_2 = [
(Token.Name, "x"),
(Token.Punctuation, ","),
(Token.Text, " "),
(Token.Operator, "="),
(Token.Text, " "),
(Token.Operator, "%"),
(Token.Keyword, "sx"),
(TOKEN_WS, " "),
] + bash_tokens[1:]
assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]


def test_magics():
lexer = lexers.IPythonLexer()
fragment_2 = "f = %R function () {}\n"
tokens_2 = [
(Token.Name, "f"),
(Token.Text, " "),
(Token.Operator, "="),
(Token.Text, " "),
(Token.Operator, "%"),
(Token.Keyword, "R"),
(Token.Text, " function () {}\n"),
]
assert tokens_2 == list(lexer.get_tokens(fragment_2))

fragment_2 = "%system?\n"
tokens_2 = [
(Token.Operator, "%"),
(Token.Keyword, "system"),
(Token.Operator, "?"),
(Token.Text, "\n"),
]
assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]


def test_help():
lexer = lexers.IPythonLexer()
fragment_2 = " ?math.sin\n"
tokens_2 = [
(Token.Text, " "),
(Token.Operator, "?"),
(Token.Text, "math.sin"),
(Token.Text, "\n"),
]
assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]

fragment = " *int*?\n"
tokens = [
(Token.Text, " *int*"),
(Token.Operator, "?"),
(Token.Text, "\n"),
]
assert tokens == list(lexer.get_tokens(fragment))


def test_cell_magics():
lexer = lexers.IPythonLexer()
fragment = "%%writefile -a foo.py\nif a == b:\n pass"
tokens = [
(Token.Operator, "%%writefile"),
(Token.Text, " -a foo.py\n"),
(Token.Keyword, "if"),
(Token.Text, " "),
(Token.Name, "a"),
(Token.Text, " "),
(Token.Operator, "=="),
(Token.Text, " "),
(Token.Name, "b"),
(Token.Punctuation, ":"),
(TOKEN_WS, "\n"),
(Token.Text, " "),
(Token.Keyword, "pass"),
(TOKEN_WS, "\n"),
]
assert tokens == list(lexer.get_tokens(fragment))

fragment = "%%timeit\nmath.sin(0)"
tokens = [
(Token.Operator, "%%timeit"),
(Token.Text, "\n"),
(Token.Name, "math"),
(Token.Operator, "."),
(Token.Name, "sin"),
(Token.Punctuation, "("),
(Token.Literal.Number.Integer, "0"),
(Token.Punctuation, ")"),
(TOKEN_WS, "\n"),
]
assert tokens == list(lexer.get_tokens(fragment))

fragment = "%%HTML\n<div>foo</div>"
tokens = [
(Token.Operator, "%%HTML"),
(Token.Text, "\n"),
(Token.Punctuation, "<"),
(Token.Name.Tag, "div"),
(Token.Punctuation, ">"),
(Token.Text, "foo"),
(Token.Punctuation, "<"),
(Token.Punctuation, "/"),
(Token.Name.Tag, "div"),
(Token.Punctuation, ">"),
(Token.Text, "\n"),
]
assert tokens == list(lexer.get_tokens(fragment))

fragment_2 = "\t%%xyz\n$foo\n"
tokens_2 = [
(Token.Text, "\t"),
(Token.Operator, "%%"),
(Token.Keyword, "xyz"),
(Token.Text, "\n$foo\n"),
]
assert tokens_2 == list(lexer.get_tokens(fragment_2))
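
Each rearranged test constructs its own IPythonLexer. If that repetition ever becomes a burden, a pytest fixture would be the usual way to centralize the setup. A hedged sketch of that alternative (not what this commit does; "lexers" and Token are the file's existing imports):

import pytest

@pytest.fixture
def lexer():
    # hypothetical fixture, not part of this commit
    return lexers.IPythonLexer()

def test_plain_python(lexer):
    # pytest injects the fixture's return value as the "lexer" argument
    tokens = list(lexer.get_tokens("x != y\n"))
    assert (Token.Operator, "!=") in tokens

With the TestCase class gone, pytest discovers the module-level test_* functions on its own, so the suite runs directly with: pytest test_ipython_pygments_lexers.py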
