
Commit d10c6f1

kurtmckee authored and michaelmior committed
Refactor tests/test_lexer.py
1 parent 02a9ea4 · commit d10c6f1
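The change converts the unittest-style TestLexer class into plain pytest functions driven by @pytest.mark.parametrize, replacing the hand-rolled token comparison loop and the assertRaises calls. As a minimal sketch of the idiom swap only (the divide function below is hypothetical, not from this repo):

    import pytest


    def divide(a, b):
        return a / b


    # unittest spelling: self.assertRaises(ZeroDivisionError, divide, 1, 0)
    # pytest spelling: parametrize the inputs, assert with a context manager
    @pytest.mark.parametrize("a, b", [(1, 0), (2, 0)])
    def test_divide_by_zero(a, b):
        with pytest.raises(ZeroDivisionError):
            divide(a, b)

Each parametrized tuple becomes an independent test, so one failing input no longer masks the cases after it.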

File tree

1 file changed: +52 −65 lines changed


tests/test_lexer.py

Lines changed: 52 additions & 65 deletions
@@ -1,68 +1,55 @@
-import logging
-import unittest
-
-from ply.lex import LexToken
+import pytest
 
 from jsonpath_ng.lexer import JsonPathLexer, JsonPathLexerError
 
-class TestLexer(unittest.TestCase):
-
-    def token(self, value, ty=None):
-        t = LexToken()
-        t.type = ty if ty != None else value
-        t.value = value
-        t.lineno = -1
-        t.lexpos = -1
-        return t
-
-    def assert_lex_equiv(self, s, stream2):
-        # NOTE: lexer fails to reset after call?
-        l = JsonPathLexer(debug=True)
-        stream1 = list(l.tokenize(s)) # Save the stream for debug output when a test fails
-        stream2 = list(stream2)
-        assert len(stream1) == len(stream2)
-        for token1, token2 in zip(stream1, stream2):
-            print(token1, token2)
-            assert token1.type == token2.type
-            assert token1.value == token2.value
-
-    @classmethod
-    def setup_class(cls):
-        logging.basicConfig()
-
-    def test_simple_inputs(self):
-        self.assert_lex_equiv('$', [self.token('$', '$')])
-        self.assert_lex_equiv('"hello"', [self.token('hello', 'ID')])
-        self.assert_lex_equiv("'goodbye'", [self.token('goodbye', 'ID')])
-        self.assert_lex_equiv("'doublequote\"'", [self.token('doublequote"', 'ID')])
-        self.assert_lex_equiv(r'"doublequote\""', [self.token('doublequote"', 'ID')])
-        self.assert_lex_equiv(r"'singlequote\''", [self.token("singlequote'", 'ID')])
-        self.assert_lex_equiv('"singlequote\'"', [self.token("singlequote'", 'ID')])
-        self.assert_lex_equiv('fuzz', [self.token('fuzz', 'ID')])
-        self.assert_lex_equiv('1', [self.token(1, 'NUMBER')])
-        self.assert_lex_equiv('45', [self.token(45, 'NUMBER')])
-        self.assert_lex_equiv('-1', [self.token(-1, 'NUMBER')])
-        self.assert_lex_equiv(' -13 ', [self.token(-13, 'NUMBER')])
-        self.assert_lex_equiv('"fuzz.bang"', [self.token('fuzz.bang', 'ID')])
-        self.assert_lex_equiv('fuzz.bang', [self.token('fuzz', 'ID'), self.token('.', '.'), self.token('bang', 'ID')])
-        self.assert_lex_equiv('fuzz.*', [self.token('fuzz', 'ID'), self.token('.', '.'), self.token('*', '*')])
-        self.assert_lex_equiv('fuzz..bang', [self.token('fuzz', 'ID'), self.token('..', 'DOUBLEDOT'), self.token('bang', 'ID')])
-        self.assert_lex_equiv('&', [self.token('&', '&')])
-        self.assert_lex_equiv('@', [self.token('@', 'ID')])
-        self.assert_lex_equiv('`this`', [self.token('this', 'NAMED_OPERATOR')])
-        self.assert_lex_equiv('|', [self.token('|', '|')])
-        self.assert_lex_equiv('where', [self.token('where', 'WHERE')])
-
-    def test_basic_errors(self):
-        def tokenize(s):
-            l = JsonPathLexer(debug=True)
-            return list(l.tokenize(s))
-
-        self.assertRaises(JsonPathLexerError, tokenize, "'\"")
-        self.assertRaises(JsonPathLexerError, tokenize, '"\'')
-        self.assertRaises(JsonPathLexerError, tokenize, '`"')
-        self.assertRaises(JsonPathLexerError, tokenize, "`'")
-        self.assertRaises(JsonPathLexerError, tokenize, '"`')
-        self.assertRaises(JsonPathLexerError, tokenize, "'`")
-        self.assertRaises(JsonPathLexerError, tokenize, '?')
-        self.assertRaises(JsonPathLexerError, tokenize, '$.foo.bar.#')
+token_test_cases = (
+    ("$", (("$", "$"),)),
+    ('"hello"', (("hello", "ID"),)),
+    ("'goodbye'", (("goodbye", "ID"),)),
+    ("'doublequote\"'", (('doublequote"', "ID"),)),
+    (r'"doublequote\""', (('doublequote"', "ID"),)),
+    (r"'singlequote\''", (("singlequote'", "ID"),)),
+    ('"singlequote\'"', (("singlequote'", "ID"),)),
+    ("fuzz", (("fuzz", "ID"),)),
+    ("1", ((1, "NUMBER"),)),
+    ("45", ((45, "NUMBER"),)),
+    ("-1", ((-1, "NUMBER"),)),
+    (" -13 ", ((-13, "NUMBER"),)),
+    ('"fuzz.bang"', (("fuzz.bang", "ID"),)),
+    ("fuzz.bang", (("fuzz", "ID"), (".", "."), ("bang", "ID"))),
+    ("fuzz.*", (("fuzz", "ID"), (".", "."), ("*", "*"))),
+    ("fuzz..bang", (("fuzz", "ID"), ("..", "DOUBLEDOT"), ("bang", "ID"))),
+    ("&", (("&", "&"),)),
+    ("@", (("@", "ID"),)),
+    ("`this`", (("this", "NAMED_OPERATOR"),)),
+    ("|", (("|", "|"),)),
+    ("where", (("where", "WHERE"),)),
+)
+
+
+@pytest.mark.parametrize("string, expected_token_info", token_test_cases)
+def test_lexer(string, expected_token_info):
+    lexer = JsonPathLexer(debug=True)
+    tokens = list(lexer.tokenize(string))
+    assert len(tokens) == len(expected_token_info)
+    for token, (expected_value, expected_type) in zip(tokens, expected_token_info):
+        assert token.type == expected_type
+        assert token.value == expected_value
+
+
+invalid_token_test_cases = (
+    "'\"",
+    "\"'",
+    '`"',
+    "`'",
+    '"`',
+    "'`",
+    "?",
+    "$.foo.bar.#",
+)
+
+
+@pytest.mark.parametrize("string", invalid_token_test_cases)
+def test_lexer_errors(string):
+    with pytest.raises(JsonPathLexerError):
+        list(JsonPathLexer().tokenize(string))
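After the refactor, each entry in token_test_cases and invalid_token_test_cases runs as its own test, and pytest reports each parametrized case separately when invoked as pytest tests/test_lexer.py. A quick interactive check of what the parametrized cases assert (assumes jsonpath_ng is installed; expected values taken from the table above):

    from jsonpath_ng.lexer import JsonPathLexer

    lexer = JsonPathLexer(debug=True)
    for token in lexer.tokenize("fuzz..bang"):
        print(token.value, token.type)
    # Expected per token_test_cases: fuzz ID, .. DOUBLEDOT, bang ID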

0 commit comments
