Skip to content

Commit fe4a019

Browse files
committed
Added final batch of unit tests, and support for token comparison
1 parent 3773647 commit fe4a019

File tree

2 files changed

+86
-9
lines changed

2 files changed

+86
-9
lines changed

Token.py

+11-2
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,16 @@ def __init__(self, type, value = None):
66

77
# Overriding the string representation method (so it looks nice in the terminal)
def __repr__(self):
    """Return 'TYPE:VALUE' for value-carrying tokens, otherwise just 'TYPE'.

    Uses an explicit ``is not None`` check so falsy-but-meaningful values
    (e.g. INT 0, FLOAT 0.0, an empty string) are still displayed; the
    previous ``if self.value:`` check hid them.
    """
    if self.value is not None:
        return f"{self.type}:{self.value}"
    return f"{self.type}"
14+
15+
# Overriding the equality method (so we can compare tokens during unit testing)
def __eq__(self, other):
    """Compare tokens structurally by type and value.

    Returns ``NotImplemented`` for non-Token operands so Python can fall
    back to the other operand's comparison; the ``==`` operator still
    evaluates to False in that case, matching the previous behaviour.
    """
    if isinstance(other, Token):
        return self.type == other.type and self.value == other.value
    return NotImplemented

# Defining __eq__ implicitly sets __hash__ to None (tokens would become
# unhashable); restore hashability so tokens can live in sets/dict keys.
def __hash__(self):
    return hash((self.type, self.value))

tests/test_lexer.py

+75-7
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,9 @@
22
# python -m unittest discover -s ./tests -p test_*.py
33
# python -m unittest discover -s tests
44

5-
import unittest
65
from unittest import TestCase
76

8-
import lexer
7+
from Token import Token
98
from lexer import Lexer
109

1110
class TestLexer(TestCase):
@@ -15,6 +14,14 @@ def setUp(self):
1514
def tearDown(self):
1615
self.lexer = None
1716

17+
def test_lexer_should_exist(self):
    """A freshly constructed lexer starts at index/row/column 0 on 'T'."""
    lex = Lexer("Test")
    self.lexer = lex
    self.assertEqual("Test", lex.input_string)
    self.assertEqual(0, lex.position.index)
    self.assertEqual(0, lex.position.row)
    self.assertEqual(0, lex.position.column)
    self.assertEqual('T', lex.current_character)
24+
1825
def test_current_character_in_focus_should_be_T(self):
    """The lexer from setUp should be focused on the character 'T'."""
    self.assertEqual('T', self.lexer.current_character)
2027

@@ -30,27 +37,88 @@ def test_advance_3_characters_should_be_t(self):
3037
self.lexer.advance_n(3)
3138
self.assertEqual(self.lexer.current_character, 't')
3239

40+
def test_tokenize_should_return_correct_token_list(self):
    """Tokenizing a small expression yields the expected token sequence."""
    produced = Lexer("x is 42+3.14").tokenize()
    expected = [
        Token("ID", "x"),
        Token("WS"),
        Token("ASSIGN"),
        Token("WS"),
        Token("INT", 42),
        Token("PLUS"),
        Token("FLOAT", 3.14),
    ]
    self.assertEqual(produced, expected)
47+
3348
def test_peeked_word_ahead_should_be_Batman(self):
    """peek_word_ahead returns the upcoming word despite leading whitespace."""
    self.lexer = Lexer(" Batman")
    self.assertEqual("Batman", self.lexer.peek_word_ahead())
3651

3752
def test_digit_tokenize_should_return_integer_42_token(self):
    """digit_tokenize on "42" produces an INT token valued 42."""
    self.lexer = Lexer("42")
    tok = self.lexer.digit_tokenize()
    self.assertEqual("INT", tok.type)
    self.assertEqual(42, tok.value)
4257

4358
def test_digit_tokenize_should_return_float_3_14_token(self):
    """digit_tokenize on "3.14" produces a FLOAT token valued 3.14."""
    self.lexer = Lexer("3.14")
    tok = self.lexer.digit_tokenize()
    self.assertEqual("FLOAT", tok.type)
    self.assertEqual(3.14, tok.value)
4863

4964
def test_escape_character_token_should_be_newline(self):
    """escape_tokenize on a newline character yields a value-less NL token."""
    self.lexer = Lexer('\n')
    tok = self.lexer.escape_tokenize()
    self.assertEqual("NL", tok.type)
    self.assertIsNone(tok.value)
69+
70+
def test_token_should_be_assignment(self):
    """The keyword "is" maps to a value-less ASSIGN token."""
    self.lexer = Lexer("is")
    tok = self.lexer.keyword_tokenize()
    self.assertEqual("ASSIGN", tok.type)
    self.assertIsNone(tok.value)
75+
76+
def test_token_should_be_cell_R2(self):
    """"cell R2" tokenizes to a CELL token carrying the cell name."""
    self.lexer = Lexer("cell R2")
    tok = self.lexer.keyword_tokenize()
    self.assertEqual("CELL", tok.type)
    self.assertEqual("R2", tok.value)
81+
82+
def test_cell_value_should_be_D2(self):
    """handle_excel_cell returns a CELL token for the upcoming cell name."""
    self.lexer = Lexer(" D2")
    tok = self.lexer.handle_excel_cell()
    self.assertEqual("CELL", tok.type)
    self.assertEqual("D2", tok.value)
87+
88+
def test_token_should_be_return_keyword(self):
    """"return" tokenizes to a KW token carrying the keyword text."""
    self.lexer = Lexer("return")
    tok = self.lexer.keyword_tokenize()
    self.assertEqual("KW", tok.type)
    self.assertEqual("return", tok.value)
93+
94+
def test_token_should_be_identifier(self):
    """A non-keyword word tokenizes to an ID token carrying its text."""
    self.lexer = Lexer("myVariable")
    tok = self.lexer.keyword_tokenize()
    self.assertEqual("ID", tok.type)
    self.assertEqual("myVariable", tok.value)
99+
100+
def test_token_should_be_string(self):
    """string_tokenize yields a STR token whose value keeps the quotes."""
    self.lexer = Lexer('"Beam me up, Scotty!"')
    tok = self.lexer.string_tokenize()
    self.assertEqual("STR", tok.type)
    self.assertEqual('"Beam me up, Scotty!"', tok.value)
105+
106+
def test_token_should_be_greater_than_or_equal_to(self):
    """The multi-word phrase maps to a value-less ">=" token."""
    self.lexer = Lexer("is greater than or equal to")
    tok = self.lexer.keyword_tokenize()
    self.assertEqual(">=", tok.type)
    self.assertIsNone(tok.value)
111+
112+
def test_multi_word_operator_should_be_equal_to(self):
    """handle_multi_word_operator recognizes "is equal to" as "==" with no value."""
    tok = self.lexer.handle_multi_word_operator("is equal to")
    self.assertEqual("==", tok.type)
    self.assertIsNone(tok.value)
54116

55-
if __name__ == '__main__':
56-
unittest.main()
117+
def test_multi_word_operator_should_be_illegal(self):
    """An unrecognized multi-word operator should terminate with SystemExit."""
    self.assertRaises(
        SystemExit, self.lexer.handle_multi_word_operator, "is ekual to"
    )
120+
121+
def test_character_should_be_illegal(self):
    """Tokenizing an unsupported character should terminate with SystemExit."""
    self.lexer = Lexer("?")
    self.assertRaises(SystemExit, self.lexer.tokenize)

0 commit comments

Comments
 (0)