# python -m unittest discover -s ./tests -p test_*.py
# python -m unittest discover -s tests

-import unittest
from unittest import TestCase

-import lexer
+from Token import Token
from lexer import Lexer


class TestLexer(TestCase):
@@ -15,6 +14,14 @@ def setUp(self):
    def tearDown(self):
        self.lexer = None

+    def test_lexer_should_exist(self):
+        self.lexer = Lexer("Test")
+        self.assertEqual(self.lexer.input_string, "Test")
+        self.assertEqual(self.lexer.position.index, 0)
+        self.assertEqual(self.lexer.position.row, 0)
+        self.assertEqual(self.lexer.position.column, 0)
+        self.assertEqual(self.lexer.current_character, 'T')
+
    def test_current_character_in_focus_should_be_T(self):
        self.assertEqual(self.lexer.current_character, 'T')

@@ -30,27 +37,88 @@ def test_advance_3_characters_should_be_t(self):
        self.lexer.advance_n(3)
        self.assertEqual(self.lexer.current_character, 't')

+    def test_tokenize_should_return_correct_token_list(self):
+        lexer = Lexer("x is 42+3.14")
+        tokens = lexer.tokenize()
+        expected_tokens = [Token("ID", "x"), Token("WS"), Token("ASSIGN"),
+                           Token("WS"), Token("INT", 42), Token("PLUS"),
+                           Token("FLOAT", 3.14)]
+        self.assertEqual(tokens, expected_tokens)
+
    def test_peeked_word_ahead_should_be_Batman(self):
        self.lexer = Lexer(" Batman")
        self.assertEqual(self.lexer.peek_word_ahead(), "Batman")

    def test_digit_tokenize_should_return_integer_42_token(self):
        self.lexer = Lexer("42")
        token = self.lexer.digit_tokenize()
-        self.assertTrue(token.type, "INTEGER")
+        self.assertEqual(token.type, "INT")
        self.assertEqual(token.value, 42)

    def test_digit_tokenize_should_return_float_3_14_token(self):
        self.lexer = Lexer("3.14")
        token = self.lexer.digit_tokenize()
-        self.assertTrue(token.type, "FLOAT")
+        self.assertEqual(token.type, "FLOAT")
        self.assertEqual(token.value, 3.14)

    def test_escape_character_token_should_be_newline(self):
        self.lexer = Lexer('\n')
        token = self.lexer.escape_tokenize()
-        self.assertTrue(token.type, "NEWLINE")
+        self.assertEqual(token.type, "NL")
+        self.assertEqual(token.value, None)
+
+    def test_token_should_be_assignment(self):
+        self.lexer = Lexer("is")
+        token = self.lexer.keyword_tokenize()
+        self.assertEqual(token.type, "ASSIGN")
+        self.assertEqual(token.value, None)
+
+    def test_token_should_be_cell_R2(self):
+        self.lexer = Lexer("cell R2")
+        token = self.lexer.keyword_tokenize()
+        self.assertEqual(token.type, "CELL")
+        self.assertEqual(token.value, "R2")
+
+    def test_cell_value_should_be_D2(self):
+        self.lexer = Lexer(" D2")
+        token = self.lexer.handle_excel_cell()
+        self.assertEqual(token.type, "CELL")
+        self.assertEqual(token.value, "D2")
+
+    def test_token_should_be_return_keyword(self):
+        self.lexer = Lexer("return")
+        token = self.lexer.keyword_tokenize()
+        self.assertEqual(token.type, "KW")
+        self.assertEqual(token.value, "return")
+
+    def test_token_should_be_identifier(self):
+        self.lexer = Lexer("myVariable")
+        token = self.lexer.keyword_tokenize()
+        self.assertEqual(token.type, "ID")
+        self.assertEqual(token.value, "myVariable")
+
+    def test_token_should_be_string(self):
+        self.lexer = Lexer('"Beam me up, Scotty!"')
+        token = self.lexer.string_tokenize()
+        self.assertEqual(token.type, "STR")
+        self.assertEqual(token.value, '"Beam me up, Scotty!"')
+
+    def test_token_should_be_greater_than_or_equal_to(self):
+        self.lexer = Lexer("is greater than or equal to")
+        token = self.lexer.keyword_tokenize()
+        self.assertEqual(token.type, ">=")
+        self.assertEqual(token.value, None)
+
+    def test_multi_word_operator_should_be_equal_to(self):
+        token = self.lexer.handle_multi_word_operator("is equal to")
+        self.assertEqual(token.type, "==")
        self.assertEqual(token.value, None)

-if __name__ == '__main__':
-    unittest.main()
+    def test_multi_word_operator_should_be_illegal(self):
+        with self.assertRaises(SystemExit):
+            self.lexer.handle_multi_word_operator("is ekual to")
+
+    def test_character_should_be_illegal(self):
+        self.lexer = Lexer("?")
+        with self.assertRaises(SystemExit):
+            self.lexer.tokenize()
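Note: test_tokenize_should_return_correct_token_list compares whole Token lists with assertEqual, and several other tests build tokens both with and without a value. That only works if Token defines value-based equality and a default value. The repository's actual Token.py is not shown in this diff, so the following is only a minimal sketch of a Token class these tests could run against; the attribute names and defaults are assumptions.

# Minimal sketch only -- the real Token.py in this repository may differ.
# assertEqual on lists of tokens needs __eq__; value defaults to None so
# tokens like Token("WS") or Token("ASSIGN") can be built without a value.
class Token:
    def __init__(self, type, value=None):
        self.type = type
        self.value = value

    def __eq__(self, other):
        return (isinstance(other, Token)
                and self.type == other.type
                and self.value == other.value)

    def __repr__(self):
        return f"Token({self.type!r}, {self.value!r})"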