src/lexer.cc

changeset 79:2425fa6a4f21
parent    75:bf8c57437231
child     81:071715c17296
diff -r e6d7e32e6481 -r 2425fa6a4f21 src/lexer.cc
--- a/src/lexer.cc
+++ b/src/lexer.cc
@@ -36,42 +36,35 @@
 
 lexer::lexer()
 {
     assert (g_main_lexer == null);
     g_main_lexer = this;
-    devf ("Lexer initialized\n");
 }
 
 lexer::~lexer()
 {
     g_main_lexer = null;
-    devf ("Lexer de-initialized\n");
 }
 
 void lexer::process_file (string file_name)
 {
-    devf ("Lexer: processing %1\n", file_name);
     FILE* fp = fopen (file_name, "r");
 
     if (fp == null)
         error ("couldn't open %1 for reading: %2", file_name, strerror (errno));
 
     lexer_scanner sc (fp);
 
-    devf ("Processing tokens...\n");
     while (sc.get_next_token())
     {
-        devf (".\n");
         // Preprocessor commands:
         if (sc.get_token_type() == tk_hash)
        {
             must_get_next_from_scanner (sc, tk_symbol);
 
             if (sc.get_token_text() == "include")
             {
-                devf ("Lexer: encountered #include\n");
-
                 must_get_next_from_scanner (sc, tk_string);
                 string file_name = sc.get_token_text();
 
                 if (g_file_name_stack.contains (file_name))
                     error ("attempted to #include %1 recursively", sc.get_token_text());
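The recursion guard at the end of the hunk above works by keeping the names of all files currently being lexed in g_file_name_stack. A minimal sketch of the full pattern, assuming a hypothetical lexer::process_include() wrapper and an assumed remove_one() container method; only the contains() check is actually visible in this diff:

    // Sketch only, not code from this changeset: catch a chain of includes
    // that loops back to a file which is still open.
    void lexer::process_include (string file_name)    // hypothetical wrapper
    {
        if (g_file_name_stack.contains (file_name))
            error ("attempted to #include %1 recursively", file_name);

        g_file_name_stack << file_name;               // mark the file as open
        process_file (file_name);                     // lex its tokens recursively
        g_file_name_stack.remove_one (file_name);     // assumed method: mark it closed
    }

Re-including a file that has already been fully processed stays legal; only a cycle through a currently open file is rejected.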
@@ -88,48 +81,36 @@
             tok.line = sc.get_line();
             tok.column = sc.get_column();
             tok.type = sc.get_token_type();
             tok.text = sc.get_token_text();
             m_tokens << tok;
-            devf ("Lexer: added %1 (%2)\n", describe_token_type (tok.type),
-                describe_token (&tok));
         }
     }
 
-    devf ("Lexer: File %1 processed (%2 tokens).\n", file_name, m_tokens.size());
     m_token_position = m_tokens.begin() - 1;
 }
 
 // =============================================================================
 //
 bool lexer::get_next (e_token req)
 {
     iterator pos = m_token_position;
-    devf ("Lexer: Requested next token, requirement: %1\n", describe_token_type (req));
 
     if (m_tokens.is_empty())
-    {
-        devf ("Lexer: no tokens! Failed.\n");
         return false;
-    }
+
+    m_token_position++;
 
     if (is_at_end())
-    {
-        devf ("Lexer: at end of tokens. Failed.\n");
         return false;
-    }
-
-    m_token_position++;
 
     if (req != tk_any && get_token_type() != req)
     {
-        devf ("Lexer: Token %1 does not meet the requirement\n", describe_token (get_token()));
         m_token_position = pos;
         return false;
     }
 
-    devf ("Lexer: Get successful: %1\n", describe_token (get_token()));
     return true;
 }
 
 // =============================================================================
 //
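For readability, here is the body of get_next() as it reads after the hunk above, with descriptive comments added; the code itself is identical to the new version:

    bool lexer::get_next (e_token req)
    {
        iterator pos = m_token_position;    // remember the cursor so a failed
                                            // match can be undone
        if (m_tokens.is_empty())
            return false;

        m_token_position++;                 // advance to the candidate token

        if (is_at_end())
            return false;                   // stepped past the last token

        if (req != tk_any && get_token_type() != req)
        {
            m_token_position = pos;         // roll back: the token is not consumed
            return false;
        }

        return true;
    }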
@@ -156,12 +137,10 @@
 
 // =============================================================================
 //
 void lexer::must_get_any_of (const list<e_token>& toks)
 {
-    devf ("Lexer: need to get a token that is any of: %1\n", toks);
-
     if (!get_next())
         error ("unexpected EOF");
 
     for (e_token tok : toks)
         if (get_token_type() == tok)
@@ -204,21 +183,20 @@
 
 // =============================================================================
 //
 void lexer::must_be (e_token tok)
 {
-    print ("pos: %1", m_token_position - m_tokens.begin());
     if (get_token_type() != tok)
         error ("expected %1, got %2", describe_token_type (tok),
             describe_token (get_token()));
 }
 
 // =============================================================================
 //
 string lexer::describe_token_private (e_token tok_type, lexer::token* tok)
 {
-    if ( (int) tok_type < (int) last_named_token)
+    if (tok_type < tk_last_named_token)
         return "\"" + lexer_scanner::get_token_string (tok_type) + "\"";
 
     switch (tok_type)
     {
         case tk_symbol:
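Every hunk in this changeset strips devf() trace calls (plus one stray print() in must_be()); devf itself is defined elsewhere in the tree and takes the same %1/%2 placeholders as error(). As a rough sketch of what such a development-only logger commonly looks like (the RELEASE_BUILD flag and the printf-style formatting are assumptions, not this project's actual definition):

    #include <cstdio>
    #include <cstdarg>

    // Sketch of a debug-only trace helper that compiles to a no-op in
    // release builds. The real devf() goes through the project's own
    // %1/%2 formatter rather than printf formatting.
    static void devf (const char* fmt, ...)
    {
    #ifndef RELEASE_BUILD                    // assumed compile-time switch
        va_list args;
        va_start (args, fmt);
        vfprintf (stderr, fmt, args);
        va_end (args);
    #else
        (void) fmt;                          // keep the parameter used
    #endif
    }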
