src/lexer.cc

changeset 82
841562f5a32f
parent 81
071715c17296
child 85
264a61e9eba0
equal deleted inserted replaced
81:071715c17296 82:841562f5a32f
32 #include "lexer.h" 32 #include "lexer.h"
33 33
34 static string_list g_file_name_stack; 34 static string_list g_file_name_stack;
35 static lexer* g_main_lexer = null; 35 static lexer* g_main_lexer = null;
36 36
37 // =============================================================================
38 //
37 lexer::lexer() 39 lexer::lexer()
38 { 40 {
39 assert (g_main_lexer == null); 41 assert (g_main_lexer == null);
40 g_main_lexer = this; 42 g_main_lexer = this;
41 } 43 }
42 44
45 // =============================================================================
46 //
lexer::~lexer()
{
	// Release the singleton slot so a new lexer may be constructed later.
	g_main_lexer = null;
}
47 51
52 // =============================================================================
53 //
48 void lexer::process_file (string file_name) 54 void lexer::process_file (string file_name)
49 { 55 {
56 g_file_name_stack << file_name;
50 FILE* fp = fopen (file_name, "r"); 57 FILE* fp = fopen (file_name, "r");
51 58
52 if (fp == null) 59 if (fp == null)
53 error ("couldn't open %1 for reading: %2", file_name, strerror (errno)); 60 error ("couldn't open %1 for reading: %2", file_name, strerror (errno));
54 61
55 lexer_scanner sc (fp); 62 lexer_scanner sc (fp);
63 check_file_header (sc);
56 64
57 while (sc.get_next_token()) 65 while (sc.get_next_token())
58 { 66 {
59 // Preprocessor commands: 67 // Preprocessor commands:
60 if (sc.get_token_type() == tk_hash) 68 if (sc.get_token_type() == tk_hash)
80 tok.file = file_name; 88 tok.file = file_name;
81 tok.line = sc.get_line(); 89 tok.line = sc.get_line();
82 tok.column = sc.get_column(); 90 tok.column = sc.get_column();
83 tok.type = sc.get_token_type(); 91 tok.type = sc.get_token_type();
84 tok.text = sc.get_token_text(); 92 tok.text = sc.get_token_text();
85 // devf ("Token #%1: %2:%3:%4: %5 (%6)\n", m_tokens.size(), 93
86 // tok.file, tok.line, tok.column, describe_token (&tok), describe_token_type (tok.type)); 94 // devf ("Token #%1: %2:%3:%4: %5 (%6)\n", m_tokens.size(),
95 // tok.file, tok.line, tok.column, describe_token (&tok), describe_token_type (tok.type));
96
87 m_tokens << tok; 97 m_tokens << tok;
88 } 98 }
89 } 99 }
90 100
91 m_token_position = m_tokens.begin() - 1; 101 m_token_position = m_tokens.begin() - 1;
102 g_file_name_stack.remove (file_name);
103 }
104
105 // ============================================================================
106 //
107 static bool is_valid_header (string header)
108 {
109 if (header.ends_with ("\n"))
110 header.remove_from_end (1);
111
112 string_list tokens = header.split (" ");
113
114 if (tokens.size() != 2 || tokens[0] != "#!botc" || tokens[1].empty())
115 return false;
116
117 string_list nums = tokens[1].split (".");
118
119 if (nums.size() == 2)
120 nums << "0";
121 elif (nums.size() != 3)
122 return false;
123
124 bool ok_a, ok_b, ok_c;
125 long major = nums[0].to_long (&ok_a);
126 long minor = nums[1].to_long (&ok_b);
127 long patch = nums[2].to_long (&ok_c);
128
129 if (!ok_a || !ok_b || !ok_c)
130 return false;
131
132 if (VERSION_NUMBER < MAKE_VERSION_NUMBER (major, minor, patch))
133 error ("The script file requires " APPNAME " v%1, this is v%2",
134 make_version_string (major, minor, patch), get_version_string (e_short_form));
135
136 return true;
137 }
138
139 // ============================================================================
140 //
141 void lexer::check_file_header (lexer_scanner& sc)
142 {
143 if (!is_valid_header (sc.read_line()))
144 error ("Not a valid botscript file! File must start with '#!botc <version>'");
92 } 145 }
93 146
94 // ============================================================================= 147 // =============================================================================
95 // 148 //
96 bool lexer::get_next (e_token req) 149 bool lexer::get_next (e_token req)
123 } 176 }
124 177
125 // ============================================================================= 178 // =============================================================================
126 // eugh.. 179 // eugh..
127 // 180 //
128 void lexer::must_get_next_from_scanner (lexer_scanner& sc, e_token tok) 181 void lexer::must_get_next_from_scanner (lexer_scanner& sc, e_token tt)
129 { 182 {
130 if (!sc.get_next_token()) 183 if (!sc.get_next_token())
131 error ("unexpected EOF"); 184 error ("unexpected EOF");
132 185
133 if (tok != tk_any && sc.get_token_type() != tok) 186 if (tt != tk_any && sc.get_token_type() != tt)
134 error ("expected %1, got %2", describe_token_type (tok), 187 { // TODO
135 describe_token (get_token())); 188 token tok;
189 tok.type = sc.get_token_type();
190 tok.text = sc.get_token_text();
191
192 error ("at %1:%2: expected %3, got %4",
193 g_file_name_stack.last(),
194 sc.get_line(),
195 describe_token_type (tt),
196 describe_token (&tok));
197 }
136 } 198 }
137 199
138 // ============================================================================= 200 // =============================================================================
139 // 201 //
140 void lexer::must_get_any_of (const list<e_token>& toks) 202 void lexer::must_get_any_of (const list<e_token>& toks)
197 if (tok_type < tk_last_named_token) 259 if (tok_type < tk_last_named_token)
198 return "\"" + lexer_scanner::get_token_string (tok_type) + "\""; 260 return "\"" + lexer_scanner::get_token_string (tok_type) + "\"";
199 261
200 switch (tok_type) 262 switch (tok_type)
201 { 263 {
202 case tk_symbol: 264 case tk_symbol: return tok ? tok->text : "a symbol";
203 return tok ? tok->text : "a symbol"; 265 case tk_number: return tok ? tok->text : "a number";
204 266 case tk_string: return tok ? ("\"" + tok->text + "\"") : "a string";
205 case tk_number: 267 case tk_any: return tok ? tok->text : "any token";
206 return tok ? tok->text : "a number"; 268 default: break;
207
208 case tk_string:
209 return tok ? ("\"" + tok->text + "\"") : "a string";
210
211 case tk_any:
212 return tok ? tok->text : "any token";
213
214 default:
215 break;
216 } 269 }
217 270
218 return ""; 271 return "";
219 } 272 }
220 273

mercurial