Commit | Line | Data |
---|---|---|
34e49164 C |
1 | (* |
2 | * Copyright 2005-2008, Ecole des Mines de Nantes, University of Copenhagen | |
3 | * Yoann Padioleau, Julia Lawall, Rene Rydhof Hansen, Henrik Stuart, Gilles Muller | |
4 | * This file is part of Coccinelle. | |
5 | * | |
6 | * Coccinelle is free software: you can redistribute it and/or modify | |
7 | * it under the terms of the GNU General Public License as published by | |
8 | * the Free Software Foundation, according to version 2 of the License. | |
9 | * | |
10 | * Coccinelle is distributed in the hope that it will be useful, | |
11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
13 | * GNU General Public License for more details. | |
14 | * | |
15 | * You should have received a copy of the GNU General Public License | |
16 | * along with Coccinelle. If not, see <http://www.gnu.org/licenses/>. | |
17 | * | |
18 | * The authors reserve the right to distribute this or future versions of | |
19 | * Coccinelle under other licenses. | |
20 | *) | |
21 | ||
22 | ||
23 | { | |
24 | open Parser_cocci_menhir | |
25 | module D = Data | |
26 | module Ast = Ast_cocci | |
27 | module Ast0 = Ast0_cocci | |
28 | module P = Parse_aux | |
29 | exception Lexical of string | |
30 | let tok = Lexing.lexeme | |
31 | ||
(* Physical line number of the token being lexed, and the logical line
   number (lines that actually contain code; see start_line below). *)
let line = ref 1
let logical_line = ref 0

(* ---------------------------------------------------------------------- *)
(* control codes *)

(* Defined in data.ml
type line_type = MINUS | OPTMINUS | UNIQUEMINUS | PLUS | CONTEXT | UNIQUE | OPT
*)

(* Transformation kind of the current line plus its physical and logical
   line numbers; CONTEXT until a control character ('-', '+', ...) is seen. *)
let current_line_type = ref (D.CONTEXT,!line,!logical_line)

let prev_plus = ref false
(* Offset of the beginning of the line; set to -1 once consumed below. *)
let line_start = ref 0

(* Build the full clt tuple for the token just matched: kind, line numbers,
   start offset, and the count of spaces preceding the token on its line. *)
let get_current_line_type lexbuf =
  let (kind,ln,lln) = !current_line_type in
  let lex_start = Lexing.lexeme_start lexbuf in
  let preceding_spaces =
    if !line_start < 0 then 0 else lex_start - !line_start in
  line_start := -1;
  prev_plus := (kind = D.PLUS);
  (kind,ln,lln,lex_start,preceding_spaces,[],[],Ast0.NoMetaPos)

let current_line_started = ref false
let col_zero = ref true
56 | ||
(* A newline was consumed: advance the physical line counter and reset the
   per-line state (kind reverts to CONTEXT, column tracking restarts). *)
let reset_line lexbuf =
  line := !line + 1;
  current_line_type := (D.CONTEXT,!line,!logical_line);
  current_line_started := false;
  col_zero := true;
  line_start := Lexing.lexeme_start lexbuf + 1

(* Last physical line on which a real character was seen; -1 initially. *)
let started_line = ref (-1)

(* Record that something appeared on the current line; the first real
   character of a new physical line also advances the logical line count. *)
let start_line seen_char =
  current_line_started := true;
  col_zero := false;
  if seen_char && !line <> !started_line
  then begin
    started_line := !line;
    logical_line := !logical_line + 1
  end

let pass_zero _ = col_zero := false
77 | ||
(* Raise a lexing error whose message is the two fragments concatenated. *)
let lexerr s1 s2 = raise (Lexical (Printf.sprintf "%s%s" s1 s2))

(* Fold a freshly seen control character into the line's current kind.
   Only a handful of combinations are legal, e.g. '-' after '?' (OPT)
   yields OPTMINUS; anything else is a lexical error. *)
let add_current_line_type x =
  match (x,!current_line_type) with
    (D.MINUS,(D.CONTEXT,ln,lln))  -> current_line_type := (D.MINUS,ln,lln)
  | (D.MINUS,(D.UNIQUE,ln,lln))   -> current_line_type := (D.UNIQUEMINUS,ln,lln)
  | (D.MINUS,(D.OPT,ln,lln))      -> current_line_type := (D.OPTMINUS,ln,lln)
  | (D.PLUS,(D.CONTEXT,ln,lln))   -> current_line_type := (D.PLUS,ln,lln)
  | (D.UNIQUE,(D.CONTEXT,ln,lln)) -> current_line_type := (D.UNIQUE,ln,lln)
  | (D.OPT,(D.CONTEXT,ln,lln))    -> current_line_type := (D.OPT,ln,lln)
  | _ -> lexerr "invalid control character combination" ""
95 | ||
(* Context checks: each rejects a token that is illegal for the current
   line kind; s is the offending token text, used in the error message. *)

let check_minus_context_linetype s =
  match !current_line_type with
    (D.PLUS,_,_) -> lexerr "invalid in a + context: " s
  | _ -> ()

let check_context_linetype s =
  match !current_line_type with
    (D.CONTEXT,_,_) -> ()
  | _ -> lexerr "invalid in a nonempty context: " s

let check_plus_linetype s =
  match !current_line_type with
    (D.PLUS,_,_) -> ()
  | _ -> lexerr "invalid in a non + context: " s

let check_arity_context_linetype s =
  match !current_line_type with
    (D.CONTEXT,_,_) | (D.PLUS,_,_) | (D.UNIQUE,_,_) | (D.OPT,_,_) -> ()
  | _ -> lexerr "invalid in a nonempty context: " s

(* Extract the file name lying strictly between the delimiters at offsets
   start and finish of an #include line; "..." is rejected in + includes. *)
let process_include start finish str =
  (match !current_line_type with
    (D.PLUS,_,_) ->
      (try
        let _ = Str.search_forward (Str.regexp "\\.\\.\\.") str start in
        lexerr "... not allowed in + include" ""
      with Not_found -> ())
  | _ -> ());
  String.sub str (start + 1) (finish - start - 1)
125 | ||
(* ---------------------------------------------------------------------- *)
(* Whether this semantic patch transforms (PATCH: -/+) or only matches
   (MATCH: *).  Decided by the first control character seen and then
   frozen; mixing the two is a lexical error. *)
type pm = PATCH | MATCH | UNKNOWN

let pm = ref UNKNOWN

let patch_or_match = function
    PATCH ->
      (match !pm with
        MATCH -> lexerr "- or + not allowed in the first column for a match" ""
      | PATCH -> ()
      | UNKNOWN -> Flag.sgrep_mode2 := false; pm := PATCH)
  | MATCH ->
      (match !pm with
        PATCH -> lexerr "* not allowed in the first column for a patch" ""
      | MATCH -> ()
      | UNKNOWN -> Flag.sgrep_mode2 := true; pm := MATCH)
  | _ -> failwith "unexpected argument"
143 | ||
(* ---------------------------------------------------------------------- *)
(* identifiers, including metavariables *)

(* Metavariables of the rule currently being lexed: name -> token maker. *)
let metavariables = (Hashtbl.create(100) : (string, D.clt -> token) Hashtbl.t)

(* Metavariables of all rules seen so far, keyed by rule name. *)
let all_metavariables =
  (Hashtbl.create(100) : (string,(string * (D.clt -> token)) list) Hashtbl.t)

let type_names = (Hashtbl.create(100) : (string, D.clt -> token) Hashtbl.t)

let declarer_names = (Hashtbl.create(100) : (string, D.clt -> token) Hashtbl.t)

let iterator_names = (Hashtbl.create(100) : (string, D.clt -> token) Hashtbl.t)

let rule_names = (Hashtbl.create(100) : (string, unit) Hashtbl.t)

(* Classify identifier s: rule name, iso-file path, metavariable, type,
   declarer, iterator, or plain TIdent, in that order of precedence.
   Fixed: the deprecated OCaml keyword `or` replaced by `||`. *)
let check_var s linetype =
  let fail _ =
    if (!Data.in_prolog || !Data.in_rule_name) &&
      Str.string_match (Str.regexp "<.*>") s 0
    then TPathIsoFile s
    else
      try (Hashtbl.find metavariables s) linetype
      with Not_found ->
        (try (Hashtbl.find type_names s) linetype
        with Not_found ->
          (try (Hashtbl.find declarer_names s) linetype
          with Not_found ->
            (try (Hashtbl.find iterator_names s) linetype
            with Not_found -> TIdent (s,linetype)))) in
  if !Data.in_meta || !Data.in_rule_name
  then (try Hashtbl.find rule_names s; TRuleName s with Not_found -> fail())
  else fail()
177 | ||
(* Turn the identifier just matched into a token.  Script-language
   keywords are only active in the corresponding lexing mode (metavariable
   declarations, rule headers, iso files); C keywords come next; anything
   else is resolved through check_var. *)
let id_tokens lexbuf =
  let s = tok lexbuf in
  let linetype = get_current_line_type lexbuf in
  let in_rule_name = !Data.in_rule_name in
  let in_meta = !Data.in_meta in
  let in_iso = !Data.in_iso in
  let in_prolog = !Data.in_prolog in
  match s with
  (* metavariable-declaration keywords *)
    "identifier" when in_meta -> check_arity_context_linetype s; TIdentifier
  | "type" when in_meta -> check_arity_context_linetype s; TType
  | "parameter" when in_meta -> check_arity_context_linetype s; TParameter
  | "constant" when in_meta -> check_arity_context_linetype s; TConstant
  | "expression" when in_meta || in_rule_name ->
      check_arity_context_linetype s; TExpression
  | "idexpression" when in_meta ->
      check_arity_context_linetype s; TIdExpression
  | "statement" when in_meta -> check_arity_context_linetype s; TStatement
  | "function" when in_meta -> check_arity_context_linetype s; TFunction
  | "local" when in_meta -> check_arity_context_linetype s; TLocal
  | "list" when in_meta -> check_arity_context_linetype s; Tlist
  | "fresh" when in_meta -> check_arity_context_linetype s; TFresh
  | "typedef" when in_meta -> check_arity_context_linetype s; TTypedef
  | "declarer" when in_meta -> check_arity_context_linetype s; TDeclarer
  | "iterator" when in_meta -> check_arity_context_linetype s; TIterator
  | "name" when in_meta -> check_arity_context_linetype s; TName
  | "position" when in_meta -> check_arity_context_linetype s; TPosition
  | "any" when in_meta -> check_arity_context_linetype s; TPosAny
  | "pure" when in_meta && in_iso ->
      check_arity_context_linetype s; TPure
  | "context" when in_meta && in_iso ->
      check_arity_context_linetype s; TContext
  | "error" when in_meta -> check_arity_context_linetype s; TError
  | "words" when in_meta -> check_context_linetype s; TWords

  (* rule-header keywords *)
  | "using" when in_rule_name || in_prolog -> check_context_linetype s; TUsing
  | "disable" when in_rule_name -> check_context_linetype s; TDisable
  | "extends" when in_rule_name -> check_context_linetype s; TExtends
  | "depends" when in_rule_name -> check_context_linetype s; TDepends
  | "on" when in_rule_name -> check_context_linetype s; TOn
  | "ever" when in_rule_name -> check_context_linetype s; TEver
  | "never" when in_rule_name -> check_context_linetype s; TNever
  | "exists" when in_rule_name -> check_context_linetype s; TExists
  | "forall" when in_rule_name -> check_context_linetype s; TForall
  | "reverse" when in_rule_name -> check_context_linetype s; TReverse
  | "script" when in_rule_name -> check_context_linetype s; TScript

  (* C type keywords *)
  | "char" -> Tchar linetype
  | "short" -> Tshort linetype
  | "int" -> Tint linetype
  | "double" -> Tdouble linetype
  | "float" -> Tfloat linetype
  | "long" -> Tlong linetype
  | "void" -> Tvoid linetype
  | "struct" -> Tstruct linetype
  | "union" -> Tunion linetype
  | "unsigned" -> Tunsigned linetype
  | "signed" -> Tsigned linetype

  (* storage classes *)
  | "auto" -> Tauto linetype
  | "register" -> Tregister linetype
  | "extern" -> Textern linetype
  | "static" -> Tstatic linetype
  | "inline" -> Tinline linetype
  | "typedef" -> Ttypedef linetype

  (* qualifiers *)
  | "const" -> Tconst linetype
  | "volatile" -> Tvolatile linetype

  (* control flow *)
  | "if" -> TIf linetype
  | "else" -> TElse linetype
  | "while" -> TWhile linetype
  | "do" -> TDo linetype
  | "for" -> TFor linetype
  | "switch" -> TSwitch linetype
  | "case" -> TCase linetype
  | "default" -> TDefault linetype
  | "return" -> TReturn linetype
  | "break" -> TBreak linetype
  | "continue" -> TContinue linetype
  | "goto" -> TGoto linetype

  | "sizeof" -> TSizeof linetype

  (* isomorphism-rule labels *)
  | "Expression" -> TIsoExpression
  | "ArgExpression" -> TIsoArgExpression
  | "TestExpression" -> TIsoTestExpression
  | "Statement" -> TIsoStatement
  | "Declaration" -> TIsoDeclaration
  | "Type" -> TIsoType
  | "TopLevel" -> TIsoTopLevel

  | s -> check_var s linetype

(* Build a compound-assignment token (e.g. "+=") for operator op. *)
let mkassign op lexbuf =
  TAssign (Ast.OpAssign op, (get_current_line_type lexbuf))
273 | ||
(* Reset all lexer state for a fresh semantic-patch file and (re)install
   the callbacks through which the parser registers metavariables, type
   names, declarer names and iterator names with this lexer. *)
let init _ =
  line := 1;
  logical_line := 0;
  prev_plus := false;
  line_start := 0;
  current_line_started := false;
  col_zero := true;
  pm := UNKNOWN;
  Data.in_rule_name := false;
  Data.in_meta := false;
  Data.in_prolog := false;
  Data.inheritable_positions := [];
  Hashtbl.clear all_metavariables;
  Hashtbl.clear Data.all_metadecls;
  Hashtbl.clear metavariables;
  Hashtbl.clear type_names;
  Hashtbl.clear rule_names;
  (* NOTE(review): declarer_names and iterator_names are not cleared here;
     confirm whether they are meant to persist across files. *)
  let get_name (_,x) = x in
  Data.add_id_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaId(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_type_meta :=
    (fun name pure ->
      let fn clt = TMetaType(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_param_meta :=
    (fun name pure ->
      let fn clt = TMetaParam(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_paramlist_meta :=
    (fun name lenname pure ->
      let fn clt = TMetaParamList(name,lenname,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_const_meta :=
    (fun tyopt name constraints pure ->
      let fn clt = TMetaConst(name,constraints,pure,tyopt,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_err_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaErr(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_exp_meta :=
    (fun tyopt name constraints pure ->
      let fn clt = TMetaExp(name,constraints,pure,tyopt,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_idexp_meta :=
    (fun tyopt name constraints pure ->
      let fn clt = TMetaIdExp(name,constraints,pure,tyopt,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_local_idexp_meta :=
    (fun tyopt name constraints pure ->
      let fn clt = TMetaLocalIdExp(name,constraints,pure,tyopt,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_explist_meta :=
    (fun name lenname pure ->
      let fn clt = TMetaExpList(name,lenname,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_stm_meta :=
    (fun name pure ->
      let fn clt = TMetaStm(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_stmlist_meta :=
    (fun name pure ->
      let fn clt = TMetaStmList(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_func_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaFunc(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_local_func_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaLocalFunc(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_iterator_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaIterator(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_declarer_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaDeclarer(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_pos_meta :=
    (fun name constraints any ->
      let fn ((d,ln,_,_,_,_,_,_) as clt) =
        (* position metavariables may not appear in + code *)
        (if d = Data.PLUS
        then
          failwith
            (Printf.sprintf "%d: positions only allowed in minus code" ln));
        TMetaPos(name,constraints,any,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_type_name :=
    (fun name ->
      let fn clt = TTypeId(name,clt) in
      Hashtbl.replace type_names name fn);
  Data.add_declarer_name :=
    (fun name ->
      let fn clt = TDeclarerId(name,clt) in
      Hashtbl.replace declarer_names name fn);
  Data.add_iterator_name :=
    (fun name ->
      let fn clt = TIteratorId(name,clt) in
      Hashtbl.replace iterator_names name fn);
  Data.init_rule := (fun _ -> Hashtbl.clear metavariables);
  Data.install_bindings :=
    (fun parent ->
      List.iter (fun (name,fn) -> Hashtbl.add metavariables name fn)
        (Hashtbl.find all_metavariables parent))
382 | ||
(* Strip leading spaces and tabs from s (used on --- / +++ file names). *)
let drop_spaces s =
  let len = String.length s in
  let start = ref 0 in
  while !start < len && (s.[!start] = ' ' || s.[!start] = '\t') do
    incr start
  done;
  String.sub s !start (len - !start)
394 | } | |
395 | ||
(* ---------------------------------------------------------------------- *)
(* tokens *)

(* Named regular expressions shared by the rules below. *)
let letter = ['A'-'Z' 'a'-'z' '_']
let digit = ['0'-'9']

let dec = ['0'-'9']
let oct = ['0'-'7']
let hex = ['0'-'9' 'a'-'f' 'A'-'F']

(* C integer literal bodies *)
let decimal = ('0' | (['1'-'9'] dec*))
let octal = ['0'] oct+
let hexa = ("0x" |"0X") hex+

(* C floating literal: integer part, fraction, optional signed exponent *)
let pent = dec+
let pfract = dec+
let sign = ['-' '+']
let exp = ['e''E'] sign? dec+
let real = pent exp | ((pent? '.' pfract | pent '.' pfract? ) exp?)
415 | ||
416 | ||
(* Main entry point.  Produces one SmPL token per call; whitespace,
   comments and bare control characters loop back via `token lexbuf`.
   Fixed: the deprecated OCaml keyword `or` replaced by `||` in the "@"
   action. *)
rule token = parse
  | [' ' '\t' ]+ { start_line false; token lexbuf }
  | ['\n' '\r' '\011' '\012'] { reset_line lexbuf; token lexbuf }

  | "//" [^ '\n']* { start_line false; token lexbuf }

  | "@@" { start_line true; TArobArob }
  | "@" { pass_zero();
	  if !Data.in_rule_name || not !current_line_started
	  then (start_line true; TArob)
	  else (check_minus_context_linetype "@"; TPArob) }

  | "WHEN" | "when"
      { start_line true; check_minus_context_linetype (tok lexbuf);
	TWhen (get_current_line_type lexbuf) }

  | "..."
      { start_line true; check_minus_context_linetype (tok lexbuf);
	TEllipsis (get_current_line_type lexbuf) }
(*
  | "ooo"
      { start_line true; check_minus_context_linetype (tok lexbuf);
	TCircles (get_current_line_type lexbuf) }

  | "***"
      { start_line true; check_minus_context_linetype (tok lexbuf);
	TStars (get_current_line_type lexbuf) }
*)
  | "<..." { start_line true; check_context_linetype (tok lexbuf);
	     TOEllipsis (get_current_line_type lexbuf) }
  | "...>" { start_line true; check_context_linetype (tok lexbuf);
	     TCEllipsis (get_current_line_type lexbuf) }
  | "<+..." { start_line true; check_context_linetype (tok lexbuf);
	      TPOEllipsis (get_current_line_type lexbuf) }
  | "...+>" { start_line true; check_context_linetype (tok lexbuf);
	      TPCEllipsis (get_current_line_type lexbuf) }
(*
  | "<ooo" { start_line true; check_context_linetype (tok lexbuf);
	     TOCircles (get_current_line_type lexbuf) }
  | "ooo>" { start_line true; check_context_linetype (tok lexbuf);
	     TCCircles (get_current_line_type lexbuf) }

  | "<***" { start_line true; check_context_linetype (tok lexbuf);
	     TOStars (get_current_line_type lexbuf) }
  | "***>" { start_line true; check_context_linetype (tok lexbuf);
	     TCStars (get_current_line_type lexbuf) }
*)
  (* at column zero, '-'/'+'/'?'/'!' are control characters that set the
     line kind; elsewhere they are ordinary operators *)
  | "-" { pass_zero();
	  if !current_line_started
	  then (start_line true; TMinus (get_current_line_type lexbuf))
	  else (patch_or_match PATCH;
		add_current_line_type D.MINUS; token lexbuf) }
  | "+" { pass_zero();
	  if !current_line_started
	  then (start_line true; TPlus (get_current_line_type lexbuf))
	  else if !Data.in_meta
	  then TPlus0
	  else (patch_or_match PATCH;
		add_current_line_type D.PLUS; token lexbuf) }
  | "?" { pass_zero();
	  if !current_line_started
	  then (start_line true; TWhy (get_current_line_type lexbuf))
	  else if !Data.in_meta
	  then TWhy0
	  else (add_current_line_type D.OPT; token lexbuf) }
  | "!" { pass_zero();
	  if !current_line_started
	  then (start_line true; TBang (get_current_line_type lexbuf))
	  else if !Data.in_meta
	  then TBang0
	  else (add_current_line_type D.UNIQUE; token lexbuf) }
  (* at column zero, ( | ) delimit disjunctions *)
  | "(" { if not !col_zero
	  then (start_line true; TOPar (get_current_line_type lexbuf))
	  else
	    (start_line true; check_context_linetype (tok lexbuf);
	     TOPar0 (get_current_line_type lexbuf))}
  | "\\(" { start_line true; TOPar0 (get_current_line_type lexbuf) }
  | "|" { if not (!col_zero)
	  then (start_line true; TOr(get_current_line_type lexbuf))
	  else (start_line true;
		check_context_linetype (tok lexbuf);
		TMid0 (get_current_line_type lexbuf))}
  | "\\|" { start_line true; TMid0 (get_current_line_type lexbuf) }
  | ")" { if not !col_zero
	  then (start_line true; TCPar (get_current_line_type lexbuf))
	  else
	    (start_line true; check_context_linetype (tok lexbuf);
	     TCPar0 (get_current_line_type lexbuf))}
  | "\\)" { start_line true; TCPar0 (get_current_line_type lexbuf) }

  | '[' { start_line true; TOCro (get_current_line_type lexbuf) }
  | ']' { start_line true; TCCro (get_current_line_type lexbuf) }
  | '{' { start_line true; TOBrace (get_current_line_type lexbuf) }
  | '}' { start_line true; TCBrace (get_current_line_type lexbuf) }

  | "->" { start_line true; TPtrOp (get_current_line_type lexbuf) }
  | '.' { start_line true; TDot (get_current_line_type lexbuf) }
  | ',' { start_line true; TComma (get_current_line_type lexbuf) }
  | ";" { start_line true;
	  if !Data.in_meta
	  then TMPtVirg (* works better with tokens_all *)
	  else TPtVirg (get_current_line_type lexbuf) }

  (* '*' at column zero marks a match line; elsewhere it is deref/mult *)
  | '*' { pass_zero();
	  if !current_line_started
	  then
	    (start_line true; TMul (get_current_line_type lexbuf))
	  else
	    (patch_or_match MATCH;
	     add_current_line_type D.MINUS; token lexbuf) }
  | '/' { start_line true;
	  TDmOp (Ast.Div,get_current_line_type lexbuf) }
  | '%' { start_line true;
	  TDmOp (Ast.Mod,get_current_line_type lexbuf) }
  | '~' { start_line true; TTilde (get_current_line_type lexbuf) }

  | "++" { start_line true; TInc (get_current_line_type lexbuf) }
  | "--" { start_line true; TDec (get_current_line_type lexbuf) }

  | "=" { start_line true; TEq (get_current_line_type lexbuf) }

  | "-=" { start_line true; mkassign Ast.Minus lexbuf }
  | "+=" { start_line true; mkassign Ast.Plus lexbuf }

  | "*=" { start_line true; mkassign Ast.Mul lexbuf }
  | "/=" { start_line true; mkassign Ast.Div lexbuf }
  | "%=" { start_line true; mkassign Ast.Mod lexbuf }

  | "&=" { start_line true; mkassign Ast.And lexbuf }
  | "|=" { start_line true; mkassign Ast.Or lexbuf }
  | "^=" { start_line true; mkassign Ast.Xor lexbuf }

  | "<<=" { start_line true; mkassign Ast.DecLeft lexbuf }
  | ">>=" { start_line true; mkassign Ast.DecRight lexbuf }

  | ":" { start_line true; TDotDot (get_current_line_type lexbuf) }

  | "==" { start_line true; TEqEq (get_current_line_type lexbuf) }
  | "!=" { start_line true; TNotEq (get_current_line_type lexbuf) }
  | ">=" { start_line true;
	   TLogOp(Ast.SupEq,get_current_line_type lexbuf) }
  | "<=" { start_line true;
	   TLogOp(Ast.InfEq,get_current_line_type lexbuf) }
  | "<" { start_line true;
	  TLogOp(Ast.Inf,get_current_line_type lexbuf) }
  | ">" { start_line true;
	  TLogOp(Ast.Sup,get_current_line_type lexbuf) }

  | "&&" { start_line true; TAndLog (get_current_line_type lexbuf) }
  | "||" { start_line true; TOrLog (get_current_line_type lexbuf) }

  | ">>" { start_line true;
	   TShOp(Ast.DecRight,get_current_line_type lexbuf) }
  | "<<" { start_line true;
	   TShOp(Ast.DecLeft,get_current_line_type lexbuf) }

  | "&" { start_line true; TAnd (get_current_line_type lexbuf) }
  | "^" { start_line true; TXor(get_current_line_type lexbuf) }

  (* preprocessor: #define without and with a parameter list *)
  | ( ("#" [' ' '\t']* "define" [' ' '\t']+))
    ( (letter (letter |digit)*) as ident)
      { start_line true;
	let (arity,line,lline,offset,col,strbef,straft,pos) as lt =
	  get_current_line_type lexbuf in
	let off = String.length "#define " in
	(* -1 in the code below because the ident is not at the line start *)
	TDefine
	  (lt,
	   check_var ident
	     (arity,line,lline,offset+off,(-1),[],[],Ast0.NoMetaPos)) }
  | ( ("#" [' ' '\t']* "define" [' ' '\t']+))
    ( (letter (letter | digit)*) as ident)
    '('
      { start_line true;
	let (arity,line,lline,offset,col,strbef,straft,pos) as lt =
	  get_current_line_type lexbuf in
	let off = String.length "#define " in
	TDefineParam
	  (lt,
	   check_var ident
	     (* why pos here but not above? *)
	     (arity,line,lline,offset+off,(-1),strbef,straft,pos),
	   offset + off + (String.length ident)) }
  | "#" [' ' '\t']* "include" [' ' '\t']* '"' [^ '"']+ '"'
      { TIncludeL
	  (let str = tok lexbuf in
	  let start = String.index str '"' in
	  let finish = String.rindex str '"' in
	  start_line true;
	  (process_include start finish str,get_current_line_type lexbuf)) }
  | "#" [' ' '\t']* "include" [' ' '\t']* '<' [^ '>']+ '>'
      { TIncludeNL
	  (let str = tok lexbuf in
	  let start = String.index str '<' in
	  let finish = String.rindex str '>' in
	  start_line true;
	  (process_include start finish str,get_current_line_type lexbuf)) }
  (* other preprocessor directives are kept verbatim, + context only *)
  | "#" [' ' '\t']* "if" [^'\n']*
  | "#" [' ' '\t']* "ifdef" [^'\n']*
  | "#" [' ' '\t']* "ifndef" [^'\n']*
  | "#" [' ' '\t']* "else" [^'\n']*
  | "#" [' ' '\t']* "elif" [^'\n']*
  | "#" [' ' '\t']* "endif" [^'\n']*
  | "#" [' ' '\t']* "error" [^'\n']*
      { start_line true; check_plus_linetype (tok lexbuf);
	TPragma (tok lexbuf) }
  | "---" [^'\n']*
      { (if !current_line_started
	then lexerr "--- must be at the beginning of the line" "");
	start_line true;
	TMinusFile
	  (let str = tok lexbuf in
	  (drop_spaces(String.sub str 3 (String.length str - 3)),
	   (get_current_line_type lexbuf))) }
  | "+++" [^'\n']*
      { (if !current_line_started
	then lexerr "+++ must be at the beginning of the line" "");
	start_line true;
	TPlusFile
	  (let str = tok lexbuf in
	  (drop_spaces(String.sub str 3 (String.length str - 3)),
	   (get_current_line_type lexbuf))) }

  | letter (letter | digit)*
      { start_line true; id_tokens lexbuf }

  | "'" { start_line true;
	  TChar(char lexbuf,get_current_line_type lexbuf) }
  | '"' { start_line true;
	  TString(string lexbuf,(get_current_line_type lexbuf)) }
  | (real as x) { start_line true;
		  TFloat(x,(get_current_line_type lexbuf)) }
  | ((( decimal | hexa | octal)
      ( ['u' 'U']
      | ['l' 'L']
      | (['l' 'L'] ['u' 'U'])
      | (['u' 'U'] ['l' 'L'])
      | (['u' 'U'] ['l' 'L'] ['l' 'L'])
      | (['l' 'L'] ['l' 'L'])
      )?
    ) as x) { start_line true; TInt(x,(get_current_line_type lexbuf)) }

  | "<=>" { TIso }
  | "=>" { TRightIso }

  | eof { EOF }

  | _ { lexerr "unrecognised symbol, in token rule: " (tok lexbuf) }
666 | ||
667 | ||
(* Body of a character literal, entered after the opening quote.  Returns
   the literal's content with escape sequences kept verbatim; only the
   usual C escapes are accepted. *)
and char = parse
  | (_ as x) "'" { String.make 1 x }
  | (("\\" (oct | oct oct | oct oct oct)) as x "'") { x }
  | (("\\x" (hex | hex hex)) as x "'") { x }
  | (("\\" (_ as v)) as x "'")
      { (match v with
        | 'n' -> () | 't' -> () | 'v' -> () | 'b' -> ()
        | 'r' -> () | 'f' -> () | 'a' -> ()
        | '\\' -> () | '?' -> () | '\'' -> () | '"' -> ()
        | 'e' -> ()
        | _ -> lexerr "unrecognised symbol: " (tok lexbuf)
	);
	x
      }
  | _ { lexerr "unrecognised symbol: " (tok lexbuf) }
683 | ||
(* Body of a string literal, entered after the opening quote.  Accumulates
   the content (escapes verbatim) until the closing quote. *)
and string = parse
  | '"' { "" }
  | (_ as x) { Common.string_of_char x ^ string lexbuf }
  | ("\\" (oct | oct oct | oct oct oct)) as x { x ^ string lexbuf }
  | ("\\x" (hex | hex hex)) as x { x ^ string lexbuf }
  | ("\\" (_ as v)) as x
      {
	(match v with
	| 'n' -> () | 't' -> () | 'v' -> () | 'b' -> () | 'r' -> ()
	| 'f' -> () | 'a' -> ()
	| '\\' -> () | '?' -> () | '\'' -> () | '"' -> ()
	| 'e' -> ()
	| '\n' -> ()
	| _ -> lexerr "unrecognised symbol:" (tok lexbuf)
	);
	x ^ string lexbuf
      }
  | _ { lexerr "unrecognised symbol: " (tok lexbuf) }