| 1 | (* |
| 2 | * Copyright 2005-2010, Ecole des Mines de Nantes, University of Copenhagen |
| 3 | * Yoann Padioleau, Julia Lawall, Rene Rydhof Hansen, Henrik Stuart, Gilles Muller, Nicolas Palix |
| 4 | * This file is part of Coccinelle. |
| 5 | * |
| 6 | * Coccinelle is free software: you can redistribute it and/or modify |
| 7 | * it under the terms of the GNU General Public License as published by |
| 8 | * the Free Software Foundation, according to version 2 of the License. |
| 9 | * |
| 10 | * Coccinelle is distributed in the hope that it will be useful, |
| 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 13 | * GNU General Public License for more details. |
| 14 | * |
| 15 | * You should have received a copy of the GNU General Public License |
| 16 | * along with Coccinelle. If not, see <http://www.gnu.org/licenses/>. |
| 17 | * |
| 18 | * The authors reserve the right to distribute this or future versions of |
| 19 | * Coccinelle under other licenses. |
| 20 | *) |
| 21 | |
| 22 | |
| 23 | (* |
| 24 | * Copyright 2005-2010, Ecole des Mines de Nantes, University of Copenhagen |
| 25 | * Yoann Padioleau, Julia Lawall, Rene Rydhof Hansen, Henrik Stuart, Gilles Muller, Nicolas Palix |
| 26 | * This file is part of Coccinelle. |
| 27 | * |
| 28 | * Coccinelle is free software: you can redistribute it and/or modify |
| 29 | * it under the terms of the GNU General Public License as published by |
| 30 | * the Free Software Foundation, according to version 2 of the License. |
| 31 | * |
| 32 | * Coccinelle is distributed in the hope that it will be useful, |
| 33 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 34 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 35 | * GNU General Public License for more details. |
| 36 | * |
| 37 | * You should have received a copy of the GNU General Public License |
| 38 | * along with Coccinelle. If not, see <http://www.gnu.org/licenses/>. |
| 39 | * |
| 40 | * The authors reserve the right to distribute this or future versions of |
| 41 | * Coccinelle under other licenses. |
| 42 | *) |
| 43 | |
| 44 | |
{
(* Header of the ocamllex specification: shared lexer state and helper
   functions used by the rules below. *)
open Parser_cocci_menhir
module D = Data
module Ast = Ast_cocci
module Ast0 = Ast0_cocci
module P = Parse_aux
exception Lexical of string
(* [tok lexbuf] returns the text matched by the current lexing rule *)
let tok = Lexing.lexeme

(* physical line number in the file being lexed, starting at 1 *)
let line = ref 1
(* logical line number: advanced only for lines that contain real tokens *)
let logical_line = ref 0

(* ---------------------------------------------------------------------- *)
(* control codes *)

(* Defined in data.ml
type line_type = MINUS | OPTMINUS | UNIQUEMINUS | PLUS | CONTEXT | UNIQUE | OPT
*)

(* annotation (-, +, *, ?, !) of the line being lexed, with its physical
   and logical line numbers *)
let current_line_type = ref (D.CONTEXT,!line,!logical_line)

let prev_plus = ref false
let line_start = ref 0 (* offset of the beginning of the line *)
(* Build the clt ("code line type") tuple that is attached to each token:
   annotation, physical line, logical line, start offset, spaces before the
   token, and empty before/after pragma and position slots.
   Also records in [prev_plus] whether this token is in + code. *)
let get_current_line_type lexbuf =
  let (c,l,ll) = !current_line_type in
  let lex_start = Lexing.lexeme_start lexbuf in
  (* spaces between the start of the line and the token; 0 when the line
     start offset is not known *)
  let preceding_spaces =
    if !line_start < 0 then 0 else lex_start - !line_start in
  (*line_start := -1;*)
  (* || rather than the deprecated `or' keyword *)
  prev_plus := (c = D.PLUS) || (c = D.PLUSPLUS);
  (c,l,ll,lex_start,preceding_spaces,[],[],Ast0.NoMetaPos)
(* true once a real token has been seen on the current line *)
let current_line_started = ref false
(* true while the lexer is still at column zero of the current line *)
let col_zero = ref true
| 78 | |
(* Called on each newline: advance to the next physical line and reset the
   per-line state to a fresh, unannotated context line. *)
let reset_line lexbuf =
  incr line;
  current_line_type := (D.CONTEXT,!line,!logical_line);
  current_line_started := false;
  col_zero := true;
  line_start := Lexing.lexeme_start lexbuf + 1
| 85 | |
(* last physical line on which the logical line counter was advanced *)
let started_line = ref (-1)

(* Record that a token has been seen on the current line.  When [seen_char]
   holds and this is the first token of the physical line, the logical line
   counter is advanced as well. *)
let start_line seen_char =
  current_line_started := true;
  col_zero := false;
  if seen_char && !line <> !started_line
  then begin
    started_line := !line;
    incr logical_line
  end
| 97 | |
(* Note that column zero has been passed without starting the line
   (used when reading control characters such as - and +). *)
let pass_zero _ = col_zero := false

(* Raise a lexing error whose message is the two fragments concatenated. *)
let lexerr s1 s2 = raise (Lexical (s1 ^ s2))
| 101 | |
(* Combine a newly seen control character [x] with the annotation already
   recorded for the current line; only the combinations below are legal
   (e.g. ? then - gives OPTMINUS), anything else is a lexing error. *)
let add_current_line_type x =
  let (cur,ln,lln) = !current_line_type in
  let set c = current_line_type := (c,ln,lln) in
  match (x,cur) with
    (D.MINUS,D.CONTEXT) -> set D.MINUS
  | (D.MINUS,D.UNIQUE) -> set D.UNIQUEMINUS
  | (D.MINUS,D.OPT) -> set D.OPTMINUS
  | (D.PLUS,D.CONTEXT) -> set D.PLUS
  | (D.PLUSPLUS,D.CONTEXT) -> set D.PLUSPLUS
  | (D.UNIQUE,D.CONTEXT) -> set D.UNIQUE
  | (D.OPT,D.CONTEXT) -> set D.OPT
  | _ -> lexerr "invalid control character combination" ""
| 119 | |
(* Fail if the current line is + code; [s] names the offending construct. *)
let check_minus_context_linetype s =
  let (ty,_,_) = !current_line_type in
  match ty with
    D.PLUS | D.PLUSPLUS -> lexerr "invalid in a + context: " s
  | _ -> ()

(* Fail unless the current line is a plain (unannotated) context line. *)
let check_context_linetype s =
  let (ty,_,_) = !current_line_type in
  if ty <> D.CONTEXT
  then lexerr "invalid in a nonempty context: " s

(* Fail unless the current line is + code. *)
let check_plus_linetype s =
  let (ty,_,_) = !current_line_type in
  match ty with
    D.PLUS | D.PLUSPLUS -> ()
  | _ -> lexerr "invalid in a non + context: " s
| 134 | |
(* Fail if the current line carries a - annotation; +, context and the
   arity markers ? and ! are all acceptable here. *)
let check_arity_context_linetype s =
  let (ty,_,_) = !current_line_type in
  match ty with
    D.CONTEXT | D.PLUS | D.PLUSPLUS | D.UNIQUE | D.OPT -> ()
  | _ -> lexerr "invalid in a nonempty context: " s

(* A comment in + code must come after the leading + of the line. *)
let check_comment s =
  if not !current_line_started
  then lexerr "+ expected at the beginning of the line" s
| 144 | |
(* Extract the file name of an #include from the matched text [str], where
   [start] and [finish] are the offsets of the surrounding delimiters.
   In + code, a "..." inside the included name is rejected. *)
let process_include start finish str =
  let (ty,_,_) = !current_line_type in
  (if ty = D.PLUS || ty = D.PLUSPLUS
  then
    try
      ignore (Str.search_forward (Str.regexp "\\.\\.\\.") str start);
      lexerr "... not allowed in + include" ""
    with Not_found -> ());
  String.sub str (start + 1) (finish - start - 1)
| 154 | |
| 155 | (* ---------------------------------------------------------------------- *) |
(* ---------------------------------------------------------------------- *)
type pm = PATCH | MATCH | UNKNOWN

let pm = ref UNKNOWN

(* A semantic patch is either a patch (with - and + lines) or a match
   (with * lines), never both.  The first annotated line fixes the mode;
   a later line in the other mode is a lexing error. *)
let patch_or_match = function
    UNKNOWN -> failwith "unexpected argument"
  | dir ->
      if not !D.ignore_patch_or_match
      then
        match (dir,!pm) with
          (PATCH,MATCH) ->
            lexerr "- or + not allowed in the first column for a match" ""
        | (MATCH,PATCH) ->
            lexerr "* not allowed in the first column for a patch" ""
        | (PATCH,UNKNOWN) -> Flag.sgrep_mode2 := false; pm := PATCH
        | (MATCH,UNKNOWN) -> Flag.sgrep_mode2 := true; pm := MATCH
        | _ -> ()
| 177 | |
| 178 | (* ---------------------------------------------------------------------- *) |
| 179 | (* identifiers, including metavariables *) |
| 180 | |
(* metavariable name -> token builder, for the rule currently being parsed *)
let metavariables = (Hashtbl.create(100) : (string, D.clt -> token) Hashtbl.t)

(* rule name -> all of its metavariable bindings; consulted by
   install_bindings (registered in init below) when a rule inherits from a
   parent rule *)
let all_metavariables =
  (Hashtbl.create(100) : (string,(string * (D.clt -> token)) list) Hashtbl.t)

(* names declared as type names, declarers and iterators, respectively *)
let type_names = (Hashtbl.create(100) : (string, D.clt -> token) Hashtbl.t)

let declarer_names = (Hashtbl.create(100) : (string, D.clt -> token) Hashtbl.t)

let iterator_names = (Hashtbl.create(100) : (string, D.clt -> token) Hashtbl.t)

(* names of previously seen rules (value is unused, the table is a set) *)
let rule_names = (Hashtbl.create(100) : (string, unit) Hashtbl.t)
| 193 | |
(* Classify an identifier [s] that is not a keyword: in a prolog or rule
   name it may be an iso file path "<...>"; otherwise it may be a declared
   metavariable, type name, declarer or iterator name, a rule name (when in
   a metavariable declaration or rule name), or a plain identifier. *)
let check_var s linetype =
  let fail _ =
    if (!Data.in_prolog || !Data.in_rule_name) &&
      Str.string_match (Str.regexp "<.*>") s 0
    then TPathIsoFile s
    else
      (* the lookup order gives metavariables priority over type, declarer
	 and iterator names *)
      try (Hashtbl.find metavariables s) linetype
      with Not_found ->
	(try (Hashtbl.find type_names s) linetype
	with Not_found ->
	  (try (Hashtbl.find declarer_names s) linetype
	  with Not_found ->
	    (try (Hashtbl.find iterator_names s) linetype
	    with Not_found -> TIdent (s,linetype)))) in
  (* || rather than the deprecated `or' keyword *)
  if !Data.in_meta || !Data.in_rule_name
  then (try Hashtbl.find rule_names s; TRuleName s with Not_found -> fail())
  else fail()
| 211 | |
(* Map the identifier just matched by the lexer to a token.  Keywords of
   the SmPL metalanguage (identifier, expression, using, depends, ...) are
   only recognized in the contexts where they are meaningful; C keywords
   are always recognized; anything else is classified by check_var. *)
let id_tokens lexbuf =
  let s = tok lexbuf in
  let linetype = get_current_line_type lexbuf in
  let in_rule_name = !Data.in_rule_name in
  let in_meta = !Data.in_meta && not !Data.saw_struct in
  let in_iso = !Data.in_iso in
  let in_prolog = !Data.in_prolog in
  match s with
    "identifier" when in_meta -> check_arity_context_linetype s; TIdentifier
  | "type" when in_meta -> check_arity_context_linetype s; TType
  | "parameter" when in_meta -> check_arity_context_linetype s; TParameter
  | "constant" when in_meta -> check_arity_context_linetype s; TConstant
  | "generated" when in_rule_name && not (!Flag.make_hrule = None) ->
      check_arity_context_linetype s; TGenerated
  | "expression" when in_meta || in_rule_name ->
      check_arity_context_linetype s; TExpression
  | "initialiser" when in_meta || in_rule_name ->
      check_arity_context_linetype s; TInitialiser
  | "initializer" when in_meta || in_rule_name ->
      check_arity_context_linetype s; TInitialiser
  | "idexpression" when in_meta ->
      check_arity_context_linetype s; TIdExpression
  | "statement" when in_meta -> check_arity_context_linetype s; TStatement
  | "function" when in_meta -> check_arity_context_linetype s; TFunction
  | "local" when in_meta -> check_arity_context_linetype s; TLocal
  | "list" when in_meta -> check_arity_context_linetype s; Tlist
  | "fresh" when in_meta -> check_arity_context_linetype s; TFresh
  | "typedef" when in_meta -> check_arity_context_linetype s; TTypedef
  | "declarer" when in_meta -> check_arity_context_linetype s; TDeclarer
  | "iterator" when in_meta -> check_arity_context_linetype s; TIterator
  | "name" when in_meta -> check_arity_context_linetype s; TName
  | "position" when in_meta -> check_arity_context_linetype s; TPosition
  | "any" when in_meta -> check_arity_context_linetype s; TPosAny
  | "pure" when in_meta && in_iso ->
      check_arity_context_linetype s; TPure
  | "context" when in_meta && in_iso ->
      check_arity_context_linetype s; TContext
  | "error" when in_meta -> check_arity_context_linetype s; TError
  | "words" when in_meta -> check_context_linetype s; TWords

  | "using" when in_rule_name || in_prolog -> check_context_linetype s; TUsing
  (* || rather than the deprecated `or' keyword *)
  | "virtual" when in_prolog || in_rule_name || in_meta ->
      (* don't want to allow virtual as a rule name *)
      check_context_linetype s; TVirtual
  | "disable" when in_rule_name -> check_context_linetype s; TDisable
  | "extends" when in_rule_name -> check_context_linetype s; TExtends
  | "depends" when in_rule_name -> check_context_linetype s; TDepends
  | "on" when in_rule_name -> check_context_linetype s; TOn
  | "ever" when in_rule_name -> check_context_linetype s; TEver
  | "never" when in_rule_name -> check_context_linetype s; TNever
  (* exists and forall for when are reparsed in parse_cocci.ml *)
  | "exists" when in_rule_name -> check_context_linetype s; TExists
  | "forall" when in_rule_name -> check_context_linetype s; TForall
  | "script" when in_rule_name -> check_context_linetype s; TScript
  | "initialize" when in_rule_name -> check_context_linetype s; TInitialize
  | "finalize" when in_rule_name -> check_context_linetype s; TFinalize

  (* C type keywords *)
  | "char" -> Tchar linetype
  | "short" -> Tshort linetype
  | "int" -> Tint linetype
  | "double" -> Tdouble linetype
  | "float" -> Tfloat linetype
  | "long" -> Tlong linetype
  | "void" -> Tvoid linetype
  (* in_meta is only for the first keyword; drop it now to allow any type
     name *)
  | "struct" -> Data.saw_struct := true; Tstruct linetype
  | "union" -> Data.saw_struct := true; Tunion linetype
  | "enum" -> Data.saw_struct := true; Tenum linetype
  | "unsigned" -> Tunsigned linetype
  | "signed" -> Tsigned linetype

  (* storage class and qualifier keywords *)
  | "auto" -> Tauto linetype
  | "register" -> Tregister linetype
  | "extern" -> Textern linetype
  | "static" -> Tstatic linetype
  | "inline" -> Tinline linetype
  | "typedef" -> Ttypedef linetype

  | "const" -> Tconst linetype
  | "volatile" -> Tvolatile linetype

  (* C statement keywords *)
  | "if" -> TIf linetype
  | "else" -> TElse linetype
  | "while" -> TWhile linetype
  | "do" -> TDo linetype
  | "for" -> TFor linetype
  | "switch" -> TSwitch linetype
  | "case" -> TCase linetype
  | "default" -> TDefault linetype
  | "return" -> TReturn linetype
  | "break" -> TBreak linetype
  | "continue" -> TContinue linetype
  | "goto" -> TGoto linetype

  | "sizeof" -> TSizeof linetype

  (* isomorphism rule headers *)
  | "Expression" -> TIsoExpression
  | "ArgExpression" -> TIsoArgExpression
  | "TestExpression" -> TIsoTestExpression
  | "ToTestExpression" -> TIsoToTestExpression
  | "Statement" -> TIsoStatement
  | "Declaration" -> TIsoDeclaration
  | "Type" -> TIsoType
  | "TopLevel" -> TIsoTopLevel

  | s -> check_var s linetype
| 319 | |
(* Build an op-assignment token (e.g. += or <<=) for operator [op]. *)
let mkassign op lexbuf =
  TAssign (Ast.OpAssign op, get_current_line_type lexbuf)
| 322 | |
(* Reset all of the lexer's state before parsing a new semantic patch file,
   and register in Data the callbacks through which the parser declares
   metavariables, type/declarer/iterator names, and rule inheritance. *)
let init _ =
  (* per-file counters and per-line state *)
  line := 1;
  logical_line := 0;
  prev_plus := false;
  line_start := 0;
  current_line_started := false;
  current_line_type := (D.CONTEXT,0,0);
  col_zero := true;
  pm := UNKNOWN;
  Data.in_rule_name := false;
  Data.in_meta := false;
  Data.in_prolog := false;
  Data.saw_struct := false;
  Data.inheritable_positions := [];
  (* forget everything declared by a previously parsed file *)
  Hashtbl.clear all_metavariables;
  Hashtbl.clear Data.all_metadecls;
  Hashtbl.clear metavariables;
  Hashtbl.clear type_names;
  Hashtbl.clear rule_names;
  Hashtbl.clear iterator_names;
  Hashtbl.clear declarer_names;
  (* metavariable names arrive as (rule,name) pairs; only the name part is
     used as the table key *)
  let get_name (_,x) = x in
  (* each callback closes over the declaration's attributes and registers a
     clt -> token builder under the metavariable's name *)
  Data.add_id_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaId(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  (* a virtual identifier with a command-line value lexes as that value *)
  Data.add_virt_id_meta_found :=
    (fun name vl ->
      let fn clt = TIdent(vl,clt) in
      Hashtbl.replace metavariables name fn);
  Data.add_virt_id_meta_not_found :=
    (fun name pure ->
      let fn clt = TMetaId(name,Ast.IdNoConstraint,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_fresh_id_meta :=
    (fun name ->
      let fn clt = TMetaId(name,Ast.IdNoConstraint,Ast0.Impure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_type_meta :=
    (fun name pure ->
      let fn clt = TMetaType(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_init_meta :=
    (fun name pure ->
      let fn clt = TMetaInit(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_param_meta :=
    (function name -> function pure ->
      let fn clt = TMetaParam(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_paramlist_meta :=
    (function name -> function lenname -> function pure ->
      let fn clt = TMetaParamList(name,lenname,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_const_meta :=
    (fun tyopt name constraints pure ->
      let fn clt = TMetaConst(name,constraints,pure,tyopt,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_err_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaErr(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_exp_meta :=
    (fun tyopt name constraints pure ->
      let fn clt = TMetaExp(name,constraints,pure,tyopt,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_idexp_meta :=
    (fun tyopt name constraints pure ->
      let fn clt = TMetaIdExp(name,constraints,pure,tyopt,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_local_idexp_meta :=
    (fun tyopt name constraints pure ->
      let fn clt = TMetaLocalIdExp(name,constraints,pure,tyopt,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_explist_meta :=
    (function name -> function lenname -> function pure ->
      let fn clt = TMetaExpList(name,lenname,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_stm_meta :=
    (function name -> function pure ->
      let fn clt = TMetaStm(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_stmlist_meta :=
    (function name -> function pure ->
      let fn clt = TMetaStmList(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_func_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaFunc(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_local_func_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaLocalFunc(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_iterator_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaIterator(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_declarer_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaDeclarer(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  (* position metavariables are rejected on + lines *)
  Data.add_pos_meta :=
    (fun name constraints any ->
      let fn ((d,ln,_,_,_,_,_,_) as clt) =
	(if d = Data.PLUS
	then
	  failwith
	    (Printf.sprintf "%d: positions only allowed in minus code" ln));
	TMetaPos(name,constraints,any,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_type_name :=
    (function name ->
      let fn clt = TTypeId(name,clt) in
      Hashtbl.replace type_names name fn);
  Data.add_declarer_name :=
    (function name ->
      let fn clt = TDeclarerId(name,clt) in
      Hashtbl.replace declarer_names name fn);
  Data.add_iterator_name :=
    (function name ->
      let fn clt = TIteratorId(name,clt) in
      Hashtbl.replace iterator_names name fn);
  (* starting a new rule forgets the previous rule's metavariables *)
  Data.init_rule := (function _ -> Hashtbl.clear metavariables);
  (* inherit all metavariable bindings of the named parent rule *)
  Data.install_bindings :=
    (function parent ->
      List.iter (function (name,fn) -> Hashtbl.add metavariables name fn)
	(Hashtbl.find all_metavariables parent))
| 451 | |
(* the following is needed to properly tokenize include files.  Because an
   include file is included after seeing a @, current_line_started is true.
   Current_line_started is not important for parsing the name of a rule, so
   we don't have to reset this value to true after parsing an included
   file. *)
let include_init _ =
  current_line_started := false
| 458 | |
(* Return [s] without its leading spaces and tabs. *)
let drop_spaces s =
  let len = String.length s in
  let is_blank c = c = ' ' || c = '\t' in
  (* index of the first non-blank character, or [len] if none *)
  let rec skip i =
    if i < len && is_blank (String.get s i) then skip (i+1) else i in
  let start = skip 0 in
  String.sub s start (len - start)
| 470 | } |
| 471 | |
(* ---------------------------------------------------------------------- *)
(* tokens *)

(* character classes for C identifiers *)
let letter = ['A'-'Z' 'a'-'z' '_']
let digit = ['0'-'9']

(* digit classes for the three C integer literal bases *)
let dec = ['0'-'9']
let oct = ['0'-'7']
let hex = ['0'-'9' 'a'-'f' 'A'-'F']

let decimal = ('0' | (['1'-'9'] dec*))
let octal = ['0'] oct+
let hexa = ("0x" |"0X") hex+

(* components of C floating point literals *)
let pent = dec+
let pfract = dec+
let sign = ['-' '+']
let exp = ['e''E'] sign? dec+
let real = pent exp | ((pent? '.' pfract | pent '.' pfract? ) exp?)
| 491 | |
| 492 | |
| 493 | rule token = parse |
| 494 | | [' ' '\t']* ['\n' '\r' '\011' '\012'] |
| 495 | { let cls = !current_line_started in |
| 496 | |
| 497 | if not cls |
| 498 | then |
| 499 | begin |
| 500 | match !current_line_type with |
| 501 | (D.PLUS,_,_) | (D.PLUSPLUS,_,_) -> |
| 502 | let info = get_current_line_type lexbuf in |
| 503 | reset_line lexbuf; |
| 504 | TPragma (Ast.Noindent "", info) |
| 505 | | _ -> reset_line lexbuf; token lexbuf |
| 506 | end |
| 507 | else (reset_line lexbuf; token lexbuf) } |
| 508 | |
| 509 | | [' ' '\t' ]+ { start_line false; token lexbuf } |
| 510 | |
| 511 | | "//" [^ '\n']* { |
| 512 | match !current_line_type with |
| 513 | (D.PLUS,_,_) | (D.PLUSPLUS,_,_) -> |
| 514 | TPragma (Ast.Indent (tok lexbuf), get_current_line_type lexbuf) |
| 515 | | _ -> start_line false; token lexbuf } |
| 516 | |
| 517 | | "@@" { start_line true; TArobArob } |
| 518 | | "@" { pass_zero(); |
| 519 | if !Data.in_rule_name or not !current_line_started |
| 520 | then (start_line true; TArob) |
| 521 | else (check_minus_context_linetype "@"; TPArob) } |
| 522 | |
| 523 | | "~=" { start_line true; TTildeEq (get_current_line_type lexbuf) } |
| 524 | | "!~=" { start_line true; TTildeExclEq (get_current_line_type lexbuf) } |
| 525 | | "WHEN" | "when" |
| 526 | { start_line true; check_minus_context_linetype (tok lexbuf); |
| 527 | TWhen (get_current_line_type lexbuf) } |
| 528 | |
| 529 | | "..." |
| 530 | { start_line true; check_minus_context_linetype (tok lexbuf); |
| 531 | TEllipsis (get_current_line_type lexbuf) } |
| 532 | (* |
| 533 | | "ooo" |
| 534 | { start_line true; check_minus_context_linetype (tok lexbuf); |
| 535 | TCircles (get_current_line_type lexbuf) } |
| 536 | |
| 537 | | "***" |
| 538 | { start_line true; check_minus_context_linetype (tok lexbuf); |
| 539 | TStars (get_current_line_type lexbuf) } |
| 540 | *) |
| 541 | | "<..." { start_line true; check_context_linetype (tok lexbuf); |
| 542 | TOEllipsis (get_current_line_type lexbuf) } |
| 543 | | "...>" { start_line true; check_context_linetype (tok lexbuf); |
| 544 | TCEllipsis (get_current_line_type lexbuf) } |
| 545 | | "<+..." { start_line true; check_minus_context_linetype (tok lexbuf); |
| 546 | TPOEllipsis (get_current_line_type lexbuf) } |
| 547 | | "...+>" { start_line true; check_minus_context_linetype (tok lexbuf); |
| 548 | TPCEllipsis (get_current_line_type lexbuf) } |
| 549 | (* |
| 550 | | "<ooo" { start_line true; check_context_linetype (tok lexbuf); |
| 551 | TOCircles (get_current_line_type lexbuf) } |
| 552 | | "ooo>" { start_line true; check_context_linetype (tok lexbuf); |
| 553 | TCCircles (get_current_line_type lexbuf) } |
| 554 | |
| 555 | | "<***" { start_line true; check_context_linetype (tok lexbuf); |
| 556 | TOStars (get_current_line_type lexbuf) } |
| 557 | | "***>" { start_line true; check_context_linetype (tok lexbuf); |
| 558 | TCStars (get_current_line_type lexbuf) } |
| 559 | *) |
| 560 | | "-" { pass_zero(); |
| 561 | if !current_line_started |
| 562 | then (start_line true; TMinus (get_current_line_type lexbuf)) |
| 563 | else (patch_or_match PATCH; |
| 564 | add_current_line_type D.MINUS; token lexbuf) } |
| 565 | | "+" { pass_zero(); |
| 566 | if !current_line_started |
| 567 | then (start_line true; TPlus (get_current_line_type lexbuf)) |
| 568 | else if !Data.in_meta |
| 569 | then TPlus0 |
| 570 | else (patch_or_match PATCH; |
| 571 | add_current_line_type D.PLUS; token lexbuf) } |
| 572 | | "?" { pass_zero(); |
| 573 | if !current_line_started |
| 574 | then (start_line true; TWhy (get_current_line_type lexbuf)) |
| 575 | else if !Data.in_meta |
| 576 | then TWhy0 |
| 577 | else (add_current_line_type D.OPT; token lexbuf) } |
| 578 | | "!" { pass_zero(); |
| 579 | if !current_line_started |
| 580 | then (start_line true; TBang (get_current_line_type lexbuf)) |
| 581 | else if !Data.in_meta |
| 582 | then TBang0 |
| 583 | else (add_current_line_type D.UNIQUE; token lexbuf) } |
| 584 | | "(" { if not !col_zero |
| 585 | then (start_line true; TOPar (get_current_line_type lexbuf)) |
| 586 | else |
| 587 | (start_line true; check_context_linetype (tok lexbuf); |
| 588 | TOPar0 (get_current_line_type lexbuf))} |
| 589 | | "\\(" { start_line true; TOPar0 (get_current_line_type lexbuf) } |
| 590 | | "|" { if not (!col_zero) |
| 591 | then (start_line true; TOr(get_current_line_type lexbuf)) |
| 592 | else (start_line true; |
| 593 | check_context_linetype (tok lexbuf); |
| 594 | TMid0 (get_current_line_type lexbuf))} |
| 595 | | "\\|" { start_line true; TMid0 (get_current_line_type lexbuf) } |
| 596 | | ")" { if not !col_zero |
| 597 | then (start_line true; TCPar (get_current_line_type lexbuf)) |
| 598 | else |
| 599 | (start_line true; check_context_linetype (tok lexbuf); |
| 600 | TCPar0 (get_current_line_type lexbuf))} |
| 601 | | "\\)" { start_line true; TCPar0 (get_current_line_type lexbuf) } |
| 602 | |
| 603 | | '[' { start_line true; TOCro (get_current_line_type lexbuf) } |
| 604 | | ']' { start_line true; TCCro (get_current_line_type lexbuf) } |
| 605 | | '{' { start_line true; TOBrace (get_current_line_type lexbuf) } |
| 606 | | '}' { start_line true; TCBrace (get_current_line_type lexbuf) } |
| 607 | |
| 608 | | "->" { start_line true; TPtrOp (get_current_line_type lexbuf) } |
| 609 | | '.' { start_line true; TDot (get_current_line_type lexbuf) } |
| 610 | | ',' { start_line true; TComma (get_current_line_type lexbuf) } |
| 611 | | ";" { start_line true; |
| 612 | if !Data.in_meta |
| 613 | then TMPtVirg (* works better with tokens_all *) |
| 614 | else TPtVirg (get_current_line_type lexbuf) } |
| 615 | |
| 616 | |
| 617 | | '*' { pass_zero(); |
| 618 | if !current_line_started |
| 619 | then |
| 620 | (start_line true; TMul (get_current_line_type lexbuf)) |
| 621 | else |
| 622 | (patch_or_match MATCH; |
| 623 | add_current_line_type D.MINUS; token lexbuf) } |
| 624 | | '/' { start_line true; |
| 625 | TDmOp (Ast.Div,get_current_line_type lexbuf) } |
| 626 | | '%' { start_line true; |
| 627 | TDmOp (Ast.Mod,get_current_line_type lexbuf) } |
| 628 | | '~' { start_line true; TTilde (get_current_line_type lexbuf) } |
| 629 | |
| 630 | | "++" { pass_zero(); |
| 631 | if !current_line_started |
| 632 | then |
| 633 | (start_line true; TInc (get_current_line_type lexbuf)) |
| 634 | else (patch_or_match PATCH; |
| 635 | add_current_line_type D.PLUSPLUS; token lexbuf) } |
| 636 | | "--" { start_line true; TDec (get_current_line_type lexbuf) } |
| 637 | |
| 638 | | "=" { start_line true; TEq (get_current_line_type lexbuf) } |
| 639 | |
| 640 | | "-=" { start_line true; mkassign Ast.Minus lexbuf } |
| 641 | | "+=" { start_line true; mkassign Ast.Plus lexbuf } |
| 642 | |
| 643 | | "*=" { start_line true; mkassign Ast.Mul lexbuf } |
| 644 | | "/=" { start_line true; mkassign Ast.Div lexbuf } |
| 645 | | "%=" { start_line true; mkassign Ast.Mod lexbuf } |
| 646 | |
| 647 | | "&=" { start_line true; mkassign Ast.And lexbuf } |
| 648 | | "|=" { start_line true; mkassign Ast.Or lexbuf } |
| 649 | | "^=" { start_line true; mkassign Ast.Xor lexbuf } |
| 650 | |
| 651 | | "<<=" { start_line true; mkassign Ast.DecLeft lexbuf } |
| 652 | | ">>=" { start_line true; mkassign Ast.DecRight lexbuf } |
| 653 | |
| 654 | | ":" { start_line true; TDotDot (get_current_line_type lexbuf) } |
| 655 | |
| 656 | | "==" { start_line true; TEqEq (get_current_line_type lexbuf) } |
| 657 | | "!=" { start_line true; TNotEq (get_current_line_type lexbuf) } |
| 658 | | ">=" { start_line true; |
| 659 | TLogOp(Ast.SupEq,get_current_line_type lexbuf) } |
| 660 | | "<=" { start_line true; |
| 661 | if !Data.in_meta |
| 662 | then TSub(get_current_line_type lexbuf) |
| 663 | else TLogOp(Ast.InfEq,get_current_line_type lexbuf) } |
| 664 | | "<" { start_line true; |
| 665 | TLogOp(Ast.Inf,get_current_line_type lexbuf) } |
| 666 | | ">" { start_line true; |
| 667 | TLogOp(Ast.Sup,get_current_line_type lexbuf) } |
| 668 | |
| 669 | | "&&" { start_line true; TAndLog (get_current_line_type lexbuf) } |
| 670 | | "||" { start_line true; TOrLog (get_current_line_type lexbuf) } |
| 671 | |
| 672 | | ">>" { start_line true; |
| 673 | TShOp(Ast.DecRight,get_current_line_type lexbuf) } |
| 674 | | "<<" { start_line true; |
| 675 | TShOp(Ast.DecLeft,get_current_line_type lexbuf) } |
| 676 | |
| 677 | | "&" { start_line true; TAnd (get_current_line_type lexbuf) } |
| 678 | | "^" { start_line true; TXor(get_current_line_type lexbuf) } |
| 679 | |
| 680 | | "##" { start_line true; TCppConcatOp } |
| 681 | | (( ("#" [' ' '\t']* "define" [' ' '\t']+)) as def) |
| 682 | ( (letter (letter |digit)*) as ident) |
| 683 | { start_line true; |
| 684 | let (arity,line,lline,offset,col,strbef,straft,pos) as lt = |
| 685 | get_current_line_type lexbuf in |
| 686 | let off = String.length def in |
| 687 | (* -1 in the code below because the ident is not at the line start *) |
| 688 | TDefine |
| 689 | (lt, |
| 690 | check_var ident |
| 691 | (arity,line,lline,offset+off,col+off,[],[],Ast0.NoMetaPos)) } |
| 692 | | (( ("#" [' ' '\t']* "define" [' ' '\t']+)) as def) |
| 693 | ( (letter (letter | digit)*) as ident) |
| 694 | '(' |
| 695 | { start_line true; |
| 696 | let (arity,line,lline,offset,col,strbef,straft,pos) as lt = |
| 697 | get_current_line_type lexbuf in |
| 698 | let off = String.length def in |
| 699 | TDefineParam |
| 700 | (lt, |
| 701 | check_var ident |
| 702 | (* why pos here but not above? *) |
| 703 | (arity,line,lline,offset+off,col+off,strbef,straft,pos), |
| 704 | offset + off + (String.length ident), |
| 705 | col + off + (String.length ident)) } |
| 706 | | "#" [' ' '\t']* "include" [' ' '\t']* '"' [^ '"']+ '"' |
| 707 | { TIncludeL |
| 708 | (let str = tok lexbuf in |
| 709 | let start = String.index str '"' in |
| 710 | let finish = String.rindex str '"' in |
| 711 | start_line true; |
| 712 | (process_include start finish str,get_current_line_type lexbuf)) } |
| 713 | | "#" [' ' '\t']* "include" [' ' '\t']* '<' [^ '>']+ '>' |
| 714 | { TIncludeNL |
| 715 | (let str = tok lexbuf in |
| 716 | let start = String.index str '<' in |
| 717 | let finish = String.rindex str '>' in |
| 718 | start_line true; |
| 719 | (process_include start finish str,get_current_line_type lexbuf)) } |
| 720 | | "#" [' ' '\t']* "if" [^'\n']* |
| 721 | | "#" [' ' '\t']* "ifdef" [^'\n']* |
| 722 | | "#" [' ' '\t']* "ifndef" [^'\n']* |
| 723 | | "#" [' ' '\t']* "else" [^'\n']* |
| 724 | | "#" [' ' '\t']* "elif" [^'\n']* |
| 725 | | "#" [' ' '\t']* "endif" [^'\n']* |
| 726 | | "#" [' ' '\t']* "error" [^'\n']* |
| 727 | { start_line true; check_plus_linetype (tok lexbuf); |
| 728 | TPragma (Ast.Noindent(tok lexbuf), get_current_line_type lexbuf) } |
| 729 | | "/*" |
| 730 | { start_line true; check_plus_linetype (tok lexbuf); |
| 731 | (* second argument to TPragma is not quite right, because |
| 732 | it represents only the first token of the comment, but that |
| 733 | should be good enough *) |
| 734 | TPragma (Ast.Indent("/*"^(comment lexbuf)), |
| 735 | get_current_line_type lexbuf) } |
| 736 | | "---" [^'\n']* |
| 737 | { (if !current_line_started |
| 738 | then lexerr "--- must be at the beginning of the line" ""); |
| 739 | start_line true; |
| 740 | TMinusFile |
| 741 | (let str = tok lexbuf in |
| 742 | (drop_spaces(String.sub str 3 (String.length str - 3)), |
| 743 | (get_current_line_type lexbuf))) } |
| 744 | | "+++" [^'\n']* |
| 745 | { (if !current_line_started |
| 746 | then lexerr "+++ must be at the beginning of the line" ""); |
| 747 | start_line true; |
| 748 | TPlusFile |
| 749 | (let str = tok lexbuf in |
| 750 | (drop_spaces(String.sub str 3 (String.length str - 3)), |
| 751 | (get_current_line_type lexbuf))) } |
| 752 | |
| 753 | | letter (letter | digit)* |
| 754 | { start_line true; id_tokens lexbuf } |
| 755 | |
| 756 | | "'" { start_line true; |
| 757 | TChar(char lexbuf,get_current_line_type lexbuf) } |
| 758 | | '"' { start_line true; |
| 759 | TString(string lexbuf,(get_current_line_type lexbuf)) } |
| 760 | | (real as x) { start_line true; |
| 761 | TFloat(x,(get_current_line_type lexbuf)) } |
| 762 | | ((( decimal | hexa | octal) |
| 763 | ( ['u' 'U'] |
| 764 | | ['l' 'L'] |
| 765 | | (['l' 'L'] ['u' 'U']) |
| 766 | | (['u' 'U'] ['l' 'L']) |
| 767 | | (['u' 'U'] ['l' 'L'] ['l' 'L']) |
| 768 | | (['l' 'L'] ['l' 'L']) |
| 769 | )? |
| 770 | ) as x) { start_line true; TInt(x,(get_current_line_type lexbuf)) } |
| 771 | |
| 772 | | "<=>" { TIso } |
| 773 | | "=>" { TRightIso } |
| 774 | |
| 775 | | eof { EOF } |
| 776 | |
| 777 | | _ { lexerr "unrecognised symbol, in token rule: " (tok lexbuf) } |
| 778 | |
| 779 | |
(* Reads the remainder of a character constant; the opening single quote
   has already been consumed by the token rule.  Returns the contents of
   the literal as a string — escape sequences kept verbatim — without the
   surrounding quotes. *)
and char = parse
(* a single ordinary character followed by the closing quote, e.g. 'a' *)
| (_ as x) "'" { String.make 1 x }
(* octal escape: backslash followed by one to three octal digits.
   NOTE(review): in ocamllex "as" is followed by a single ident, so the
   trailing "'" concatenates after the binding — x should exclude the
   closing quote, matching the rules below; confirm against the ocamllex
   manual's precedence rules *)
| (("\\" (oct | oct oct | oct oct oct)) as x "'") { x }
(* hexadecimal escape: \x followed by one or two hex digits *)
| (("\\x" (hex | hex hex)) as x "'") { x }
(* any other backslash escape: accept only the listed escape characters
   (the standard C set plus 'e'), otherwise raise a lexing error *)
| (("\\" (_ as v)) as x "'")
{ (match v with
(* the match arms only validate v; the action returns x unchanged *)
| 'n' -> () | 't' -> () | 'v' -> () | 'b' -> ()
| 'r' -> () | 'f' -> () | 'a' -> ()
| '\\' -> () | '?' -> () | '\'' -> () | '"' -> ()
| 'e' -> ()
| _ -> lexerr "unrecognised symbol: " (tok lexbuf)
);
x
}
(* anything else (e.g. an unterminated literal) is a lexing error *)
| _ { lexerr "unrecognised symbol: " (tok lexbuf) }
| 795 | |
(* Reads the body of a string constant; the opening double quote has
   already been consumed by the token rule.  Consumes up to and including
   the closing quote and returns the contents (escape sequences kept
   verbatim), built by recursive calls that concatenate one matched
   piece per call. *)
and string  = parse
(* closing quote: end of string *)
| '"' { "" }
(* one ordinary character.  Although this rule precedes the escape rules
   below, ocamllex's longest-match disambiguation still gives the
   multi-character escape rules priority whenever they apply. *)
| (_ as x) { Common.string_of_char x ^ string lexbuf }
(* octal escape: backslash followed by one to three octal digits *)
| ("\\" (oct | oct oct | oct oct oct)) as x { x ^ string lexbuf }
(* hexadecimal escape: \x followed by one or two hex digits *)
| ("\\x" (hex | hex hex)) as x { x ^ string lexbuf }
(* any other backslash escape: validate the escaped character, keep the
   two-character sequence verbatim, and continue *)
| ("\\" (_ as v)) as x
{
(match v with
(* standard C escapes, plus a newline continuation and the SmPL
   regexp-ish characters ( | ) *)
| 'n' -> () | 't' -> () | 'v' -> () | 'b' -> () | 'r' -> ()
| 'f' -> () | 'a' -> ()
| '\\' -> () | '?' -> () | '\'' -> () | '"' -> ()
| 'e' -> ()
| '\n' -> ()
| '(' -> () | '|' -> () | ')' -> ()
| _ -> lexerr "unrecognised symbol:" (tok lexbuf)
);
x ^ string lexbuf
}
(* NOTE(review): unreachable — the (_ as x) rule above already matches
   any single character, so this catch-all can never fire *)
| _ { lexerr "unrecognised symbol: " (tok lexbuf) }
| 815 | |
(* Reads the body of a C-style comment; the opening /* has already been
   consumed by the token rule.  Returns the comment text including the
   terminating */ .  check_comment is defined earlier in the file;
   presumably it validates the +/- annotation of each comment line —
   confirm against its definition. *)
and comment = parse
(* end of comment: return the closing delimiter *)
| "*/" { let s = tok lexbuf in check_comment s; start_line true; s }
(* line terminators: reset the line-tracking state so that each line of
   a multi-line comment is checked individually *)
| ['\n' '\r' '\011' '\012']
{ let s = tok lexbuf in
(* even blank line should have a + *)
check_comment s;
reset_line lexbuf; s ^ comment lexbuf }
(* a '+' at the start of a line inside plus code marks the line as added
   and is dropped from the comment text; a '+' after the line has
   started is ordinary comment text and is kept *)
| "+" { pass_zero();
if !current_line_started
then (start_line true; let s = tok lexbuf in s^(comment lexbuf))
else (start_line true; comment lexbuf) }
(* noteopti: *)
(* any non-star character: accumulate and continue *)
| [^ '*']
{ let s = tok lexbuf in
check_comment s; start_line true; s ^ comment lexbuf }
(* a star not followed by '/' (the "*/" rule wins by longest match) *)
| [ '*']
{ let s = tok lexbuf in
check_comment s; start_line true; s ^ comment lexbuf }
(* NOTE(review): unreachable — the two single-character rules above
   together cover every character, so this catch-all can never fire *)
| _
{ start_line true; let s = tok lexbuf in
Common.pr2 ("LEXER: unrecognised symbol in comment:"^s);
s ^ comment lexbuf
}
| 839 | |