| 1 | (* |
| 2 | * Copyright 2005-2009, Ecole des Mines de Nantes, University of Copenhagen |
| 3 | * Yoann Padioleau, Julia Lawall, Rene Rydhof Hansen, Henrik Stuart, Gilles Muller, Nicolas Palix |
| 4 | * This file is part of Coccinelle. |
| 5 | * |
| 6 | * Coccinelle is free software: you can redistribute it and/or modify |
| 7 | * it under the terms of the GNU General Public License as published by |
| 8 | * the Free Software Foundation, according to version 2 of the License. |
| 9 | * |
| 10 | * Coccinelle is distributed in the hope that it will be useful, |
| 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 13 | * GNU General Public License for more details. |
| 14 | * |
| 15 | * You should have received a copy of the GNU General Public License |
| 16 | * along with Coccinelle. If not, see <http://www.gnu.org/licenses/>. |
| 17 | * |
| 18 | * The authors reserve the right to distribute this or future versions of |
| 19 | * Coccinelle under other licenses. |
| 20 | *) |
| 21 | |
| 22 | |
| 23 | { |
| 24 | open Parser_cocci_menhir |
| 25 | module D = Data |
| 26 | module Ast = Ast_cocci |
| 27 | module Ast0 = Ast0_cocci |
| 28 | module P = Parse_aux |
| 29 | exception Lexical of string |
(* shorthand: the text matched by the current lexeme *)
let tok = Lexing.lexeme

(* current physical line number, starting from 1 *)
let line = ref 1
(* current logical line number (advanced only for lines that contain a
   token; see start_line below), starting from 0 *)
let logical_line = ref 0
| 34 | |
| 35 | (* ---------------------------------------------------------------------- *) |
| 36 | (* control codes *) |
| 37 | |
| 38 | (* Defined in data.ml |
| 39 | type line_type = MINUS | OPTMINUS | UNIQUEMINUS | PLUS | CONTEXT | UNIQUE | OPT |
| 40 | *) |
| 41 | |
(* The -/+/context status of the line currently being lexed, with its
   physical and logical line numbers.  Reset at each newline by
   reset_line and refined by the column-zero control characters. *)
let current_line_type = ref (D.CONTEXT,!line,!logical_line)

(* true when the most recent token was on a + (or ++) line *)
let prev_plus = ref false
let line_start = ref 0 (* offset of the beginning of the line *)

(* Build the line-type tuple attached to each token:
   (line type, line, logical line, byte offset of the token,
    spaces preceding the token, strings before, strings after,
    metavariable position).  The last three slots start out empty/NoMetaPos
   and are filled in by later phases. *)
let get_current_line_type lexbuf =
  let (c,l,ll) = !current_line_type in
  let lex_start = Lexing.lexeme_start lexbuf in
  let preceeding_spaces =
    if !line_start < 0 then 0 else lex_start - !line_start in
  (*line_start := -1;*)
  (* || rather than the deprecated "or" keyword (consistent with the
     uses of || elsewhere in this file) *)
  prev_plus := (c = D.PLUS) || (c = D.PLUSPLUS);
  (c,l,ll,lex_start,preceeding_spaces,[],[],Ast0.NoMetaPos)
let current_line_started = ref false
let col_zero = ref true (* true until a non-layout character is seen *)
| 56 | |
(* Called when a newline is consumed: advance the physical line counter,
   reset the line type to CONTEXT, and record where the new line begins
   so preceding-space counts can be computed by get_current_line_type. *)
let reset_line lexbuf =
  line := !line + 1;
  current_line_type := (D.CONTEXT,!line,!logical_line);
  current_line_started := false;
  col_zero := true;
  (* +1 because lexeme_start points at the newline character itself *)
  line_start := Lexing.lexeme_start lexbuf + 1

(* physical line on which the current logical line started, or -1 *)
let started_line = ref (-1)
| 65 | |
(* Record that a token is being produced on the current line.  When
   seen_char is true and this is the first token of the physical line,
   the logical line counter advances (blank lines thus do not count). *)
let start_line seen_char =
  current_line_started := true;
  col_zero := false;
  (if seen_char && not(!line = !started_line)
  then
    begin
      started_line := !line;
      logical_line := !logical_line + 1
    end)

(* leave column zero without marking the line as started; used by the
   control characters (-, +, ?, !, *) that may occupy column 0 *)
let pass_zero _ = col_zero := false
| 77 | |
(* Abort lexing with a Lexical error whose message is s1 followed by s2. *)
let lexerr s1 s2 = raise (Lexical (s1 ^ s2))
| 79 | |
(* Combine a newly seen column-zero control character x with the line
   type accumulated so far.  Legal transitions: "-" refines CONTEXT,
   UNIQUE, or OPT; "+", "++", "!", and "?" apply only to a line still in
   the CONTEXT state.  Any other combination is an error. *)
let add_current_line_type x =
  match (x,!current_line_type) with
    (D.MINUS,(D.CONTEXT,ln,lln)) ->
      current_line_type := (D.MINUS,ln,lln)
  | (D.MINUS,(D.UNIQUE,ln,lln)) ->
      current_line_type := (D.UNIQUEMINUS,ln,lln)
  | (D.MINUS,(D.OPT,ln,lln)) ->
      current_line_type := (D.OPTMINUS,ln,lln)
  | (D.PLUS,(D.CONTEXT,ln,lln)) ->
      current_line_type := (D.PLUS,ln,lln)
  | (D.PLUSPLUS,(D.CONTEXT,ln,lln)) ->
      current_line_type := (D.PLUSPLUS,ln,lln)
  | (D.UNIQUE,(D.CONTEXT,ln,lln)) ->
      current_line_type := (D.UNIQUE,ln,lln)
  | (D.OPT,(D.CONTEXT,ln,lln)) ->
      current_line_type := (D.OPT,ln,lln)
  | _ -> lexerr "invalid control character combination" ""
| 97 | |
(* Reject token s on a + or ++ line (e.g. "...", "when"). *)
let check_minus_context_linetype s =
  match !current_line_type with
    (D.PLUS,_,_) | (D.PLUSPLUS,_,_) -> lexerr "invalid in a + context: " s
  | _ -> ()

(* Require a pure context line (no -, +, !, or ? marker) for token s. *)
let check_context_linetype s =
  match !current_line_type with
    (D.CONTEXT,_,_) -> ()
  | _ -> lexerr "invalid in a nonempty context: " s

(* Require a + or ++ line for token s (used for comments and cpp). *)
let check_plus_linetype s =
  match !current_line_type with
    (D.PLUS,_,_) | (D.PLUSPLUS,_,_) -> ()
  | _ -> lexerr "invalid in a non + context: " s

(* Accept any line type except the minus variants for token s. *)
let check_arity_context_linetype s =
  match !current_line_type with
    (D.CONTEXT,_,_) | (D.PLUS,_,_) | (D.PLUSPLUS,_,_)
  | (D.UNIQUE,_,_) | (D.OPT,_,_) -> ()
  | _ -> lexerr "invalid in a nonempty context: " s
| 118 | |
(* For an #include on a + line, forbid "..." in the file name; then
   return the text strictly between the delimiters found at string
   positions start and finish of str (the quotes or angle brackets). *)
let process_include start finish str =
  (match !current_line_type with
    (D.PLUS,_,_) | (D.PLUSPLUS,_,_) ->
      (try
        (* search from position start so only the file-name part is checked *)
        let _ = Str.search_forward (Str.regexp "\\.\\.\\.") str start in
        lexerr "... not allowed in + include" ""
      with Not_found -> ())
  | _ -> ());
  String.sub str (start + 1) (finish - start - 1)
| 128 | |
| 129 | (* ---------------------------------------------------------------------- *) |
(* Whether this semantic patch transforms (PATCH: -/+ markers) or only
   matches (MATCH: * markers); the two cannot be mixed in one file. *)
type pm = PATCH | MATCH | UNKNOWN

let pm = ref UNKNOWN

(* Record the mode implied by the first column-zero marker seen and check
   later markers against it.  The first marker also sets or clears
   Flag.sgrep_mode2 (sgrep/match mode) as a side effect. *)
let patch_or_match = function
    PATCH ->
      if not !D.ignore_patch_or_match
      then
        (match !pm with
          MATCH ->
            lexerr "- or + not allowed in the first column for a match" ""
        | PATCH -> ()
        | UNKNOWN -> Flag.sgrep_mode2 := false; pm := PATCH)
  | MATCH ->
      if not !D.ignore_patch_or_match
      then
        (match !pm with
          PATCH -> lexerr "* not allowed in the first column for a patch" ""
        | MATCH -> ()
        | UNKNOWN -> Flag.sgrep_mode2 := true; pm := MATCH)
  | _ -> failwith "unexpected argument"
| 151 | |
| 152 | (* ---------------------------------------------------------------------- *) |
| 153 | (* identifiers, including metavariables *) |
| 154 | |
(* metavariables of the current rule: name -> token builder over a clt *)
let metavariables = (Hashtbl.create(100) : (string, D.clt -> token) Hashtbl.t)

(* metavariables of every rule seen so far, indexed by rule name; used
   to install inherited bindings (see install_bindings in init) *)
let all_metavariables =
  (Hashtbl.create(100) : (string,(string * (D.clt -> token)) list) Hashtbl.t)

(* declared type names *)
let type_names = (Hashtbl.create(100) : (string, D.clt -> token) Hashtbl.t)

(* declared declarer names *)
let declarer_names = (Hashtbl.create(100) : (string, D.clt -> token) Hashtbl.t)

(* declared iterator names *)
let iterator_names = (Hashtbl.create(100) : (string, D.clt -> token) Hashtbl.t)

(* names of rules seen so far (value is unit; used as a set) *)
let rule_names = (Hashtbl.create(100) : (string, unit) Hashtbl.t)
| 167 | |
(* Classify identifier s: an iso file path (<...>) in a prolog or rule
   name, a declared metavariable, a type/declarer/iterator name, a rule
   name (when in a metavariable declaration or rule name), or, failing
   all of those, a plain identifier. *)
let check_var s linetype =
  let fail _ =
    if (!Data.in_prolog || !Data.in_rule_name) &&
      Str.string_match (Str.regexp "<.*>") s 0
    then TPathIsoFile s
    else
      try (Hashtbl.find metavariables s) linetype
      with Not_found ->
        (try (Hashtbl.find type_names s) linetype
        with Not_found ->
          (try (Hashtbl.find declarer_names s) linetype
          with Not_found ->
            (try (Hashtbl.find iterator_names s) linetype
            with Not_found -> TIdent (s,linetype)))) in
  (* || for consistency with the test above; "or" is deprecated *)
  if !Data.in_meta || !Data.in_rule_name
  then (try Hashtbl.find rule_names s; TRuleName s with Not_found -> fail())
  else fail()
| 185 | |
(* Map the identifier just matched in lexbuf to a token.  Keyword
   recognition is context sensitive: metavariable-declaration keywords
   are recognized only between @@s (in_meta), rule-header keywords only
   in a rule name or prolog, and iso keywords only in an iso file.
   Anything unrecognized falls through to check_var. *)
let id_tokens lexbuf =
  let s = tok lexbuf in
  let linetype = get_current_line_type lexbuf in
  let in_rule_name = !Data.in_rule_name in
  (* saw_struct suppresses keyword recognition after struct/union/enum,
     so that e.g. a field named "list" is not taken as the keyword *)
  let in_meta = !Data.in_meta && not !Data.saw_struct in
  let in_iso = !Data.in_iso in
  let in_prolog = !Data.in_prolog in
  match s with
    (* metavariable declaration keywords *)
    "identifier" when in_meta -> check_arity_context_linetype s; TIdentifier
  | "type" when in_meta -> check_arity_context_linetype s; TType
  | "parameter" when in_meta -> check_arity_context_linetype s; TParameter
  | "constant" when in_meta -> check_arity_context_linetype s; TConstant
  | "generated" when in_rule_name && not (!Flag.make_hrule = None) ->
      check_arity_context_linetype s; TGenerated
  | "expression" when in_meta || in_rule_name ->
      check_arity_context_linetype s; TExpression
  | "initialiser" when in_meta || in_rule_name ->
      check_arity_context_linetype s; TInitialiser
  | "initializer" when in_meta || in_rule_name ->
      check_arity_context_linetype s; TInitialiser
  | "idexpression" when in_meta ->
      check_arity_context_linetype s; TIdExpression
  | "statement" when in_meta -> check_arity_context_linetype s; TStatement
  | "function" when in_meta -> check_arity_context_linetype s; TFunction
  | "local" when in_meta -> check_arity_context_linetype s; TLocal
  | "list" when in_meta -> check_arity_context_linetype s; Tlist
  | "fresh" when in_meta -> check_arity_context_linetype s; TFresh
  | "typedef" when in_meta -> check_arity_context_linetype s; TTypedef
  | "declarer" when in_meta -> check_arity_context_linetype s; TDeclarer
  | "iterator" when in_meta -> check_arity_context_linetype s; TIterator
  | "name" when in_meta -> check_arity_context_linetype s; TName
  | "position" when in_meta -> check_arity_context_linetype s; TPosition
  | "any" when in_meta -> check_arity_context_linetype s; TPosAny
  | "pure" when in_meta && in_iso ->
      check_arity_context_linetype s; TPure
  | "context" when in_meta && in_iso ->
      check_arity_context_linetype s; TContext
  | "error" when in_meta -> check_arity_context_linetype s; TError
  | "words" when in_meta -> check_context_linetype s; TWords

    (* rule header and prolog keywords *)
  | "using" when in_rule_name || in_prolog -> check_context_linetype s; TUsing
  | "virtual" when in_prolog -> check_context_linetype s; TVirtual
  | "disable" when in_rule_name -> check_context_linetype s; TDisable
  | "extends" when in_rule_name -> check_context_linetype s; TExtends
  | "depends" when in_rule_name -> check_context_linetype s; TDepends
  | "on" when in_rule_name -> check_context_linetype s; TOn
  | "ever" when in_rule_name -> check_context_linetype s; TEver
  | "never" when in_rule_name -> check_context_linetype s; TNever
  (* exists and forall for when are reparsed in parse_cocci.ml *)
  | "exists" when in_rule_name -> check_context_linetype s; TExists
  | "forall" when in_rule_name -> check_context_linetype s; TForall
  | "script" when in_rule_name -> check_context_linetype s; TScript
  | "initialize" when in_rule_name -> check_context_linetype s; TInitialize
  | "finalize" when in_rule_name -> check_context_linetype s; TFinalize

    (* C base types *)
  | "char" -> Tchar linetype
  | "short" -> Tshort linetype
  | "int" -> Tint linetype
  | "double" -> Tdouble linetype
  | "float" -> Tfloat linetype
  | "long" -> Tlong linetype
  | "void" -> Tvoid linetype
  (* in_meta is only for the first keyword; drop it now to allow any type
     name *)
  | "struct" -> Data.saw_struct := true; Tstruct linetype
  | "union" -> Data.saw_struct := true; Tunion linetype
  | "enum" -> Data.saw_struct := true; Tenum linetype
  | "unsigned" -> Tunsigned linetype
  | "signed" -> Tsigned linetype

    (* C storage classes *)
  | "auto" -> Tauto linetype
  | "register" -> Tregister linetype
  | "extern" -> Textern linetype
  | "static" -> Tstatic linetype
  | "inline" -> Tinline linetype
  | "typedef" -> Ttypedef linetype

    (* C type qualifiers *)
  | "const" -> Tconst linetype
  | "volatile" -> Tvolatile linetype

    (* C statement keywords *)
  | "if" -> TIf linetype
  | "else" -> TElse linetype
  | "while" -> TWhile linetype
  | "do" -> TDo linetype
  | "for" -> TFor linetype
  | "switch" -> TSwitch linetype
  | "case" -> TCase linetype
  | "default" -> TDefault linetype
  | "return" -> TReturn linetype
  | "break" -> TBreak linetype
  | "continue" -> TContinue linetype
  | "goto" -> TGoto linetype

  | "sizeof" -> TSizeof linetype

    (* isomorphism rule categories *)
  | "Expression" -> TIsoExpression
  | "ArgExpression" -> TIsoArgExpression
  | "TestExpression" -> TIsoTestExpression
  | "Statement" -> TIsoStatement
  | "Declaration" -> TIsoDeclaration
  | "Type" -> TIsoType
  | "TopLevel" -> TIsoTopLevel

    (* everything else: metavariable, declared name, or plain ident *)
  | s -> check_var s linetype
| 290 | |
(* Build an op= (compound assignment) token from the operator op and the
   current line-type information of lexbuf. *)
let mkassign op lexbuf =
  let clt = get_current_line_type lexbuf in
  TAssign (Ast.OpAssign op, clt)
| 293 | |
(* Reset all lexer state and (re)install the callbacks through which the
   parser registers metavariables, type/declarer/iterator names, and
   inherited bindings.  Called before each semantic patch is processed. *)
let init _ =
  (* scalar lexing state *)
  line := 1;
  logical_line := 0;
  prev_plus := false;
  line_start := 0;
  current_line_started := false;
  col_zero := true;
  pm := UNKNOWN;
  Data.in_rule_name := false;
  Data.in_meta := false;
  Data.in_prolog := false;
  Data.saw_struct := false;
  Data.inheritable_positions := [];
  (* symbol tables *)
  Hashtbl.clear all_metavariables;
  Hashtbl.clear Data.all_metadecls;
  Hashtbl.clear metavariables;
  Hashtbl.clear type_names;
  Hashtbl.clear rule_names;
  Hashtbl.clear iterator_names;
  Hashtbl.clear declarer_names;
  let get_name (_,x) = x in
  (* each callback builds a closure that turns a clt into the right
     metavariable token and registers it under the metavariable's name *)
  Data.add_id_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaId(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_fresh_id_meta :=
    (fun name ->
      let fn clt = TMetaId(name,Ast.IdNoConstraint,Ast0.Impure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_type_meta :=
    (fun name pure ->
      let fn clt = TMetaType(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_init_meta :=
    (fun name pure ->
      let fn clt = TMetaInit(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_param_meta :=
    (function name -> function pure ->
      let fn clt = TMetaParam(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_paramlist_meta :=
    (function name -> function lenname -> function pure ->
      let fn clt = TMetaParamList(name,lenname,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_const_meta :=
    (fun tyopt name constraints pure ->
      let fn clt = TMetaConst(name,constraints,pure,tyopt,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_err_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaErr(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_exp_meta :=
    (fun tyopt name constraints pure ->
      let fn clt = TMetaExp(name,constraints,pure,tyopt,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_idexp_meta :=
    (fun tyopt name constraints pure ->
      let fn clt = TMetaIdExp(name,constraints,pure,tyopt,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_local_idexp_meta :=
    (fun tyopt name constraints pure ->
      let fn clt = TMetaLocalIdExp(name,constraints,pure,tyopt,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_explist_meta :=
    (function name -> function lenname -> function pure ->
      let fn clt = TMetaExpList(name,lenname,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_stm_meta :=
    (function name -> function pure ->
      let fn clt = TMetaStm(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_stmlist_meta :=
    (function name -> function pure ->
      let fn clt = TMetaStmList(name,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_func_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaFunc(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_local_func_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaLocalFunc(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_iterator_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaIterator(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_declarer_meta :=
    (fun name constraints pure ->
      let fn clt = TMetaDeclarer(name,constraints,pure,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  (* position metavariables may only appear in minus/context code;
     a clt whose line type is PLUS aborts with failwith *)
  Data.add_pos_meta :=
    (fun name constraints any ->
      let fn ((d,ln,_,_,_,_,_,_) as clt) =
        (if d = Data.PLUS
        then
          failwith
            (Printf.sprintf "%d: positions only allowed in minus code" ln));
        TMetaPos(name,constraints,any,clt) in
      Hashtbl.replace metavariables (get_name name) fn);
  Data.add_type_name :=
    (function name ->
      let fn clt = TTypeId(name,clt) in
      Hashtbl.replace type_names name fn);
  Data.add_declarer_name :=
    (function name ->
      let fn clt = TDeclarerId(name,clt) in
      Hashtbl.replace declarer_names name fn);
  Data.add_iterator_name :=
    (function name ->
      let fn clt = TIteratorId(name,clt) in
      Hashtbl.replace iterator_names name fn);
  (* metavariables are rule local: cleared at the start of each rule *)
  Data.init_rule := (function _ -> Hashtbl.clear metavariables);
  (* copy a parent rule's metavariables in, for inheritance *)
  Data.install_bindings :=
    (function parent ->
      List.iter (function (name,fn) -> Hashtbl.add metavariables name fn)
        (Hashtbl.find all_metavariables parent))
| 413 | |
(* Return s with its leading spaces and tabs removed; other whitespace
   and all trailing/internal whitespace are left untouched. *)
let drop_spaces s =
  let len = String.length s in
  let is_blank c = c = ' ' || c = '\t' in
  let first = ref 0 in
  while !first < len && is_blank (String.get s !first) do
    incr first
  done;
  String.sub s !first (len - !first)
| 425 | } |
| 426 | |
| 427 | (* ---------------------------------------------------------------------- *) |
| 428 | (* tokens *) |
| 429 | |
(* identifier characters *)
let letter = ['A'-'Z' 'a'-'z' '_']
let digit = ['0'-'9']

(* digit classes for integer literals *)
let dec = ['0'-'9']
let oct = ['0'-'7']
let hex = ['0'-'9' 'a'-'f' 'A'-'F']

(* C integer literal bodies (suffixes handled in the token rule) *)
let decimal = ('0' | (['1'-'9'] dec*))
let octal = ['0'] oct+
let hexa = ("0x" |"0X") hex+

(* C floating point literals: integer part, fraction, optional exponent *)
let pent = dec+
let pfract = dec+
let sign = ['-' '+']
let exp = ['e''E'] sign? dec+
let real = pent exp | ((pent? '.' pfract | pent '.' pfract? ) exp?)
| 446 | |
| 447 | |
(* Main tokenizer for SmPL (semantic patch) files. *)
rule token = parse
  (* ----- layout and line comments ----- *)
  | [' ' '\t' ]+ { start_line false; token lexbuf }
  | ['\n' '\r' '\011' '\012'] { reset_line lexbuf; token lexbuf }

  | "//" [^ '\n']* { start_line false; token lexbuf }

  (* ----- rule delimiters and metavariable attachment ----- *)
  | "@@" { start_line true; TArobArob }
  | "@" { pass_zero();
          (* || rather than the deprecated "or" keyword *)
          if !Data.in_rule_name || not !current_line_started
          then (start_line true; TArob)
          else (check_minus_context_linetype "@"; TPArob) }

  | "~=" { start_line true; TTildeEq (get_current_line_type lexbuf) }
  | "!~=" { start_line true; TTildeExclEq (get_current_line_type lexbuf) }
  | "WHEN" | "when"
      { start_line true; check_minus_context_linetype (tok lexbuf);
        TWhen (get_current_line_type lexbuf) }

  | "..."
      { start_line true; check_minus_context_linetype (tok lexbuf);
        TEllipsis (get_current_line_type lexbuf) }
(*
  | "ooo"
      { start_line true; check_minus_context_linetype (tok lexbuf);
        TCircles (get_current_line_type lexbuf) }

  | "***"
      { start_line true; check_minus_context_linetype (tok lexbuf);
        TStars (get_current_line_type lexbuf) }
*)
  | "<..." { start_line true; check_context_linetype (tok lexbuf);
             TOEllipsis (get_current_line_type lexbuf) }
  | "...>" { start_line true; check_context_linetype (tok lexbuf);
             TCEllipsis (get_current_line_type lexbuf) }
  | "<+..." { start_line true; check_context_linetype (tok lexbuf);
              TPOEllipsis (get_current_line_type lexbuf) }
  | "...+>" { start_line true; check_context_linetype (tok lexbuf);
              TPCEllipsis (get_current_line_type lexbuf) }
(*
  | "<ooo" { start_line true; check_context_linetype (tok lexbuf);
             TOCircles (get_current_line_type lexbuf) }
  | "ooo>" { start_line true; check_context_linetype (tok lexbuf);
             TCCircles (get_current_line_type lexbuf) }

  | "<***" { start_line true; check_context_linetype (tok lexbuf);
             TOStars (get_current_line_type lexbuf) }
  | "***>" { start_line true; check_context_linetype (tok lexbuf);
             TCStars (get_current_line_type lexbuf) }
*)
  (* ----- transformation markers: before any token on the line they set
     the line type; once the line has started they are plain operators ----- *)
  | "-" { pass_zero();
          if !current_line_started
          then (start_line true; TMinus (get_current_line_type lexbuf))
          else (patch_or_match PATCH;
                add_current_line_type D.MINUS; token lexbuf) }
  | "+" { pass_zero();
          if !current_line_started
          then (start_line true; TPlus (get_current_line_type lexbuf))
          else if !Data.in_meta
          then TPlus0
          else (patch_or_match PATCH;
                add_current_line_type D.PLUS; token lexbuf) }
  | "?" { pass_zero();
          if !current_line_started
          then (start_line true; TWhy (get_current_line_type lexbuf))
          else if !Data.in_meta
          then TWhy0
          else (add_current_line_type D.OPT; token lexbuf) }
  | "!" { pass_zero();
          if !current_line_started
          then (start_line true; TBang (get_current_line_type lexbuf))
          else if !Data.in_meta
          then TBang0
          else (add_current_line_type D.UNIQUE; token lexbuf) }
  (* ----- disjunction delimiters: column-zero or backslashed forms ----- *)
  | "(" { if not !col_zero
          then (start_line true; TOPar (get_current_line_type lexbuf))
          else
            (start_line true; check_context_linetype (tok lexbuf);
             TOPar0 (get_current_line_type lexbuf))}
  | "\\(" { start_line true; TOPar0 (get_current_line_type lexbuf) }
  | "|" { if not (!col_zero)
          then (start_line true; TOr(get_current_line_type lexbuf))
          else (start_line true;
                check_context_linetype (tok lexbuf);
                TMid0 (get_current_line_type lexbuf))}
  | "\\|" { start_line true; TMid0 (get_current_line_type lexbuf) }
  | ")" { if not !col_zero
          then (start_line true; TCPar (get_current_line_type lexbuf))
          else
            (start_line true; check_context_linetype (tok lexbuf);
             TCPar0 (get_current_line_type lexbuf))}
  | "\\)" { start_line true; TCPar0 (get_current_line_type lexbuf) }

  | '[' { start_line true; TOCro (get_current_line_type lexbuf) }
  | ']' { start_line true; TCCro (get_current_line_type lexbuf) }
  | '{' { start_line true; TOBrace (get_current_line_type lexbuf) }
  | '}' { start_line true; TCBrace (get_current_line_type lexbuf) }

  | "->" { start_line true; TPtrOp (get_current_line_type lexbuf) }
  | '.' { start_line true; TDot (get_current_line_type lexbuf) }
  | ',' { start_line true; TComma (get_current_line_type lexbuf) }
  | ";" { start_line true;
          if !Data.in_meta
          then TMPtVirg (* works better with tokens_all *)
          else TPtVirg (get_current_line_type lexbuf) }

  (* '*' at column zero is the match (sgrep) marker *)
  | '*' { pass_zero();
          if !current_line_started
          then
            (start_line true; TMul (get_current_line_type lexbuf))
          else
            (patch_or_match MATCH;
             add_current_line_type D.MINUS; token lexbuf) }
  | '/' { start_line true;
          TDmOp (Ast.Div,get_current_line_type lexbuf) }
  | '%' { start_line true;
          TDmOp (Ast.Mod,get_current_line_type lexbuf) }
  | '~' { start_line true; TTilde (get_current_line_type lexbuf) }

  (* "++" at column zero marks a ++ line *)
  | "++" { pass_zero();
           if !current_line_started
           then
             (start_line true; TInc (get_current_line_type lexbuf))
           else (patch_or_match PATCH;
                 add_current_line_type D.PLUSPLUS; token lexbuf) }
  | "--" { start_line true; TDec (get_current_line_type lexbuf) }

  | "=" { start_line true; TEq (get_current_line_type lexbuf) }

  (* compound assignments *)
  | "-=" { start_line true; mkassign Ast.Minus lexbuf }
  | "+=" { start_line true; mkassign Ast.Plus lexbuf }

  | "*=" { start_line true; mkassign Ast.Mul lexbuf }
  | "/=" { start_line true; mkassign Ast.Div lexbuf }
  | "%=" { start_line true; mkassign Ast.Mod lexbuf }

  | "&=" { start_line true; mkassign Ast.And lexbuf }
  | "|=" { start_line true; mkassign Ast.Or lexbuf }
  | "^=" { start_line true; mkassign Ast.Xor lexbuf }

  | "<<=" { start_line true; mkassign Ast.DecLeft lexbuf }
  | ">>=" { start_line true; mkassign Ast.DecRight lexbuf }

  | ":" { start_line true; TDotDot (get_current_line_type lexbuf) }

  (* comparison, logical, shift, and bit operators *)
  | "==" { start_line true; TEqEq (get_current_line_type lexbuf) }
  | "!=" { start_line true; TNotEq (get_current_line_type lexbuf) }
  | ">=" { start_line true;
           TLogOp(Ast.SupEq,get_current_line_type lexbuf) }
  | "<=" { start_line true;
           TLogOp(Ast.InfEq,get_current_line_type lexbuf) }
  | "<" { start_line true;
          TLogOp(Ast.Inf,get_current_line_type lexbuf) }
  | ">" { start_line true;
          TLogOp(Ast.Sup,get_current_line_type lexbuf) }

  | "&&" { start_line true; TAndLog (get_current_line_type lexbuf) }
  | "||" { start_line true; TOrLog (get_current_line_type lexbuf) }

  | ">>" { start_line true;
           TShOp(Ast.DecRight,get_current_line_type lexbuf) }
  | "<<" { start_line true;
           TShOp(Ast.DecLeft,get_current_line_type lexbuf) }

  | "&" { start_line true; TAnd (get_current_line_type lexbuf) }
  | "^" { start_line true; TXor(get_current_line_type lexbuf) }

  (* ----- cpp constructs ----- *)
  | "##" { start_line true; TCppConcatOp }
  | (( ("#" [' ' '\t']* "define" [' ' '\t']+)) as def)
    ( (letter (letter |digit)*) as ident)
      { start_line true;
        let (arity,line,lline,offset,col,strbef,straft,pos) as lt =
          get_current_line_type lexbuf in
        let off = String.length def in
        (* -1 in the code below because the ident is not at the line start *)
        TDefine
          (lt,
           check_var ident
             (arity,line,lline,offset+off,col+off,[],[],Ast0.NoMetaPos)) }
  | (( ("#" [' ' '\t']* "define" [' ' '\t']+)) as def)
    ( (letter (letter | digit)*) as ident)
    '('
      { start_line true;
        let (arity,line,lline,offset,col,strbef,straft,pos) as lt =
          get_current_line_type lexbuf in
        let off = String.length def in
        TDefineParam
          (lt,
           check_var ident
             (* why pos here but not above? *)
             (arity,line,lline,offset+off,col+off,strbef,straft,pos),
           offset + off + (String.length ident),
           col + off + (String.length ident)) }
  | "#" [' ' '\t']* "include" [' ' '\t']* '"' [^ '"']+ '"'
      { TIncludeL
          (let str = tok lexbuf in
          let start = String.index str '"' in
          let finish = String.rindex str '"' in
          start_line true;
          (process_include start finish str,get_current_line_type lexbuf)) }
  | "#" [' ' '\t']* "include" [' ' '\t']* '<' [^ '>']+ '>'
      { TIncludeNL
          (let str = tok lexbuf in
          let start = String.index str '<' in
          let finish = String.rindex str '>' in
          start_line true;
          (process_include start finish str,get_current_line_type lexbuf)) }
  (* other cpp directives are kept whole as pragmas, + lines only *)
  | "#" [' ' '\t']* "if" [^'\n']*
  | "#" [' ' '\t']* "ifdef" [^'\n']*
  | "#" [' ' '\t']* "ifndef" [^'\n']*
  | "#" [' ' '\t']* "else" [^'\n']*
  | "#" [' ' '\t']* "elif" [^'\n']*
  | "#" [' ' '\t']* "endif" [^'\n']*
  | "#" [' ' '\t']* "error" [^'\n']*
      { start_line true; check_plus_linetype (tok lexbuf);
        TPragma (tok lexbuf, get_current_line_type lexbuf) }
  | "/*"
      { start_line true; check_plus_linetype (tok lexbuf);
        (* second argument to TPragma is not quite right, because
           it represents only the first token of the comemnt, but that
           should be good enough *)
        TPragma ("/*"^(comment lexbuf), get_current_line_type lexbuf) }
  (* ----- old/new file headers ----- *)
  | "---" [^'\n']*
      { (if !current_line_started
        then lexerr "--- must be at the beginning of the line" "");
        start_line true;
        TMinusFile
          (let str = tok lexbuf in
          (drop_spaces(String.sub str 3 (String.length str - 3)),
           (get_current_line_type lexbuf))) }
  | "+++" [^'\n']*
      { (if !current_line_started
        then lexerr "+++ must be at the beginning of the line" "");
        start_line true;
        TPlusFile
          (let str = tok lexbuf in
          (drop_spaces(String.sub str 3 (String.length str - 3)),
           (get_current_line_type lexbuf))) }

  (* ----- identifiers and literals ----- *)
  | letter (letter | digit)*
      { start_line true; id_tokens lexbuf }

  | "'" { start_line true;
          TChar(char lexbuf,get_current_line_type lexbuf) }
  | '"' { start_line true;
          TString(string lexbuf,(get_current_line_type lexbuf)) }
  | (real as x) { start_line true;
                  TFloat(x,(get_current_line_type lexbuf)) }
  | ((( decimal | hexa | octal)
      ( ['u' 'U']
      | ['l' 'L']
      | (['l' 'L'] ['u' 'U'])
      | (['u' 'U'] ['l' 'L'])
      | (['u' 'U'] ['l' 'L'] ['l' 'L'])
      | (['l' 'L'] ['l' 'L'])
      )?
    ) as x) { start_line true; TInt(x,(get_current_line_type lexbuf)) }

  (* isomorphism rule arrows *)
  | "<=>" { TIso }
  | "=>" { TRightIso }

  | eof { EOF }

  | _ { lexerr "unrecognised symbol, in token rule: " (tok lexbuf) }
| 712 | |
| 713 | |
(* Lex the body of a character constant (the opening quote was consumed
   by the caller), up to and including the closing quote; returns the
   constant's text without the quotes.  The escape rules match longer
   lexemes than the one-character catch-all, so they take precedence
   under ocamllex's longest-match rule. *)
and char = parse
  | (_ as x) "'" { String.make 1 x }
  (* octal escape: \o, \oo, or \ooo *)
  | (("\\" (oct | oct oct | oct oct oct)) as x "'") { x }
  (* hexadecimal escape: \xh or \xhh *)
  | (("\\x" (hex | hex hex)) as x "'") { x }
  (* single-character escape; only the listed escapes are accepted *)
  | (("\\" (_ as v)) as x "'")
      { (match v with
        | 'n' -> () | 't' -> () | 'v' -> () | 'b' -> ()
        | 'r' -> () | 'f' -> () | 'a' -> ()
        | '\\' -> () | '?' -> () | '\'' -> () | '"' -> ()
        | 'e' -> ()
        | _ -> lexerr "unrecognised symbol: " (tok lexbuf)
        );
        x
      }
  | _ { lexerr "unrecognised symbol: " (tok lexbuf) }
| 729 | |
(* Lex the body of a string literal (the opening quote was consumed by
   the caller) up to the closing quote, returning the contents with
   escape sequences kept verbatim.  The escape rules match longer
   lexemes than the catch-all (_ as x) rule, so they take precedence
   under ocamllex's longest-match rule. *)
and string  = parse
  | '"' { "" }
  | (_ as x) { Common.string_of_char x ^ string lexbuf }
  (* octal escape: \o, \oo, or \ooo *)
  | ("\\" (oct | oct oct | oct oct oct)) as x { x ^ string lexbuf }
  (* hexadecimal escape: \xh or \xhh *)
  | ("\\x" (hex | hex hex)) as x { x ^ string lexbuf }
  (* single-character escape; only the listed escapes are accepted *)
  | ("\\" (_ as v)) as x
      {
        (match v with
        | 'n' -> () | 't' -> () | 'v' -> () | 'b' -> () | 'r' -> ()
        | 'f' -> () | 'a' -> ()
        | '\\' -> () | '?' -> () | '\'' -> () | '"' -> ()
        | 'e' -> ()
        | '\n' -> ()
        | '(' -> () | '|' -> () | ')' -> ()
        | _ -> lexerr "unrecognised symbol:" (tok lexbuf)
        );
        x ^ string lexbuf
      }
  | _ { lexerr "unrecognised symbol: " (tok lexbuf) }
| 749 | |
(* Lex the body of a C-style comment (the opening "/*" was consumed by
   the caller), returning its text including the terminating "*" "/".
   A "+" before any other token on a line inside the comment is treated
   as the plus-line marker and dropped from the returned text. *)
and comment = parse
  | "*/" { start_line true; tok lexbuf }
  | ['\n' '\r' '\011' '\012']
      { reset_line lexbuf; let s = tok lexbuf in s ^ comment lexbuf }
  | "+" { pass_zero();
          if !current_line_started
          then (start_line true; let s = tok lexbuf in s^(comment lexbuf))
          else comment lexbuf }
  (* noteopti: *)
  | [^ '*'] { start_line true; let s = tok lexbuf in s ^ comment lexbuf }
  | [ '*'] { start_line true; let s = tok lexbuf in s ^ comment lexbuf }
  | _
      { start_line true; let s = tok lexbuf in
        Common.pr2 ("LEXER: unrecognised symbol in comment:"^s);
        s ^ comment lexbuf
      }
| 766 | |