1 (**************************************************************************)
5 (* François Pottier, INRIA Rocquencourt *)
6 (* Yann Régis-Gianas, PPS, Université Paris Diderot *)
8 (* Copyright 2005-2008 Institut National de Recherche en Informatique *)
9 (* et en Automatique. All rights reserved. This file is distributed *)
10 (* under the terms of the GNU Library General Public License, with the *)
11 (* special exception on linking described in file LICENSE. *)
13 (**************************************************************************)
15 (* An ocamlyacc-style, or Menhir-style, parser requires access to
16 the lexer, which must be parameterized with a lexing buffer, and
17 to the lexing buffer itself, where it reads position information. *)
19 (* This traditional API is convenient when used with ocamllex, but
20 inelegant when used with other lexer generators. *)
(* A traditional parser is invoked with a lexing function — which reads
   the next token out of a lexing buffer — and with the lexing buffer
   itself, from which it reads position information; it produces a
   semantic value. *)
type ('token, 'semantic_value) traditional =
  (Lexing.lexbuf -> 'token) -> Lexing.lexbuf -> 'semantic_value
(* This revised API is independent of any lexer generator. Here, the
   parser only requires access to the lexer, and the lexer takes no
   parameters. The tokens returned by the lexer may contain position
   information. *)
(* A revised parser is invoked with a parameterless lexing function
   only; the lexing buffer has disappeared from the interface, so any
   position information must travel inside the tokens themselves. *)
type ('token, 'semantic_value) revised =
  (unit -> 'token) -> 'semantic_value
33 (* --------------------------------------------------------------------------- *)
35 (* Converting a traditional parser, produced by ocamlyacc or Menhir,
36 into a revised parser. *)
(* A token of the revised lexer is essentially a triple of a token
   of the traditional lexer (or raw token), a start position, and
   an end position. The three [get] functions are accessors. *)
(* We do not require the type ['token] to actually be a triple type.
   This enables complex applications where it is a record type with
   more than three fields. It also enables simple applications where
   positions are of no interest, so ['token] is just ['raw_token]
   and [get_startp] and [get_endp] return dummy positions. *)
(* [traditional2revised get_raw_token get_startp get_endp parser]
   turns the traditional-API [parser] into a revised-API parser, i.e.
   one driven by a function of type [unit -> 'token] whose tokens
   carry their own positions, extracted via the three accessors. *)
let traditional2revised
  (get_raw_token : 'token -> 'raw_token)
  (get_startp : 'token -> Lexing.position)
  (get_endp : 'token -> Lexing.position)
  (parser : ('raw_token, 'semantic_value) traditional)
: ('token, 'semantic_value) revised =

  (* Accept a revised lexer. *)

  fun (lexer : unit -> 'token) ->

    (* Create a dummy lexing buffer. Its contents are never read; it
       exists only so that the positions written below have somewhere
       to live for the parser to read. *)

    let lexbuf : Lexing.lexbuf =
      Lexing.from_string ""
    in

    (* Wrap the revised lexer as a traditional lexer. A traditional
       lexer returns a raw token and updates the fields of the lexing
       buffer with new positions, which will be read by the parser. *)

    let lexer (lexbuf : Lexing.lexbuf) : 'raw_token =
      let token : 'token = lexer() in
      lexbuf.Lexing.lex_start_p <- get_startp token;
      lexbuf.Lexing.lex_curr_p <- get_endp token;
      get_raw_token token
    in

    (* Invoke the traditional parser. *)

    parser lexer lexbuf
80 (* --------------------------------------------------------------------------- *)
82 (* Converting a revised parser back to a traditional parser. *)
(* [revised2traditional make_token parser] turns the revised-API
   [parser] into a traditional-API parser, i.e. one driven by a
   traditional lexer and a lexing buffer. [make_token] builds a
   position-carrying token out of a raw token and the start and end
   positions found in the buffer. *)
let revised2traditional
  (make_token : 'raw_token -> Lexing.position -> Lexing.position -> 'token)
  (parser : ('token, 'semantic_value) revised)
: ('raw_token, 'semantic_value) traditional =

  (* Accept a traditional lexer and a lexing buffer. *)

  fun (lexer : Lexing.lexbuf -> 'raw_token) (lexbuf : Lexing.lexbuf) ->

    (* Wrap the traditional lexer as a revised lexer: read a raw
       token, then pack it together with the positions that the
       traditional lexer has just deposited in the lexing buffer. *)

    let lexer () : 'token =
      let token : 'raw_token = lexer lexbuf in
      make_token token lexbuf.Lexing.lex_start_p lexbuf.Lexing.lex_curr_p
    in

    (* Invoke the revised parser. *)

    parser lexer
104 (* --------------------------------------------------------------------------- *)
106 (* Simplified versions of the above, where concrete triples are used. *)
module Simplified = struct

  (* Convert a traditional parser into a revised parser whose tokens
     are concrete [(raw_token, startp, endp)] triples: the three
     accessors are just the triple projections. *)
  let traditional2revised parser =
    traditional2revised
      (fun (token, _, _)  -> token)
      (fun (_, startp, _) -> startp)
      (fun (_, _, endp)   -> endp)
      parser

  (* Convert a revised parser over concrete triples back into a
     traditional parser: the token constructor is just the triple
     constructor. *)
  let revised2traditional parser =
    revised2traditional
      (fun token startp endp -> (token, startp, endp))
      parser

end