path: root/cparser/Lexer.mll
author     François Pottier <francois.pottier@inria.fr>  2015-10-23 13:16:35 +0200
committer  François Pottier <francois.pottier@inria.fr>  2015-10-23 13:16:35 +0200
commit     8d1a15f7f5c8fbea194a67c49c5aa10d6371b267 (patch)
tree       9e96a2ba50c0ca60757edfdf85e0a54ba9e44717 /cparser/Lexer.mll
parent     dca619cc34e64e63fe36ec9b5acdb42aafe665a8 (diff)
Lexer update: use Menhir's incremental API instead of Menhir's traditional API.
This means that CompCert must now be compiled in --table mode. At this point, the error message for a syntax error is still just "syntax error".
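For context, the sketch below shows the general shape of the Menhir incremental-API loop that the new [invoke_pre_parser] in this commit follows. It is a minimal illustration, not CompCert code: the grammar module [P] and its entry point [main] are hypothetical placeholders for a parser generated in --table mode, and only the MenhirInterpreter functions are real Menhir API calls. The entry point is applied to (), mirroring the diff below; more recent Menhir releases expect an initial Lexing.position instead.

  (* Minimal sketch: drive a hypothetical --table parser [P] with Menhir's
     incremental API and keep error handling in the caller's hands. *)
  let parse (lexer : Lexing.lexbuf -> P.token) (lexbuf : Lexing.lexbuf) : bool =
    let module I = P.MenhirInterpreter in
    (* Initial checkpoint for the entry point [main]. *)
    let checkpoint = P.Incremental.main ()
    (* Package the lexer and its buffer as a token supplier. *)
    and supplier = I.lexer_lexbuf_to_supplier lexer lexbuf
    (* [succeed] receives the final semantic value; [fail] receives the
       checkpoint at the point of failure, from which a precise message
       can later be derived instead of a bare "syntax error". *)
    and succeed _v = true
    and fail _checkpoint = false
    in
    I.loop_handle succeed fail supplier checkpoint

Compared with the traditional monolithic call (P.main lexer lexbuf), the incremental loop hands the failure checkpoint back to the caller, which is what makes the custom error handling announced in this commit message possible.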
Diffstat (limited to 'cparser/Lexer.mll')
-rw-r--r--  cparser/Lexer.mll | 19
1 file changed, 16 insertions(+), 3 deletions(-)
diff --git a/cparser/Lexer.mll b/cparser/Lexer.mll
index ab4e03fe..62764a48 100644
--- a/cparser/Lexer.mll
+++ b/cparser/Lexer.mll
@@ -458,12 +458,25 @@ and singleline_comment = parse
       Queue.push token tokens;
       token
 
-  let tokens_stream filename text : token coq_Stream =
-    let tokens = Queue.create () in
+  (* [invoke_pre_parser] is in charge of calling the pre_parser. It uses
+     the incremental API, which allows us to do our own error handling. *)
+
+  let invoke_pre_parser filename text lexer =
     let lexbuf = Lexing.from_string text in
     lexbuf.lex_curr_p <- {lexbuf.lex_curr_p with pos_fname = filename; pos_lnum = 1};
+    let module I = Pre_parser.MenhirInterpreter in
+    let checkpoint = Pre_parser.Incremental.translation_unit_file()
+    and supplier = I.lexer_lexbuf_to_supplier lexer lexbuf
+    and succeed () = ()
+    and fail checkpoint =
+      Cerrors.fatal_error_raw "syntax error"
+    in
+    I.loop_handle succeed fail supplier checkpoint
+
+  let tokens_stream filename text : token coq_Stream =
     contexts_stk := [init_ctx];
-    Pre_parser.translation_unit_file (lexer tokens) lexbuf;
+    let tokens = Queue.create () in
+    invoke_pre_parser filename text (lexer tokens);
     assert (List.length !contexts_stk = 1);
     let rec compute_token_stream () =
       let loop t v =