path: root/cparser
author     David Monniaux <david.monniaux@univ-grenoble-alpes.fr>  2019-07-19 18:25:09 +0200
committer  David Monniaux <david.monniaux@univ-grenoble-alpes.fr>  2019-07-19 18:25:09 +0200
commit     3b79923a6c9fa8c76916df1eecfdecd7ae2124a5 (patch)
tree       98b27b88ea7195a244eff90eaa5f63028ad518a6 /cparser
parent     9bc337d05eed466e2bfc9b18aa35fac34d3954a9 (diff)
parent     91381b65f5aa76e5195caae9ef331b3f5f95afaf (diff)
download   compcert-kvx-3b79923a6c9fa8c76916df1eecfdecd7ae2124a5.tar.gz
           compcert-kvx-3b79923a6c9fa8c76916df1eecfdecd7ae2124a5.zip
Merge branch 'master' of https://github.com/AbsInt/CompCert into mppa-work-upstream-merge
Diffstat (limited to 'cparser')
-rw-r--r--  cparser/Builtins.ml | 54
-rw-r--r--  cparser/Builtins.mli | 25
-rw-r--r--  cparser/C.mli | 7
-rw-r--r--  cparser/Cabs.v | 54
-rw-r--r--  cparser/Cabshelper.ml | 13
-rw-r--r--  cparser/Checks.ml | 362
-rw-r--r--  cparser/Checks.mli | 2
-rw-r--r--  cparser/Cutil.ml | 7
-rw-r--r--  cparser/Cutil.mli | 2
-rw-r--r--  cparser/Diagnostics.ml | 8
-rw-r--r--  cparser/Diagnostics.mli | 4
-rw-r--r--  cparser/Elab.ml | 43
-rw-r--r--  cparser/Elab.mli | 4
-rw-r--r--  cparser/Env.ml | 40
-rw-r--r--  cparser/Env.mli | 7
-rw-r--r--  cparser/GCC.ml | 4
-rw-r--r--  cparser/Lexer.mll | 247
-rw-r--r--  cparser/MenhirLib/Alphabet.v | 320
-rw-r--r--  cparser/MenhirLib/Automaton.v | 167
-rw-r--r--  cparser/MenhirLib/Grammar.v | 166
-rw-r--r--  cparser/MenhirLib/Interpreter.v | 228
-rw-r--r--  cparser/MenhirLib/Interpreter_complete.v | 686
-rw-r--r--  cparser/MenhirLib/Interpreter_correct.v | 228
-rw-r--r--  cparser/MenhirLib/Interpreter_safe.v | 275
-rw-r--r--  cparser/MenhirLib/Main.v | 92
-rw-r--r--  cparser/MenhirLib/Tuples.v | 49
-rw-r--r--  cparser/MenhirLib/Validator_complete.v | 542
-rw-r--r--  cparser/MenhirLib/Validator_safe.v | 414
-rw-r--r--  cparser/PackedStructs.ml | 2
-rw-r--r--  cparser/Parse.ml | 13
-rw-r--r--  cparser/Parser.vy | 595
-rw-r--r--  cparser/Rename.ml | 2
-rw-r--r--  cparser/Transform.ml | 2
-rw-r--r--  cparser/Unblock.ml | 2
-rw-r--r--  cparser/pre_parser.mly | 10
35 files changed, 846 insertions, 3830 deletions
diff --git a/cparser/Builtins.ml b/cparser/Builtins.ml
deleted file mode 100644
index 8eb1abfd..00000000
--- a/cparser/Builtins.ml
+++ /dev/null
@@ -1,54 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Xavier Leroy, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-(* Compiler built-ins *)
-
-open C
-open Cutil
-
-let env = ref Env.empty
-let idents = ref []
-let decls = ref []
-
-let environment () = !env
-let identifiers () = !idents
-let declarations () = List.rev !decls
-
-let add_typedef (s, ty) =
- let (id, env') = Env.enter_typedef !env s ty in
- env := env';
- idents := id :: !idents;
- decls := {gdesc = Gtypedef(id, ty); gloc = no_loc} :: !decls
-
-let add_function (s, (res, args, va)) =
- let ty =
- TFun(res,
- Some (List.map (fun ty -> (Env.fresh_ident "", ty)) args),
- va, []) in
- let (id, env') = Env.enter_ident !env s Storage_extern ty in
- env := env';
- idents := id :: !idents;
- decls := {gdesc = Gdecl(Storage_extern, id, ty, None); gloc = no_loc} :: !decls
-
-type t = {
- typedefs: (string * C.typ) list;
- functions: (string * (C.typ * C.typ list * bool)) list
-}
-
-let set blt =
- env := Env.empty;
- idents := [];
- List.iter add_typedef blt.typedefs;
- List.iter add_function blt.functions
diff --git a/cparser/Builtins.mli b/cparser/Builtins.mli
deleted file mode 100644
index 7f9d78a9..00000000
--- a/cparser/Builtins.mli
+++ /dev/null
@@ -1,25 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Xavier Leroy, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-val environment: unit -> Env.t
-val identifiers: unit -> C.ident list
-val declarations: unit -> C.globdecl list
-
-type t = {
- typedefs: (string * C.typ) list;
- functions: (string * (C.typ * C.typ list * bool)) list
-}
-
-val set: t -> unit
diff --git a/cparser/C.mli b/cparser/C.mli
index cc8d4065..15717565 100644
--- a/cparser/C.mli
+++ b/cparser/C.mli
@@ -264,3 +264,10 @@ and globdecl_desc =
| Gpragma of string (* #pragma directive *)
type program = globdecl list
+
+(** Builtin types and functions *)
+
+type builtins = {
+ builtin_typedefs: (string * typ) list;
+ builtin_functions: (string * (typ * typ list * bool)) list
+}
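
The builtins record added to C.mli replaces the mutable state formerly kept in Builtins.ml (deleted above). As a minimal OCaml sketch, a value of this type could look as follows; the value name and the particular entries are invented for illustration, and real front-ends such as GCC.ml later in this patch declare many more:

    open C

    (* hypothetical, minimal set of built-ins *)
    let my_builtins : builtins = {
      builtin_typedefs = [
        "__builtin_va_list", TPtr (TVoid [], [])
      ];
      builtin_functions = [
        (* name, (return type, argument types, variadic?) *)
        "__builtin_abort", (TVoid [], [], false)
      ]
    }
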
diff --git a/cparser/Cabs.v b/cparser/Cabs.v
index 5865ab69..5f12e8a1 100644
--- a/cparser/Cabs.v
+++ b/cparser/Cabs.v
@@ -20,7 +20,7 @@ Parameter string : Type.
(* OCaml's int64 type, used to represent individual characters in literals. *)
Parameter char_code : Type.
(* Context information. *)
-Parameter cabsloc : Type.
+Parameter loc : Type.
Record floatInfo := {
isHex_FI:bool;
@@ -51,7 +51,7 @@ Inductive typeSpecifier := (* Merge all specifiers into one type *)
* They also have a list of __attribute__s that appeared between the
* keyword and the type name (definitions only) *)
| Tstruct_union : structOrUnion -> option string -> option (list field_group) -> list attribute -> typeSpecifier
- | Tenum : option string -> option (list (string * option expression * cabsloc)) -> list attribute -> typeSpecifier
+ | Tenum : option string -> option (list (string * option expression * loc)) -> list attribute -> typeSpecifier
with storage :=
AUTO | STATIC | EXTERN | REGISTER | TYPEDEF
@@ -87,18 +87,18 @@ with decl_type :=
| PROTO_OLD : decl_type -> list string -> decl_type
with parameter :=
- | PARAM : list spec_elem -> option string -> decl_type -> list attribute -> cabsloc -> parameter
+ | PARAM : list spec_elem -> option string -> decl_type -> list attribute -> loc -> parameter
(* The optional expression is the bitfield *)
with field_group :=
- | Field_group : list spec_elem -> list (option name * option expression) -> cabsloc -> field_group
+ | Field_group : list spec_elem -> list (option name * option expression) -> loc -> field_group
(* The decl_type is in the order in which they are printed. Only the name of
* the declared identifier is pulled out. *)
(* e.g: in "int *x", "*x" is the declarator; "x" will be pulled out as *)
(* the string, and decl_type will be PTR([], JUSTBASE) *)
with name :=
- | Name : string -> decl_type -> list attribute -> cabsloc -> name
+ | Name : string -> decl_type -> list attribute -> loc -> name
(* A variable declarator ("name") with an initializer *)
with init_name :=
@@ -161,9 +161,9 @@ with initwhat :=
| ATINDEX_INIT : expression -> initwhat
with attribute :=
- | GCC_ATTR : list gcc_attribute -> cabsloc -> attribute
- | PACKED_ATTR : list expression -> cabsloc -> attribute
- | ALIGNAS_ATTR : list expression -> cabsloc -> attribute
+ | GCC_ATTR : list gcc_attribute -> loc -> attribute
+ | PACKED_ATTR : list expression -> loc -> attribute
+ | ALIGNAS_ATTR : list expression -> loc -> attribute
with gcc_attribute :=
| GCC_ATTR_EMPTY
@@ -194,31 +194,31 @@ Definition asm_flag := (bool * list char_code)%type.
** Declaration definition (at toplevel)
*)
Inductive definition :=
- | FUNDEF : list spec_elem -> name -> list definition -> statement -> cabsloc -> definition
- | DECDEF : init_name_group -> cabsloc -> definition (* global variable(s), or function prototype *)
- | PRAGMA : string -> cabsloc -> definition
+ | FUNDEF : list spec_elem -> name -> list definition -> statement -> loc -> definition
+ | DECDEF : init_name_group -> loc -> definition (* global variable(s), or function prototype *)
+ | PRAGMA : string -> loc -> definition
(*
** statements
*)
with statement :=
- | NOP : cabsloc -> statement
- | COMPUTATION : expression -> cabsloc -> statement
- | BLOCK : list statement -> cabsloc -> statement
- | If : expression -> statement -> option statement -> cabsloc -> statement
- | WHILE : expression -> statement -> cabsloc -> statement
- | DOWHILE : expression -> statement -> cabsloc -> statement
- | FOR : option for_clause -> option expression -> option expression -> statement -> cabsloc -> statement
- | BREAK : cabsloc -> statement
- | CONTINUE : cabsloc -> statement
- | RETURN : option expression -> cabsloc -> statement
- | SWITCH : expression -> statement -> cabsloc -> statement
- | CASE : expression -> statement -> cabsloc -> statement
- | DEFAULT : statement -> cabsloc -> statement
- | LABEL : string -> statement -> cabsloc -> statement
- | GOTO : string -> cabsloc -> statement
- | ASM : list cvspec -> bool -> list char_code -> list asm_operand -> list asm_operand -> list asm_flag -> cabsloc -> statement
+ | NOP : loc -> statement
+ | COMPUTATION : expression -> loc -> statement
+ | BLOCK : list statement -> loc -> statement
+ | If : expression -> statement -> option statement -> loc -> statement
+ | WHILE : expression -> statement -> loc -> statement
+ | DOWHILE : expression -> statement -> loc -> statement
+ | FOR : option for_clause -> option expression -> option expression -> statement -> loc -> statement
+ | BREAK : loc -> statement
+ | CONTINUE : loc -> statement
+ | RETURN : option expression -> loc -> statement
+ | SWITCH : expression -> statement -> loc -> statement
+ | CASE : expression -> statement -> loc -> statement
+ | DEFAULT : statement -> loc -> statement
+ | LABEL : string -> statement -> loc -> statement
+ | GOTO : string -> loc -> statement
+ | ASM : list cvspec -> bool -> list char_code -> list asm_operand -> list asm_operand -> list asm_flag -> loc -> statement
| DEFINITION : definition -> statement (*definition or declaration of a variable or type*)
with for_clause :=
diff --git a/cparser/Cabshelper.ml b/cparser/Cabshelper.ml
index 958f242c..22f3b3c7 100644
--- a/cparser/Cabshelper.ml
+++ b/cparser/Cabshelper.ml
@@ -16,11 +16,6 @@
open Cabs
-let cabslu = {lineno = -10;
- filename = "cabs loc unknown";
- byteno = -10;
- ident = 0}
-
(*********** HELPER FUNCTIONS **********)
let rec isStatic = function
@@ -44,13 +39,13 @@ let rec isTypedef = function
| _ :: rest -> isTypedef rest
-let get_definitionloc (d : definition) : cabsloc =
+let get_definitionloc (d : definition) : loc =
match d with
| FUNDEF(_, _, _, _, l) -> l
| DECDEF(_, l) -> l
| PRAGMA(_, l) -> l
-let get_statementloc (s : statement) : cabsloc =
+let get_statementloc (s : statement) : loc =
begin
match s with
| NOP(loc) -> loc
@@ -72,8 +67,8 @@ begin
| ASM(_,_,_,_,_,_,loc) -> loc
end
-let string_of_cabsloc l =
+let string_of_loc l =
Printf.sprintf "%s:%d" l.filename l.lineno
-let format_cabsloc pp l =
+let format_loc pp l =
Format.fprintf pp "%s:%d" l.filename l.lineno
diff --git a/cparser/Checks.ml b/cparser/Checks.ml
index a30cde7d..17caf19a 100644
--- a/cparser/Checks.ml
+++ b/cparser/Checks.ml
@@ -18,44 +18,68 @@ open Diagnostics
open Cutil
open Env
-let unknown_attrs loc attrs =
- let unknown attr =
- let attr_class = class_of_attribute attr in
- if attr_class = Attr_unknown then
- warning loc Unknown_attribute
- "unknown attribute '%s' ignored" (name_of_attribute attr) in
- List.iter unknown attrs
+(* AST traversal functions *)
-let unknown_attrs_typ env loc ty =
- let attr = attributes_of_type env ty in
- unknown_attrs loc attr
+let fold_over_stmt_loc ~(expr: 'a -> location -> exp -> 'a)
+ ~(decl: 'a -> location -> decl -> 'a)
+ (a: 'a) (s: stmt) : 'a =
+ let rec fold a s =
+ match s.sdesc with
+ | Sskip -> a
+ | Sbreak -> a
+ | Scontinue -> a
+ | Slabeled(_, s1) -> fold a s1
+ | Sgoto _ -> a
+ | Sreturn None -> a
+ | Sreturn (Some e) -> expr a s.sloc e
+ | Sasm(_, _, outs, ins, _) -> asm_operands (asm_operands a s.sloc outs) s.sloc ins
+ | Sdo e -> expr a s.sloc e
+ | Sif (e, s1, s2) -> fold (fold (expr a s.sloc e) s1) s2
+ | Sseq (s1, s2) -> fold (fold a s1) s2
+ | Sfor (s1, e, s2, s3) -> fold (fold (expr (fold a s1) s.sloc e) s2) s3
+ | Swhile(e, s1) -> fold (expr a s.sloc e) s1
+ | Sdowhile (s1, e) -> expr (fold a s1) s.sloc e
+ | Sswitch (e, s1) -> fold (expr a s.sloc e) s1
+ | Sblock sl -> List.fold_left fold a sl
+ | Sdecl d -> decl a s.sloc d
+ and asm_operands a loc l =
+ List.fold_left (fun a (_, _, e) -> expr a loc e) a l
+ in fold a s
-let unknown_attrs_decl env loc (sto, id, ty, init) =
- unknown_attrs_typ env loc ty
+let iter_over_stmt_loc
+ ?(expr = fun loc e -> ())
+ ?(decl = fun loc decl -> ())
+ (s: stmt) : unit =
+ fold_over_stmt_loc ~expr: (fun () loc e -> expr loc e)
+ ~decl: (fun () loc d -> decl loc d)
+ () s
+
+let fold_over_stmt ~(expr: 'a -> exp -> 'a)
+ ~(decl: 'a -> location -> decl -> 'a)
+ (a: 'a) (s: stmt) : 'a =
+ fold_over_stmt_loc ~expr:(fun a _ e -> expr a e) ~decl:decl a s
+
+let iter_over_stmt ?(expr = fun e -> ())
+ ?(decl = fun loc decl -> ())
+ (s:stmt) : unit =
+ fold_over_stmt_loc ~expr:(fun () _ e -> expr e)
+ ~decl:(fun () loc d -> decl loc d) () s
+
+let fold_over_init ~(expr: 'a -> exp -> 'a) (a: 'a) (i: init) : 'a =
+ let rec fold a = function
+ | Init_single e -> expr a e
+ | Init_array il -> List.fold_left fold a il
+ | Init_struct (_, sl) -> List.fold_left (fun a (_,i) -> fold a i) a sl
+ | Init_union (_, _, ui) -> fold a ui
+ in fold a i
-let rec unknown_attrs_stmt env s =
- match s.sdesc with
- | Sskip
- | Sbreak
- | Scontinue
- | Slabeled _
- | Sgoto _
- | Sreturn _
- | Sasm _
- | Sdo _ -> ()
- | Sif (_,s1,s2)
- | Sseq (s1,s2) ->
- unknown_attrs_stmt env s1;
- unknown_attrs_stmt env s2
- | Sfor (s1,e,s2,s3) ->
- unknown_attrs_stmt env s1;
- unknown_attrs_stmt env s2;
- unknown_attrs_stmt env s3
- | Swhile(_,s)
- | Sdowhile (s,_)
- | Sswitch (_,s) -> unknown_attrs_stmt env s
- | Sblock sl -> List.iter (unknown_attrs_stmt env) sl
- | Sdecl d -> unknown_attrs_decl env s.sloc d
+let iter_over_init ~(expr: exp -> unit) (i:init) : unit =
+ fold_over_init ~expr:(fun () e -> expr e) () i
+
+let fold_over_decl ~(expr: 'a -> exp -> 'a) (a: 'a) loc (sto, id, ty, init) : 'a=
+ match init with
+ | Some i -> fold_over_init ~expr a i
+ | None -> a
let traverse_program
?(decl = fun env loc d -> ())
@@ -93,7 +117,27 @@ let traverse_program
pragma env g.gloc s;
env in
traverse env gl in
- traverse (Builtins.environment ()) p
+ traverse (Env.initial ()) p
+
+(* Unknown attributes warning *)
+
+let unknown_attrs loc attrs =
+ let unknown attr =
+ let attr_class = class_of_attribute attr in
+ if attr_class = Attr_unknown then
+ warning loc Unknown_attribute
+ "unknown attribute '%s' ignored" (name_of_attribute attr) in
+ List.iter unknown attrs
+
+let unknown_attrs_typ env loc ty =
+ let attr = attributes_of_type env ty in
+ unknown_attrs loc attr
+
+let unknown_attrs_decl env loc (sto, id, ty, init) =
+ unknown_attrs_typ env loc ty
+
+let unknown_attrs_stmt env s =
+ iter_over_stmt ~decl:(unknown_attrs_decl env) s
let unknown_attrs_program p =
let decl env loc d =
@@ -122,6 +166,7 @@ let unknown_attrs_program p =
~enum:enum
p
+(* Unused variables and parameters warning *)
let rec vars_used_expr env e =
match e.edesc with
@@ -143,83 +188,21 @@ let rec vars_used_expr env e =
let env = vars_used_expr env e in
List.fold_left vars_used_expr env p
-and vars_used_init env = function
- | Init_single e -> vars_used_expr env e
- | Init_array al -> List.fold_left vars_used_init env al
- | Init_struct (_,sl) -> List.fold_left (fun env (_,i) -> vars_used_init env i) env sl
- | Init_union (_,_,ui) -> vars_used_init env ui
-
-let rec vars_used_stmt env s =
- match s.sdesc with
- | Sbreak
- | Scontinue
- | Sgoto _
- | Sreturn None
- | Sskip -> env
- | Sreturn (Some e)
- | Sdo e -> (vars_used_expr env e)
- | Sseq (s1,s2) ->
- let env = vars_used_stmt env s1 in
- vars_used_stmt env s2
- | Sif (e,s1,s2) ->
- let env = vars_used_expr env e in
- let env = vars_used_stmt env s1 in
- vars_used_stmt env s2
- | Sfor (s1,e,s2,s3) ->
- let env = vars_used_expr env e in
- let env = vars_used_stmt env s1 in
- let env = vars_used_stmt env s2 in
- vars_used_stmt env s3
- | Sswitch (e,s)
- | Swhile (e,s)
- | Sdowhile (s,e) ->
- let env = vars_used_expr env e in
- vars_used_stmt env s
- | Sblock sl -> List.fold_left vars_used_stmt env sl
- | Sdecl (sto,id,ty,init) ->
- let env = match init with
- | Some init ->vars_used_init env init
- | None -> env in
- env
- | Slabeled (lbl,s) -> vars_used_stmt env s
- | Sasm (attr,str,op,op2,constr) ->
- let vars_asm_op env (_,_,e) =
- vars_used_expr env e in
- let env = List.fold_left vars_asm_op env op in
- let env = List.fold_left vars_asm_op env op2 in
- env
-
-let unused_variable env used loc (id,ty) =
+and vars_used_init env init =
+ fold_over_init ~expr:vars_used_expr env init
+
+let vars_used_stmt env s =
+ fold_over_stmt ~expr: vars_used_expr
+ ~decl: (fold_over_decl ~expr: vars_used_expr) env s
+
+let unused_variable env used loc (id, ty) =
let attr = attributes_of_type env ty in
let unused_attr = find_custom_attributes ["unused";"__unused__"] attr <> [] in
if not ((IdentSet.mem id used) || unused_attr) then
warning loc Unused_variable "unused variable '%s'" id.name
-let rec unused_variables_stmt env used s =
- match s.sdesc with
- | Sbreak
- | Scontinue
- | Sgoto _
- | Sreturn _
- | Sskip
- | Sasm _
- | Sdo _ -> ()
- | Sseq (s1,s2)
- | Sif (_,s1,s2) ->
- unused_variables_stmt env used s1;
- unused_variables_stmt env used s2
- | Sfor (s1,e,s2,s3) ->
- unused_variables_stmt env used s1;
- unused_variables_stmt env used s2;
- unused_variables_stmt env used s3
- | Slabeled (_,s)
- | Sswitch (_,s)
- | Swhile (_,s)
- | Sdowhile (s,_) ->
- unused_variables_stmt env used s
- | Sblock sl -> List.iter (unused_variables_stmt env used) sl
- | Sdecl (sto,id,ty,init) ->
- unused_variable env used s.sloc (id,ty)
+let unused_variables_stmt env used s =
+ iter_over_stmt ~decl:(fun loc (sto, id, ty, init) -> unused_variable env used loc (id,ty)) s
let unused_variables p =
let fundef env loc fd =
@@ -229,3 +212,166 @@ let unused_variables p =
traverse_program
~fundef:fundef
p
+
+(* Warning for conditionals that cannot be transformed into linear code *)
+
+(* Compute the set of local variables that do not have their address taken *)
+
+let rec non_stack_locals_expr vars e =
+ match e.edesc with
+ | ECast (_,e) -> non_stack_locals_expr vars e
+ | EUnop (Oaddrof,e) ->
+ begin match e.edesc with
+ | EVar id ->
+ IdentSet.remove id vars
+ | _ -> vars
+ end
+ | EUnop (Oderef, e) ->
+ (* Special optimization *(& ...) is removed in SimplExpr *)
+ begin match e.edesc with
+ | EUnop (Oaddrof,e) -> non_stack_locals_expr vars e
+ | _ -> non_stack_locals_expr vars e
+ end
+ | EUnop (_, e) ->
+ non_stack_locals_expr vars e
+ | EBinop (_,e1,e2,_) ->
+ let vars = non_stack_locals_expr vars e1 in
+ non_stack_locals_expr vars e2
+ | EConditional (e1,e2,e3) ->
+ let vars = non_stack_locals_expr vars e1 in
+ let vars = non_stack_locals_expr vars e2 in
+ non_stack_locals_expr vars e3
+ | ECompound (_,init) -> non_stack_locals_init vars init
+ | ECall (e,p) ->
+ let vars = non_stack_locals_expr vars e in
+ List.fold_left non_stack_locals_expr vars p
+ | _ -> vars
+
+and non_stack_locals_init vars init =
+ fold_over_init ~expr:non_stack_locals_expr vars init
+
+let add_vars env vars (id,ty) =
+ let volatile = List.mem AVolatile (attributes_of_type env ty) in
+ if not volatile then
+ IdentSet.add id vars
+ else
+ vars
+
+let non_stack_locals_stmt env vars s =
+ let decl vars loc (sto, id, ty, init) =
+ let vars = match init with
+ | Some init -> non_stack_locals_init vars init
+ | None -> vars in
+ add_vars env vars (id,ty) in
+ fold_over_stmt ~expr:non_stack_locals_expr ~decl:decl
+ vars s
+
+(* Check whether an expression is safe and can be always evaluated *)
+
+let safe_cast env tfrom tto =
+ match unroll env tfrom, unroll env tto with
+ | (TInt _ | TPtr _ | TArray _ | TFun _ | TEnum _),
+ (TInt _ | TPtr _ | TEnum _) -> true
+ | TFloat _, TFloat _ -> true
+ | _, _ -> equal_types env tfrom tto
+
+let safe_expr vars env e =
+ let rec expr e =
+ match e.edesc with
+ | EConst _ | ESizeof _ | EAlignof _ | ECompound _ -> true
+ | EVar id -> (IdentSet.mem id vars) || not (is_scalar_type env e.etyp)
+ | ECast (ty, e) ->
+ safe_cast env e.etyp ty && expr e
+ | EUnop (op, e) ->
+ unop op e
+ | EBinop (op, e1, e2, ty) ->
+ binop op e1 e2
+ | EConditional _ -> false
+ | ECall _ -> false
+ and binop op e1 e2 =
+ let is_long_long_type ty =
+ match unroll env ty with
+ | TInt (ILongLong, _)
+ | TInt (IULongLong, _) -> true
+ | _ -> false in
+ match op with
+ | Oadd | Osub | Omul | Oand | Oor | Oxor | Oshl | Oshr ->
+ expr e1 && expr e2
+ | Oeq | One | Olt | Ogt | Ole | Oge ->
+ let not_long_long = not (is_long_long_type e1.etyp) && not (is_long_long_type e2.etyp) in
+ not_long_long && expr e1 && expr e2
+ | _ -> false
+ (* x.f if f has array or struct or union type *)
+ and unop op e =
+ match op with
+ | Ominus | Onot | Olognot | Oplus -> expr e
+ | Oaddrof ->
+ begin match e.edesc with
+ (* skip &*e *)
+ | EUnop (Oderef, e) -> expr e
+ (* skip &(e.f) *)
+ | EUnop (Odot f, e) -> expr e
+ | _ -> expr e
+ end
+ (* skip *&e *)
+ | Oderef ->
+ begin match e.edesc with
+ | EUnop (Oaddrof,e) -> expr e
+ | _ -> false
+ end
+ (* e.f is okay if f has array or composite type *)
+ | Odot m ->
+ let fld = field_of_dot_access env e.etyp m in
+ (is_array_type env fld.fld_typ || is_composite_type env fld.fld_typ) && expr e
+ | _ -> false in
+ expr e
+
+(* Check expressions if they contain conditionals that cannot be transformed in
+ linear code. The inner_cond parameter is used to mimic the translation of short
+ circuit logical or and logical and as well as conditional to if statements in
+ SimplExpr. *)
+
+let rec non_linear_cond_expr inner_cond vars env loc e =
+ match e.edesc with
+ | EConst _ | ESizeof _ | EAlignof _ | EVar _ -> ()
+ | ECast (_ , e) | EUnop (_, e)-> non_linear_cond_expr false vars env loc e
+ | EBinop (op, e1, e2, ty) ->
+ let inner_cond = match op with
+ | Ocomma -> inner_cond
+ | Ologand | Ologor -> true
+ | _ -> false
+ in
+ non_linear_cond_expr false vars env loc e1;
+ non_linear_cond_expr inner_cond vars env loc e2
+ | EConditional (c, e1, e2) ->
+ let can_cast = safe_cast env e1.etyp e.etyp && safe_cast env e2.etyp e.etyp in
+ if not can_cast || inner_cond || not (safe_expr vars env e1) || not (safe_expr vars env e2) then
+ warning loc Non_linear_cond_expr "conditional expression may not be linearized";
+ non_linear_cond_expr true vars env loc e1;
+ non_linear_cond_expr true vars env loc e2;
+ | ECompound (ty, init) -> non_linear_cond_init vars env loc init
+ | ECall (e, params) ->
+ non_linear_cond_expr false vars env loc e;
+ List.iter (non_linear_cond_expr false vars env loc) params
+
+and non_linear_cond_init vars env loc init =
+ iter_over_init ~expr:(non_linear_cond_expr false vars env loc) init
+
+let non_linear_cond_stmt vars env s =
+ let decl loc (sto, id, ty, init) =
+ match init with
+ | None -> ()
+ | Some init -> non_linear_cond_init vars env loc init in
+ iter_over_stmt_loc ~expr:(non_linear_cond_expr false vars env) ~decl:decl s
+
+let non_linear_conditional p =
+ if active_warning Non_linear_cond_expr && !Clflags.option_Obranchless then begin
+ let fundef env loc fd =
+ let vars = List.fold_left (add_vars env) IdentSet.empty fd.fd_params in
+ let vars = non_stack_locals_stmt env vars fd.fd_body in
+ non_linear_cond_stmt vars env fd.fd_body;
+ in
+ traverse_program
+ ~fundef:fundef
+ p
+ end
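
The fold/iter combinators introduced above replace the hand-written statement traversals of the previous version of Checks.ml. As an illustration of how a further check could reuse them, here is a hedged OCaml sketch that counts the function calls in a function body; both function names are invented for the example and are not part of this patch:

    (* hypothetical: count the calls occurring in an expression *)
    let rec calls_in_expr n e =
      match e.edesc with
      | ECall (f, args) ->
          List.fold_left calls_in_expr (calls_in_expr (n + 1) f) args
      | EUnop (_, e1) | ECast (_, e1) -> calls_in_expr n e1
      | EBinop (_, e1, e2, _) -> calls_in_expr (calls_in_expr n e1) e2
      | EConditional (e1, e2, e3) ->
          calls_in_expr (calls_in_expr (calls_in_expr n e1) e2) e3
      | ECompound (_, init) -> fold_over_init ~expr:calls_in_expr n init
      | _ -> n

    (* hypothetical: total number of calls in a function definition,
       including those in local initializers *)
    let calls_in_fundef fd =
      fold_over_stmt ~expr:calls_in_expr
        ~decl:(fold_over_decl ~expr:calls_in_expr)
        0 fd.fd_body
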
diff --git a/cparser/Checks.mli b/cparser/Checks.mli
index 4d61a5b8..cfd7b04d 100644
--- a/cparser/Checks.mli
+++ b/cparser/Checks.mli
@@ -16,3 +16,5 @@
val unknown_attrs_program: C.program -> unit
val unused_variables: C.program -> unit
+
+val non_linear_conditional : C.program -> unit
diff --git a/cparser/Cutil.ml b/cparser/Cutil.ml
index 7329767a..7a2f4828 100644
--- a/cparser/Cutil.ml
+++ b/cparser/Cutil.ml
@@ -29,7 +29,7 @@ let no_loc = ("", -1)
module Ident = struct
type t = ident
- let compare id1 id2 = Pervasives.compare id1.stamp id2.stamp
+ let compare id1 id2 = compare id1.stamp id2.stamp
end
module IdentSet = Set.Make(Ident)
@@ -821,6 +821,11 @@ let is_composite_type env t =
| TStruct _ | TUnion _ -> true
| _ -> false
+let is_array_type env t =
+ match unroll env t with
+ | TArray _ -> true
+ | _ -> false
+
let is_function_type env t =
match unroll env t with
| TFun _ -> true
diff --git a/cparser/Cutil.mli b/cparser/Cutil.mli
index 5a1e9af3..f6c4627d 100644
--- a/cparser/Cutil.mli
+++ b/cparser/Cutil.mli
@@ -166,6 +166,8 @@ val is_scalar_type : Env.t -> typ -> bool
(* Is type integer, float or pointer? *)
val is_composite_type : Env.t -> typ -> bool
(* Is type a struct or union? *)
+val is_array_type : Env.t -> typ -> bool
+ (* Is type an array type? *)
val is_function_type : Env.t -> typ -> bool
(* Is type a function type? (not pointer to function) *)
val is_function_pointer_type : Env.t -> typ -> bool
diff --git a/cparser/Diagnostics.ml b/cparser/Diagnostics.ml
index 51dcab47..012e4b66 100644
--- a/cparser/Diagnostics.ml
+++ b/cparser/Diagnostics.ml
@@ -102,6 +102,7 @@ type warning_type =
| Flexible_array_extensions
| Tentative_incomplete_static
| Reduced_alignment
+ | Non_linear_cond_expr
(* List of active warnings *)
let active_warnings: warning_type list ref = ref [
@@ -163,6 +164,7 @@ let string_of_warning = function
| Flexible_array_extensions -> "flexible-array-extensions"
| Tentative_incomplete_static -> "tentative-incomplete-static"
| Reduced_alignment -> "reduced-alignment"
+ | Non_linear_cond_expr -> "non-linear-cond-expr"
(* Activate the given warning *)
let activate_warning w () =
@@ -216,6 +218,7 @@ let wall () =
Flexible_array_extensions;
Tentative_incomplete_static;
Reduced_alignment;
+ Non_linear_cond_expr;
]
let wnothing () =
@@ -253,6 +256,7 @@ let werror () =
Flexible_array_extensions;
Tentative_incomplete_static;
Reduced_alignment;
+ Non_linear_cond_expr;
]
(* Generate the warning key for the message *)
@@ -437,6 +441,7 @@ let warning_options =
error_option Flexible_array_extensions @
error_option Tentative_incomplete_static @
error_option Reduced_alignment @
+ error_option Non_linear_cond_expr @
[Exact ("-Wfatal-errors"), Set error_fatal;
Exact ("-fdiagnostics-color"), Ignore; (* Either output supports it or no color *)
Exact ("-fno-diagnostics-color"), Unset color_diagnostics;
@@ -492,3 +497,6 @@ let crash exn =
let no_loc = ("", -1)
let file_loc file = (file,-10)
+
+let active_warning ty =
+ fst (classify_warning ty) <> SuppressedMsg
diff --git a/cparser/Diagnostics.mli b/cparser/Diagnostics.mli
index 6a3c11c8..0f0a0ea5 100644
--- a/cparser/Diagnostics.mli
+++ b/cparser/Diagnostics.mli
@@ -55,6 +55,7 @@ type warning_type =
| Flexible_array_extensions (** usange of structs with flexible arrays in structs and arrays *)
| Tentative_incomplete_static (** static tentative definition with incomplete type *)
| Reduced_alignment (** alignment reduction *)
+ | Non_linear_cond_expr (** condition that cannot be linearized *)
val warning : (string * int) -> warning_type -> ('a, Format.formatter, unit, unit, unit, unit) format6 -> 'a
(** [warning (f,c) w fmt arg1 ... argN] formats the arguments [arg1] to [argN] as warining according to
@@ -95,3 +96,6 @@ val file_loc : string -> string * int
val error_summary : unit -> unit
(** Print a summary containing the numbers of errors encountered *)
+
+val active_warning : warning_type -> bool
+(** Test whether a warning is active to avoid costly checks *)
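
active_warning lets a client skip an expensive analysis whose only effect would be a suppressed warning; the new non_linear_conditional check in Checks.ml uses it exactly this way, together with the -Obranchless option. A minimal hedged sketch of the intended pattern, with an invented check name:

    (* hypothetical client: only run a costly traversal if its
       warning can actually be emitted *)
    let my_costly_check (p : C.program) =
      if Diagnostics.active_warning Diagnostics.Non_linear_cond_expr then
        (* ... walk the program and call Diagnostics.warning where needed ... *)
        ()
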
diff --git a/cparser/Elab.ml b/cparser/Elab.ml
index 10380152..3797164d 100644
--- a/cparser/Elab.ml
+++ b/cparser/Elab.ml
@@ -258,7 +258,7 @@ let enter_or_refine_function loc env id sto ty =
(* Forward declarations *)
-let elab_expr_f : (cabsloc -> Env.t -> Cabs.expression -> C.exp * Env.t) ref
+let elab_expr_f : (Cabs.loc -> Env.t -> Cabs.expression -> C.exp * Env.t) ref
= ref (fun _ _ _ -> assert false)
let elab_funbody_f : (C.typ -> bool -> bool -> Env.t -> statement -> C.stmt) ref
@@ -1802,6 +1802,42 @@ let elab_expr ctx loc env a =
(print_typ env) ty (print_typ env) ty' (print_typ env) ty' (print_typ env) ty;
{ edesc = ECall(ident, [b2; b3]); etyp = ty },env
+ | CALL((VARIABLE "__builtin_sel" as a0), al) ->
+ begin match al with
+ | [a1; a2; a3] ->
+ let b0,env = elab env a0 in
+ let b1,env = elab env a1 in
+ let b2,env = elab env a2 in
+ let b3,env = elab env a3 in
+ if not (is_scalar_type env b1.etyp) then
+ error "first argument of '__builtin_sel' is not a scalar type (invalid %a)"
+ (print_typ env) b1.etyp;
+ let tyres =
+ match pointer_decay env b2.etyp, pointer_decay env b3.etyp with
+ | (TInt _ | TFloat _ | TEnum _), (TInt _ | TFloat _ | TEnum _) ->
+ binary_conversion env b2.etyp b3.etyp
+ | (TPtr(ty1, a1) as pty1), (TPtr(ty2, a2) as pty2) ->
+ if is_void_type env ty1 || is_void_type env ty2 then
+ TPtr(TVoid (add_attributes a1 a2), [])
+ else begin
+ match combine_types AttrIgnoreAll env pty1 pty2 with
+ | None ->
+ warning Pointer_type_mismatch "the second and third arguments of '__builtin_sel' have incompatible pointer types (%a and %a)"
+ (print_typ env) pty1 (print_typ env) pty2;
+ (* tolerance *)
+ TPtr(TVoid (add_attributes a1 a2), [])
+ | Some ty -> ty
+ end
+ | _, _ ->
+ fatal_error "wrong types (%a and %a) for the second and third arguments of '__builtin_sel'"
+ (print_typ env) b2.etyp (print_typ env) b3.etyp
+
+ in
+ { edesc = ECall(b0, [b1; b2; b3]); etyp = tyres }, env
+ | _ ->
+ fatal_error "'__builtin_sel' expect 3 arguments"
+ end
+
| CALL(a1, al) ->
let b1,env =
(* Catch the old-style usage of calling a function without
@@ -2708,7 +2744,7 @@ let elab_fundef genv spec name defs body loc =
(* Definitions *)
let elab_decdef (for_loop: bool) (local: bool) (nonstatic_inline: bool)
(env: Env.t) ((spec, namelist): Cabs.init_name_group)
- (loc: Cabs.cabsloc) : decl list * Env.t =
+ (loc: Cabs.loc) : decl list * Env.t =
let (sto, inl, noret, tydef, bty, env') =
elab_specifier ~only:(namelist=[]) loc env spec in
(* Sanity checks on storage class *)
@@ -3089,10 +3125,11 @@ let _ = elab_funbody_f := elab_funbody
let elab_file prog =
reset();
- let env = Builtins.environment () in
+ let env = Env.initial () in
let elab_def env d = snd (elab_definition false false false env d) in
ignore (List.fold_left elab_def env prog);
let p = elaborated_program () in
Checks.unused_variables p;
Checks.unknown_attrs_program p;
+ Checks.non_linear_conditional p;
p
diff --git a/cparser/Elab.mli b/cparser/Elab.mli
index f701e8c5..59c5efc1 100644
--- a/cparser/Elab.mli
+++ b/cparser/Elab.mli
@@ -18,8 +18,8 @@ val elab_file : Cabs.definition list -> C.program
definitions as produced by the parser into a program in C abstract
syntax. *)
-val elab_int_constant : Cabs.cabsloc -> string -> int64 * C.ikind
+val elab_int_constant : Cabs.loc -> string -> int64 * C.ikind
val elab_float_constant : Cabs.floatInfo -> C.float_cst * C.fkind
-val elab_char_constant : Cabs.cabsloc -> bool -> int64 list -> int64
+val elab_char_constant : Cabs.loc -> bool -> int64 list -> int64
(* These auxiliary functions are exported so that they can be reused
in other projects that deal with C-style source languages. *)
diff --git a/cparser/Env.ml b/cparser/Env.ml
index 5fa4571a..4723a725 100644
--- a/cparser/Env.ml
+++ b/cparser/Env.ml
@@ -276,6 +276,46 @@ let add_enum env id info =
let add_types env_old env_new =
{ env_new with env_ident = env_old.env_ident;env_scope = env_old.env_scope;}
+(* Initial environment describing the built-in types and functions *)
+
+module Init = struct
+
+let env = ref empty
+let idents = ref []
+let decls = ref []
+
+let no_loc = ("", -1)
+
+let add_typedef (s, ty) =
+ let (id, env') = enter_typedef !env s ty in
+ env := env';
+ idents := id :: !idents;
+ decls := {gdesc = Gtypedef(id, ty); gloc = no_loc} :: !decls
+
+let add_function (s, (res, args, va)) =
+ let ty =
+ TFun(res,
+ Some (List.map (fun ty -> (fresh_ident "", ty)) args),
+ va, []) in
+ let (id, env') = enter_ident !env s Storage_extern ty in
+ env := env';
+ idents := id :: !idents;
+ decls :=
+ {gdesc = Gdecl(Storage_extern, id, ty, None); gloc = no_loc} :: !decls
+
+end
+
+let initial () = !Init.env
+let initial_identifiers () = !Init.idents
+let initial_declarations () = List.rev !Init.decls
+
+let set_builtins blt =
+ Init.env := empty;
+ Init.idents := [];
+ Init.decls := [];
+ List.iter Init.add_typedef blt.builtin_typedefs;
+ List.iter Init.add_function blt.builtin_functions
+
(* Error reporting *)
open Printf
diff --git a/cparser/Env.mli b/cparser/Env.mli
index 7ea2c514..1baab68f 100644
--- a/cparser/Env.mli
+++ b/cparser/Env.mli
@@ -77,3 +77,10 @@ val add_typedef : t -> C.ident -> typedef_info -> t
val add_enum : t -> C.ident -> enum_info -> t
val add_types : t -> t -> t
+
+(* Initial environment describing the builtin types and functions *)
+
+val initial: unit -> t
+val initial_identifiers: unit -> C.ident list
+val initial_declarations: unit -> C.globdecl list
+val set_builtins: C.builtins -> unit
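
The initial environment formerly built by Builtins.set is now owned by Env: a front-end installs its built-ins once via set_builtins, and the elaborator then starts from Env.initial (). A hedged OCaml sketch of the expected call sequence; the wrapper function is invented for illustration, while set_builtins, initial and initial_declarations come from this patch and GCC.builtins from the next file:

    (* hypothetical driver setup using the GCC built-ins defined below *)
    let install_builtins () =
      Env.set_builtins GCC.builtins;
      (* the elaborator (Elab.elab_file) will now start from this environment *)
      let _env : Env.t = Env.initial () in
      (* declarations describing the built-ins, if a client needs them *)
      let _decls : C.globdecl list = Env.initial_declarations () in
      ()
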
diff --git a/cparser/GCC.ml b/cparser/GCC.ml
index 010d12f3..458e51d3 100644
--- a/cparser/GCC.ml
+++ b/cparser/GCC.ml
@@ -38,10 +38,10 @@ let intPtrType = TPtr(TInt(IInt, []), [])
let sizeType() = TInt(size_t_ikind(), [])
let builtins = {
- Builtins.typedefs = [
+ builtin_typedefs = [
"__builtin_va_list", voidPtrType
];
- Builtins.functions = [
+ builtin_functions = [
"__builtin___fprintf_chk", (intType, [ voidPtrType; intType; charConstPtrType ], true) (* first argument is really FILE*, not void*, but we don't want to build in the definition for FILE *);
"__builtin___memcpy_chk", (voidPtrType, [ voidPtrType; voidConstPtrType; sizeType(); sizeType() ], false);
"__builtin___memmove_chk", (voidPtrType, [ voidPtrType; voidConstPtrType; sizeType(); sizeType() ], false);
diff --git a/cparser/Lexer.mll b/cparser/Lexer.mll
index 7cf22eef..346477b5 100644
--- a/cparser/Lexer.mll
+++ b/cparser/Lexer.mll
@@ -20,7 +20,7 @@ open Pre_parser_aux
module SSet = Set.Make(String)
-let lexicon : (string, Cabs.cabsloc -> token) Hashtbl.t = Hashtbl.create 17
+let lexicon : (string, Cabs.loc -> token) Hashtbl.t = Hashtbl.create 17
let ignored_keywords : SSet.t ref = ref SSet.empty
let reserved_keyword loc id =
@@ -434,10 +434,7 @@ and singleline_comment = parse
| _ { singleline_comment lexbuf }
{
- open Streams
- open Specif
- open Parser
- open !Aut.GramDefs
+ open Parser.MenhirLibParser.Inter
(* This is the main entry point to the lexer. *)
@@ -463,8 +460,8 @@ and singleline_comment = parse
curr_id := None;
let loc = currentLoc lexbuf in
let token =
- if SSet.mem id !types_context then TYPEDEF_NAME (id, ref TypedefId, loc)
- else VAR_NAME (id, ref VarId, loc)
+ if SSet.mem id !types_context then Pre_parser.TYPEDEF_NAME (id, ref TypedefId, loc)
+ else Pre_parser.VAR_NAME (id, ref VarId, loc)
in
Queue.push token tokens;
token
@@ -497,133 +494,129 @@ and singleline_comment = parse
(* [tokens_stream filename text] runs the pre_parser and produces a stream
of (appropriately classified) tokens. *)
- let tokens_stream filename text : token coq_Stream =
+ let tokens_stream filename text : buffer =
let tokens = Queue.create () in
let buffer = ref ErrorReports.Zero in
invoke_pre_parser filename text (lexer tokens buffer) buffer;
- let rec compute_token_stream () =
- let loop t v =
- Cons (Coq_existT (t, Obj.magic v), Lazy.from_fun compute_token_stream)
- in
+ let rec compute_buffer () =
+ let loop t = Buf_cons (t, Lazy.from_fun compute_buffer) in
match Queue.pop tokens with
- | ADD_ASSIGN loc -> loop ADD_ASSIGN't loc
- | AND loc -> loop AND't loc
- | ANDAND loc -> loop ANDAND't loc
- | AND_ASSIGN loc -> loop AND_ASSIGN't loc
- | AUTO loc -> loop AUTO't loc
- | BANG loc -> loop BANG't loc
- | BAR loc -> loop BAR't loc
- | BARBAR loc -> loop BARBAR't loc
- | UNDERSCORE_BOOL loc -> loop UNDERSCORE_BOOL't loc
- | BREAK loc -> loop BREAK't loc
- | BUILTIN_VA_ARG loc -> loop BUILTIN_VA_ARG't loc
- | BUILTIN_OFFSETOF loc -> loop BUILTIN_OFFSETOF't loc
- | CASE loc -> loop CASE't loc
- | CHAR loc -> loop CHAR't loc
- | COLON loc -> loop COLON't loc
- | COMMA loc -> loop COMMA't loc
- | CONST loc -> loop CONST't loc
- | CONSTANT (cst, loc) -> loop CONSTANT't (cst, loc)
- | CONTINUE loc -> loop CONTINUE't loc
- | DEC loc -> loop DEC't loc
- | DEFAULT loc -> loop DEFAULT't loc
- | DIV_ASSIGN loc -> loop DIV_ASSIGN't loc
- | DO loc -> loop DO't loc
- | DOT loc -> loop DOT't loc
- | DOUBLE loc -> loop DOUBLE't loc
- | ELLIPSIS loc -> loop ELLIPSIS't loc
- | ELSE loc -> loop ELSE't loc
- | ENUM loc -> loop ENUM't loc
- | EOF -> loop EOF't ()
- | EQ loc -> loop EQ't loc
- | EQEQ loc -> loop EQEQ't loc
- | EXTERN loc -> loop EXTERN't loc
- | FLOAT loc -> loop FLOAT't loc
- | FOR loc -> loop FOR't loc
- | GEQ loc -> loop GEQ't loc
- | GOTO loc -> loop GOTO't loc
- | GT loc -> loop GT't loc
- | HAT loc -> loop HAT't loc
- | IF loc -> loop IF't loc
- | INC loc -> loop INC't loc
- | INLINE loc -> loop INLINE't loc
- | INT loc -> loop INT't loc
- | LBRACE loc -> loop LBRACE't loc
- | LBRACK loc -> loop LBRACK't loc
- | LEFT loc -> loop LEFT't loc
- | LEFT_ASSIGN loc -> loop LEFT_ASSIGN't loc
- | LEQ loc -> loop LEQ't loc
- | LONG loc -> loop LONG't loc
- | LPAREN loc -> loop LPAREN't loc
- | LT loc -> loop LT't loc
- | MINUS loc -> loop MINUS't loc
- | MOD_ASSIGN loc -> loop MOD_ASSIGN't loc
- | MUL_ASSIGN loc -> loop MUL_ASSIGN't loc
- | NEQ loc -> loop NEQ't loc
- | NORETURN loc -> loop NORETURN't loc
- | OR_ASSIGN loc -> loop OR_ASSIGN't loc
- | PACKED loc -> loop PACKED't loc
- | PERCENT loc -> loop PERCENT't loc
- | PLUS loc -> loop PLUS't loc
- | PTR loc -> loop PTR't loc
- | QUESTION loc -> loop QUESTION't loc
- | RBRACE loc -> loop RBRACE't loc
- | RBRACK loc -> loop RBRACK't loc
- | REGISTER loc -> loop REGISTER't loc
- | RESTRICT loc -> loop RESTRICT't loc
- | RETURN loc -> loop RETURN't loc
- | RIGHT loc -> loop RIGHT't loc
- | RIGHT_ASSIGN loc -> loop RIGHT_ASSIGN't loc
- | RPAREN loc -> loop RPAREN't loc
- | SEMICOLON loc -> loop SEMICOLON't loc
- | SHORT loc -> loop SHORT't loc
- | SIGNED loc -> loop SIGNED't loc
- | SIZEOF loc -> loop SIZEOF't loc
- | SLASH loc -> loop SLASH't loc
- | STAR loc -> loop STAR't loc
- | STATIC loc -> loop STATIC't loc
- | STRING_LITERAL (wide, str, loc) ->
+ | Pre_parser.ADD_ASSIGN loc -> loop (Parser.ADD_ASSIGN loc)
+ | Pre_parser.AND loc -> loop (Parser.AND loc)
+ | Pre_parser.ANDAND loc -> loop (Parser.ANDAND loc)
+ | Pre_parser.AND_ASSIGN loc -> loop (Parser.AND_ASSIGN loc)
+ | Pre_parser.AUTO loc -> loop (Parser.AUTO loc)
+ | Pre_parser.BANG loc -> loop (Parser.BANG loc)
+ | Pre_parser.BAR loc -> loop (Parser.BAR loc)
+ | Pre_parser.BARBAR loc -> loop (Parser.BARBAR loc)
+ | Pre_parser.UNDERSCORE_BOOL loc -> loop (Parser.UNDERSCORE_BOOL loc)
+ | Pre_parser.BREAK loc -> loop (Parser.BREAK loc)
+ | Pre_parser.BUILTIN_VA_ARG loc -> loop (Parser.BUILTIN_VA_ARG loc)
+ | Pre_parser.BUILTIN_OFFSETOF loc -> loop (Parser.BUILTIN_OFFSETOF loc)
+ | Pre_parser.CASE loc -> loop (Parser.CASE loc)
+ | Pre_parser.CHAR loc -> loop (Parser.CHAR loc)
+ | Pre_parser.COLON loc -> loop (Parser.COLON loc)
+ | Pre_parser.COMMA loc -> loop (Parser.COMMA loc)
+ | Pre_parser.CONST loc -> loop (Parser.CONST loc)
+ | Pre_parser.CONSTANT (cst, loc) -> loop (Parser.CONSTANT (cst, loc))
+ | Pre_parser.CONTINUE loc -> loop (Parser.CONTINUE loc)
+ | Pre_parser.DEC loc -> loop (Parser.DEC loc)
+ | Pre_parser.DEFAULT loc -> loop (Parser.DEFAULT loc)
+ | Pre_parser.DIV_ASSIGN loc -> loop (Parser.DIV_ASSIGN loc)
+ | Pre_parser.DO loc -> loop (Parser.DO loc)
+ | Pre_parser.DOT loc -> loop (Parser.DOT loc)
+ | Pre_parser.DOUBLE loc -> loop (Parser.DOUBLE loc)
+ | Pre_parser.ELLIPSIS loc -> loop (Parser.ELLIPSIS loc)
+ | Pre_parser.ELSE loc -> loop (Parser.ELSE loc)
+ | Pre_parser.ENUM loc -> loop (Parser.ENUM loc)
+ | Pre_parser.EOF -> loop (Parser.EOF ())
+ | Pre_parser.EQ loc -> loop (Parser.EQ loc)
+ | Pre_parser.EQEQ loc -> loop (Parser.EQEQ loc)
+ | Pre_parser.EXTERN loc -> loop (Parser.EXTERN loc)
+ | Pre_parser.FLOAT loc -> loop (Parser.FLOAT loc)
+ | Pre_parser.FOR loc -> loop (Parser.FOR loc)
+ | Pre_parser.GEQ loc -> loop (Parser.GEQ loc)
+ | Pre_parser.GOTO loc -> loop (Parser.GOTO loc)
+ | Pre_parser.GT loc -> loop (Parser.GT loc)
+ | Pre_parser.HAT loc -> loop (Parser.HAT loc)
+ | Pre_parser.IF loc -> loop (Parser.IF_ loc)
+ | Pre_parser.INC loc -> loop (Parser.INC loc)
+ | Pre_parser.INLINE loc -> loop (Parser.INLINE loc)
+ | Pre_parser.INT loc -> loop (Parser.INT loc)
+ | Pre_parser.LBRACE loc -> loop (Parser.LBRACE loc)
+ | Pre_parser.LBRACK loc -> loop (Parser.LBRACK loc)
+ | Pre_parser.LEFT loc -> loop (Parser.LEFT loc)
+ | Pre_parser.LEFT_ASSIGN loc -> loop (Parser.LEFT_ASSIGN loc)
+ | Pre_parser.LEQ loc -> loop (Parser.LEQ loc)
+ | Pre_parser.LONG loc -> loop (Parser.LONG loc)
+ | Pre_parser.LPAREN loc -> loop (Parser.LPAREN loc)
+ | Pre_parser.LT loc -> loop (Parser.LT loc)
+ | Pre_parser.MINUS loc -> loop (Parser.MINUS loc)
+ | Pre_parser.MOD_ASSIGN loc -> loop (Parser.MOD_ASSIGN loc)
+ | Pre_parser.MUL_ASSIGN loc -> loop (Parser.MUL_ASSIGN loc)
+ | Pre_parser.NEQ loc -> loop (Parser.NEQ loc)
+ | Pre_parser.NORETURN loc -> loop (Parser.NORETURN loc)
+ | Pre_parser.OR_ASSIGN loc -> loop (Parser.OR_ASSIGN loc)
+ | Pre_parser.PACKED loc -> loop (Parser.PACKED loc)
+ | Pre_parser.PERCENT loc -> loop (Parser.PERCENT loc)
+ | Pre_parser.PLUS loc -> loop (Parser.PLUS loc)
+ | Pre_parser.PTR loc -> loop (Parser.PTR loc)
+ | Pre_parser.QUESTION loc -> loop (Parser.QUESTION loc)
+ | Pre_parser.RBRACE loc -> loop (Parser.RBRACE loc)
+ | Pre_parser.RBRACK loc -> loop (Parser.RBRACK loc)
+ | Pre_parser.REGISTER loc -> loop (Parser.REGISTER loc)
+ | Pre_parser.RESTRICT loc -> loop (Parser.RESTRICT loc)
+ | Pre_parser.RETURN loc -> loop (Parser.RETURN loc)
+ | Pre_parser.RIGHT loc -> loop (Parser.RIGHT loc)
+ | Pre_parser.RIGHT_ASSIGN loc -> loop (Parser.RIGHT_ASSIGN loc)
+ | Pre_parser.RPAREN loc -> loop (Parser.RPAREN loc)
+ | Pre_parser.SEMICOLON loc -> loop (Parser.SEMICOLON loc)
+ | Pre_parser.SHORT loc -> loop (Parser.SHORT loc)
+ | Pre_parser.SIGNED loc -> loop (Parser.SIGNED loc)
+ | Pre_parser.SIZEOF loc -> loop (Parser.SIZEOF loc)
+ | Pre_parser.SLASH loc -> loop (Parser.SLASH loc)
+ | Pre_parser.STAR loc -> loop (Parser.STAR loc)
+ | Pre_parser.STATIC loc -> loop (Parser.STATIC loc)
+ | Pre_parser.STRING_LITERAL (wide, str, loc) ->
(* Merge consecutive string literals *)
let rec doConcat wide str =
- try
- match Queue.peek tokens with
- | STRING_LITERAL (wide', str', loc) ->
- ignore (Queue.pop tokens);
- let (wide'', str'') = doConcat wide' str' in
- if str'' <> []
- then (wide || wide'', str @ str'')
- else (wide, str)
- | _ ->
- (wide, str)
- with Queue.Empty -> (wide, str) in
- let (wide', str') = doConcat wide str in
- loop STRING_LITERAL't ((wide', str'), loc)
- | STRUCT loc -> loop STRUCT't loc
- | SUB_ASSIGN loc -> loop SUB_ASSIGN't loc
- | SWITCH loc -> loop SWITCH't loc
- | TILDE loc -> loop TILDE't loc
- | TYPEDEF loc -> loop TYPEDEF't loc
- | TYPEDEF_NAME (id, typ, loc)
- | VAR_NAME (id, typ, loc) ->
- let terminal = match !typ with
- | VarId -> VAR_NAME't
- | TypedefId -> TYPEDEF_NAME't
- | OtherId -> OTHER_NAME't
+ match Queue.peek tokens with
+ | Pre_parser.STRING_LITERAL (wide', str', loc) ->
+ ignore (Queue.pop tokens);
+ let (wide'', str'') = doConcat wide' str' in
+ if str'' <> []
+ then (wide || wide'', str @ str'')
+ else (wide, str)
+ | _ -> (wide, str)
+ | exception Queue.Empty -> (wide, str)
in
- loop terminal (id, loc)
- | UNION loc -> loop UNION't loc
- | UNSIGNED loc -> loop UNSIGNED't loc
- | VOID loc -> loop VOID't loc
- | VOLATILE loc -> loop VOLATILE't loc
- | WHILE loc -> loop WHILE't loc
- | XOR_ASSIGN loc -> loop XOR_ASSIGN't loc
- | ALIGNAS loc -> loop ALIGNAS't loc
- | ALIGNOF loc -> loop ALIGNOF't loc
- | ATTRIBUTE loc -> loop ATTRIBUTE't loc
- | ASM loc -> loop ASM't loc
- | PRAGMA (s, loc) -> loop PRAGMA't (s, loc)
- | PRE_NAME _ -> assert false
+ let (wide', str') = doConcat wide str in
+ loop (Parser.STRING_LITERAL ((wide', str'), loc))
+ | Pre_parser.STRUCT loc -> loop (Parser.STRUCT loc)
+ | Pre_parser.SUB_ASSIGN loc -> loop (Parser.SUB_ASSIGN loc)
+ | Pre_parser.SWITCH loc -> loop (Parser.SWITCH loc)
+ | Pre_parser.TILDE loc -> loop (Parser.TILDE loc)
+ | Pre_parser.TYPEDEF loc -> loop (Parser.TYPEDEF loc)
+ | Pre_parser.TYPEDEF_NAME (id, typ, loc)
+ | Pre_parser.VAR_NAME (id, typ, loc) ->
+ begin match !typ with
+ | VarId -> loop (Parser.VAR_NAME (id, loc))
+ | TypedefId -> loop (Parser.TYPEDEF_NAME (id, loc))
+ | OtherId -> loop (Parser.OTHER_NAME (id, loc))
+ end
+ | Pre_parser.UNION loc -> loop (Parser.UNION loc)
+ | Pre_parser.UNSIGNED loc -> loop (Parser.UNSIGNED loc)
+ | Pre_parser.VOID loc -> loop (Parser.VOID loc)
+ | Pre_parser.VOLATILE loc -> loop (Parser.VOLATILE loc)
+ | Pre_parser.WHILE loc -> loop (Parser.WHILE loc)
+ | Pre_parser.XOR_ASSIGN loc -> loop (Parser.XOR_ASSIGN loc)
+ | Pre_parser.ALIGNAS loc -> loop (Parser.ALIGNAS loc)
+ | Pre_parser.ALIGNOF loc -> loop (Parser.ALIGNOF loc)
+ | Pre_parser.ATTRIBUTE loc -> loop (Parser.ATTRIBUTE loc)
+ | Pre_parser.ASM loc -> loop (Parser.ASM loc)
+ | Pre_parser.PRAGMA (s, loc) -> loop (Parser.PRAGMA (s, loc))
+ | Pre_parser.PRE_NAME _ -> assert false
in
- Lazy.from_fun compute_token_stream
+ Lazy.from_fun compute_buffer
}
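
tokens_stream now produces a lazy buffer of Parser tokens (Buf_cons cells from Parser.MenhirLibParser.Inter) instead of the previous dependently typed coq_Stream. A hedged sketch of how such a buffer could be consumed, assuming the extracted buffer type forces to the Buf_cons cell built above; the helper is invented for illustration:

    (* hypothetical: peel the first n tokens off a buffer;
       assumes open Parser.MenhirLibParser.Inter, as in the lexer above *)
    let rec take_tokens n (buf : buffer) =
      if n <= 0 then []
      else
        match Lazy.force buf with
        | Buf_cons (tok, rest) -> tok :: take_tokens (n - 1) rest
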
diff --git a/cparser/MenhirLib/Alphabet.v b/cparser/MenhirLib/Alphabet.v
deleted file mode 100644
index a13f69b0..00000000
--- a/cparser/MenhirLib/Alphabet.v
+++ /dev/null
@@ -1,320 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import Int31.
-Require Import Cyclic31.
-Require Import Omega.
-Require Import List.
-Require Import Syntax.
-Require Import Relations.
-Require Import RelationClasses.
-
-Local Obligation Tactic := intros.
-
-(** A comparable type is equiped with a [compare] function, that define an order
- relation. **)
-Class Comparable (A:Type) := {
- compare : A -> A -> comparison;
- compare_antisym : forall x y, CompOpp (compare x y) = compare y x;
- compare_trans : forall x y z c,
- (compare x y) = c -> (compare y z) = c -> (compare x z) = c
-}.
-
-Theorem compare_refl {A:Type} (C: Comparable A) :
- forall x, compare x x = Eq.
-Proof.
-intros.
-pose proof (compare_antisym x x).
-destruct (compare x x); intuition; try discriminate.
-Qed.
-
-(** The corresponding order is a strict order. **)
-Definition comparableLt {A:Type} (C: Comparable A) : relation A :=
- fun x y => compare x y = Lt.
-
-Instance ComparableLtStrictOrder {A:Type} (C: Comparable A) :
- StrictOrder (comparableLt C).
-Proof.
-apply Build_StrictOrder.
-unfold Irreflexive, Reflexive, complement, comparableLt.
-intros.
-pose proof H.
-rewrite <- compare_antisym in H.
-rewrite H0 in H.
-discriminate H.
-unfold Transitive, comparableLt.
-intros x y z.
-apply compare_trans.
-Qed.
-
-(** nat is comparable. **)
-Program Instance natComparable : Comparable nat :=
- { compare := Nat.compare }.
-Next Obligation.
-symmetry.
-destruct (Nat.compare x y) as [] eqn:?.
-rewrite Nat.compare_eq_iff in Heqc.
-destruct Heqc.
-rewrite Nat.compare_eq_iff.
-trivial.
-rewrite <- nat_compare_lt in *.
-rewrite <- nat_compare_gt in *.
-trivial.
-rewrite <- nat_compare_lt in *.
-rewrite <- nat_compare_gt in *.
-trivial.
-Qed.
-Next Obligation.
-destruct c.
-rewrite Nat.compare_eq_iff in *; destruct H; assumption.
-rewrite <- nat_compare_lt in *.
-apply (Nat.lt_trans _ _ _ H H0).
-rewrite <- nat_compare_gt in *.
-apply (gt_trans _ _ _ H H0).
-Qed.
-
-(** A pair of comparable is comparable. **)
-Program Instance PairComparable {A:Type} (CA:Comparable A) {B:Type} (CB:Comparable B) :
- Comparable (A*B) :=
- { compare := fun x y =>
- let (xa, xb) := x in let (ya, yb) := y in
- match compare xa ya return comparison with
- | Eq => compare xb yb
- | x => x
- end }.
-Next Obligation.
-destruct x, y.
-rewrite <- (compare_antisym a a0).
-rewrite <- (compare_antisym b b0).
-destruct (compare a a0); intuition.
-Qed.
-Next Obligation.
-destruct x, y, z.
-destruct (compare a a0) as [] eqn:?, (compare a0 a1) as [] eqn:?;
-try (rewrite <- H0 in H; discriminate);
-try (destruct (compare a a1) as [] eqn:?;
- try (rewrite <- compare_antisym in Heqc0;
- rewrite CompOpp_iff in Heqc0;
- rewrite (compare_trans _ _ _ _ Heqc0 Heqc2) in Heqc1;
- discriminate);
- try (rewrite <- compare_antisym in Heqc1;
- rewrite CompOpp_iff in Heqc1;
- rewrite (compare_trans _ _ _ _ Heqc2 Heqc1) in Heqc0;
- discriminate);
- assumption);
-rewrite (compare_trans _ _ _ _ Heqc0 Heqc1);
-try assumption.
-apply (compare_trans _ _ _ _ H H0).
-Qed.
-
-(** Special case of comparable, where equality is usual equality. **)
-Class ComparableUsualEq {A:Type} (C: Comparable A) :=
- compare_eq : forall x y, compare x y = Eq -> x = y.
-
-(** Boolean equality for a [Comparable]. **)
-Definition compare_eqb {A:Type} {C:Comparable A} (x y:A) :=
- match compare x y with
- | Eq => true
- | _ => false
- end.
-
-Theorem compare_eqb_iff {A:Type} {C:Comparable A} {U:ComparableUsualEq C} :
- forall x y, compare_eqb x y = true <-> x = y.
-Proof.
-unfold compare_eqb.
-intuition.
-apply compare_eq.
-destruct (compare x y); intuition; discriminate.
-destruct H.
-rewrite compare_refl; intuition.
-Qed.
-
-(** [Comparable] provides a decidable equality. **)
-Definition compare_eqdec {A:Type} {C:Comparable A} {U:ComparableUsualEq C} (x y:A):
- {x = y} + {x <> y}.
-Proof.
-destruct (compare x y) as [] eqn:?; [left; apply compare_eq; intuition | ..];
- right; intro; destruct H; rewrite compare_refl in Heqc; discriminate.
-Defined.
-
-Instance NComparableUsualEq : ComparableUsualEq natComparable := Nat.compare_eq.
-
-(** A pair of ComparableUsualEq is ComparableUsualEq **)
-Instance PairComparableUsualEq
- {A:Type} {CA:Comparable A} (UA:ComparableUsualEq CA)
- {B:Type} {CB:Comparable B} (UB:ComparableUsualEq CB) :
- ComparableUsualEq (PairComparable CA CB).
-Proof.
-intros x y; destruct x, y; simpl.
-pose proof (compare_eq a a0); pose proof (compare_eq b b0).
-destruct (compare a a0); try discriminate.
-intuition.
-destruct H2, H0.
-reflexivity.
-Qed.
-
-(** An [Finite] type is a type with the list of all elements. **)
-Class Finite (A:Type) := {
- all_list : list A;
- all_list_forall : forall x:A, In x all_list
-}.
-
-(** An alphabet is both [ComparableUsualEq] and [Finite]. **)
-Class Alphabet (A:Type) := {
- AlphabetComparable :> Comparable A;
- AlphabetComparableUsualEq :> ComparableUsualEq AlphabetComparable;
- AlphabetFinite :> Finite A
-}.
-
-(** The [Numbered] class provides a conveniant way to build [Alphabet] instances,
- with a good computationnal complexity. It is mainly a injection from it to
- [Int31] **)
-Class Numbered (A:Type) := {
- inj : A -> int31;
- surj : int31 -> A;
- surj_inj_compat : forall x, surj (inj x) = x;
- inj_bound : int31;
- inj_bound_spec : forall x, (phi (inj x) < phi inj_bound)%Z
-}.
-
-Program Instance NumberedAlphabet {A:Type} (N:Numbered A) : Alphabet A :=
- { AlphabetComparable :=
- {| compare := fun x y => compare31 (inj x) (inj y) |};
- AlphabetFinite :=
- {| all_list := fst (iter_int31 inj_bound _
- (fun p => (cons (surj (snd p)) (fst p), incr (snd p))) ([], 0%int31)) |} }.
-Next Obligation. apply Zcompare_antisym. Qed.
-Next Obligation.
-destruct c. unfold compare31 in *.
-rewrite Z.compare_eq_iff in *. congruence.
-eapply Zcompare_Lt_trans. apply H. apply H0.
-eapply Zcompare_Gt_trans. apply H. apply H0.
-Qed.
-Next Obligation.
-intros x y H. unfold compare, compare31 in H.
-rewrite Z.compare_eq_iff in *.
-rewrite <- surj_inj_compat, <- phi_inv_phi with (inj y), <- H.
-rewrite phi_inv_phi, surj_inj_compat; reflexivity.
-Qed.
-Next Obligation.
-rewrite iter_int31_iter_nat.
-pose proof (inj_bound_spec x).
-match goal with |- In x (fst ?p) => destruct p as [] eqn:? end.
-replace inj_bound with i in H.
-revert l i Heqp x H.
-induction (Z.abs_nat (phi inj_bound)); intros.
-inversion Heqp; clear Heqp; subst.
-rewrite spec_0 in H. pose proof (phi_bounded (inj x)). omega.
-simpl in Heqp.
-destruct nat_rect; specialize (IHn _ _ (eq_refl _) x); simpl in *.
-inversion Heqp. subst. clear Heqp.
-rewrite phi_incr in H.
-pose proof (phi_bounded i0).
-pose proof (phi_bounded (inj x)).
-destruct (Z_lt_le_dec (Z.succ (phi i0)) (2 ^ Z.of_nat size)%Z).
-rewrite Zmod_small in H by omega.
-apply Zlt_succ_le, Zle_lt_or_eq in H.
-destruct H; simpl; auto. left.
-rewrite <- surj_inj_compat, <- phi_inv_phi with (inj x), H, phi_inv_phi; reflexivity.
-replace (Z.succ (phi i0)) with (2 ^ Z.of_nat size)%Z in H by omega.
-rewrite Z_mod_same_full in H.
-exfalso; omega.
-rewrite <- phi_inv_phi with i, <- phi_inv_phi with inj_bound; f_equal.
-pose proof (phi_bounded inj_bound); pose proof (phi_bounded i).
-rewrite <- Z.abs_eq with (phi i), <- Z.abs_eq with (phi inj_bound) by omega.
-clear H H0 H1.
-do 2 rewrite <- Zabs2Nat.id_abs.
-f_equal.
-revert l i Heqp.
-assert (Z.abs_nat (phi inj_bound) < Z.abs_nat (2^31)).
-apply Zabs_nat_lt, phi_bounded.
-induction (Z.abs_nat (phi inj_bound)); intros.
-inversion Heqp; reflexivity.
-inversion Heqp; clear H1 H2 Heqp.
-match goal with |- _ (_ (_ (snd ?p))) = _ => destruct p end.
-pose proof (phi_bounded i0).
-erewrite <- IHn, <- Zabs2Nat.inj_succ in H |- *; eauto; try omega.
-rewrite phi_incr, Zmod_small; intuition; try omega.
-apply inj_lt in H.
-pose proof Z.le_le_succ_r.
-do 2 rewrite Zabs2Nat.id_abs, Z.abs_eq in H; now eauto.
-Qed.
-
-(** Previous class instances for [option A] **)
-Program Instance OptionComparable {A:Type} (C:Comparable A) : Comparable (option A) :=
- { compare := fun x y =>
- match x, y return comparison with
- | None, None => Eq
- | None, Some _ => Lt
- | Some _, None => Gt
- | Some x, Some y => compare x y
- end }.
-Next Obligation.
-destruct x, y; intuition.
-apply compare_antisym.
-Qed.
-Next Obligation.
-destruct x, y, z; try now intuition;
-try (rewrite <- H in H0; discriminate).
-apply (compare_trans _ _ _ _ H H0).
-Qed.
-
-Instance OptionComparableUsualEq {A:Type} {C:Comparable A} (U:ComparableUsualEq C) :
- ComparableUsualEq (OptionComparable C).
-Proof.
-intros x y.
-destruct x, y; intuition; try discriminate.
-rewrite (compare_eq a a0); intuition.
-Qed.
-
-Program Instance OptionFinite {A:Type} (E:Finite A) : Finite (option A) :=
- { all_list := None :: map Some all_list }.
-Next Obligation.
-destruct x; intuition.
-right.
-apply in_map.
-apply all_list_forall.
-Defined.
-
-(** Definitions of [FSet]/[FMap] from [Comparable] **)
-Require Import OrderedTypeAlt.
-Require FSetAVL.
-Require FMapAVL.
-Import OrderedType.
-
-Module Type ComparableM.
- Parameter t : Type.
- Declare Instance tComparable : Comparable t.
-End ComparableM.
-
-Module OrderedTypeAlt_from_ComparableM (C:ComparableM) <: OrderedTypeAlt.
- Definition t := C.t.
- Definition compare : t -> t -> comparison := compare.
-
- Infix "?=" := compare (at level 70, no associativity).
-
- Lemma compare_sym x y : (y?=x) = CompOpp (x?=y).
- Proof. exact (Logic.eq_sym (compare_antisym x y)). Qed.
- Lemma compare_trans c x y z :
- (x?=y) = c -> (y?=z) = c -> (x?=z) = c.
- Proof.
- apply compare_trans.
- Qed.
-End OrderedTypeAlt_from_ComparableM.
-
-Module OrderedType_from_ComparableM (C:ComparableM) <: OrderedType.
- Module Alt := OrderedTypeAlt_from_ComparableM C.
- Include (OrderedType_from_Alt Alt).
-End OrderedType_from_ComparableM.
diff --git a/cparser/MenhirLib/Automaton.v b/cparser/MenhirLib/Automaton.v
deleted file mode 100644
index fc995298..00000000
--- a/cparser/MenhirLib/Automaton.v
+++ /dev/null
@@ -1,167 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Grammar.
-Require Import Orders.
-Require Export Alphabet.
-Require Export List.
-Require Export Syntax.
-
-Module Type AutInit.
- (** The grammar of the automaton. **)
- Declare Module Gram:Grammar.T.
- Export Gram.
-
-  (** The set of non-initial states is considered as an alphabet. **)
- Parameter noninitstate : Type.
- Declare Instance NonInitStateAlph : Alphabet noninitstate.
-
- Parameter initstate : Type.
- Declare Instance InitStateAlph : Alphabet initstate.
-
- (** When we are at this state, we know that this symbol is the top of the
- stack. **)
- Parameter last_symb_of_non_init_state: noninitstate -> symbol.
-End AutInit.
-
-Module Types(Import Init:AutInit).
-  (** In many ways, the behaviour of the initial state is different from the
-      behaviour of the other states. So we have chosen to explicitly separate
-      them: the user has to provide the type of non-initial states. **)
- Inductive state :=
- | Init: initstate -> state
- | Ninit: noninitstate -> state.
-
- Program Instance StateAlph : Alphabet state :=
- { AlphabetComparable := {| compare := fun x y =>
- match x, y return comparison with
- | Init _, Ninit _ => Lt
- | Init x, Init y => compare x y
- | Ninit _, Init _ => Gt
- | Ninit x, Ninit y => compare x y
- end |};
- AlphabetFinite := {| all_list := map Init all_list ++ map Ninit all_list |} }.
- Local Obligation Tactic := intros.
- Next Obligation.
- destruct x, y; intuition; apply compare_antisym.
- Qed.
- Next Obligation.
- destruct x, y, z; intuition.
- apply (compare_trans _ i0); intuition.
- congruence.
- congruence.
- apply (compare_trans _ n0); intuition.
- Qed.
- Next Obligation.
- intros x y.
- destruct x, y; intuition; try discriminate.
- rewrite (compare_eq i i0); intuition.
- rewrite (compare_eq n n0); intuition.
- Qed.
- Next Obligation.
- apply in_or_app; destruct x; intuition;
- [left|right]; apply in_map; apply all_list_forall.
- Qed.
-
- Coercion Ninit : noninitstate >-> state.
- Coercion Init : initstate >-> state.
-
-  (** For an LR automaton, there are four kinds of actions that can be done at a
-      given state:
-      - Shifting, that is, reading a token and pushing it onto the stack,
-      - Reducing a production, that is, popping the right hand side of the
-        production from the stack, and pushing the left hand side,
-      - Failing,
-      - Accepting the word (a special case of reduction).
-
-      As in the Menhir parser generator, we do not want our parser to read past
-      the end of the stream. That means that once the parser has read a word of
-      the grammar's language, it should stop without peeking at the input
-      stream. So, for the automaton to be complete, the grammar must be
-      particular: if a word is in its language, then it is not a prefix of
-      another word of the language (otherwise, Menhir reports an end-of-stream
-      conflict).
-
-      As a consequence, there are two notions of action: the first is an action
-      performed before the next token has been read, the second is an action
-      performed after the next token has been peeked. **)
-
- Inductive lookahead_action (term:terminal) :=
- | Shift_act: forall s:noninitstate,
- T term = last_symb_of_non_init_state s -> lookahead_action term
- | Reduce_act: production -> lookahead_action term
- | Fail_act: lookahead_action term.
- Arguments Shift_act [term].
- Arguments Reduce_act [term].
- Arguments Fail_act [term].
-
- Inductive action :=
- | Default_reduce_act: production -> action
- | Lookahead_act : (forall term:terminal, lookahead_action term) -> action.
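For instance (a purely illustrative sketch), an action-table entry that rejects every lookahead token can be built with [Lookahead_act], whereas [Default_reduce_act p] would reduce production [p] without examining the next token:

    Definition reject_all_action : action :=
      Lookahead_act (fun term => Fail_act).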
-
- (** Types used for the annotations of the automaton. **)
-
-  (** An item is a part of the annotations given to the validator.
-      It is actually a set of LR(1) items sharing the same core. It is needed
-      to validate completeness. **)
- Record item := {
- (** The pseudo-production of the item. **)
- prod_item: production;
-
- (** The position of the dot. **)
- dot_pos_item: nat;
-
- (** The lookahead symbol of the item. We are using a list, so we can store
- together multiple LR(1) items sharing the same core. **)
- lookaheads_item: list terminal
- }.
-End Types.
-
-Module Type T.
- Include AutInit <+ Types.
- Module Export GramDefs := Grammar.Defs Gram.
-
- (** For each initial state, the non terminal it recognizes. **)
- Parameter start_nt: initstate -> nonterminal.
-
-  (** The action table maps a state to an action: either a default reduction,
-      or a map from terminals to lookahead actions. **)
- Parameter action_table:
- state -> action.
- (** The goto table of an LR(1) automaton. **)
- Parameter goto_table: state -> forall nt:nonterminal,
- option { s:noninitstate | NT nt = last_symb_of_non_init_state s }.
-
- (** Some annotations on the automaton to help the validation. **)
-
-  (** When we are at this state, we know that these symbols are just below
-      the top of the stack. The list is ordered such that the head corresponds
-      to the (almost) top of the stack. **)
- Parameter past_symb_of_non_init_state: noninitstate -> list symbol.
-
-  (** When we are at this state, the (strictly) previous states satisfy these
-      predicates. **)
- Parameter past_state_of_non_init_state: noninitstate -> list (state -> bool).
-
- (** The items of the state. **)
- Parameter items_of_state: state -> list item.
-
-  (** The nullable predicate for non-terminals:
-      true if and only if the symbol produces the empty string. **)
- Parameter nullable_nterm: nonterminal -> bool.
-
-  (** The first predicates for non-terminals, symbols or words of symbols. A
-      terminal is in the returned list if, and only if, the parameter produces
-      a word that begins with the given terminal. **)
- Parameter first_nterm: nonterminal -> list terminal.
-End T.
diff --git a/cparser/MenhirLib/Grammar.v b/cparser/MenhirLib/Grammar.v
deleted file mode 100644
index 8e427cd9..00000000
--- a/cparser/MenhirLib/Grammar.v
+++ /dev/null
@@ -1,166 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import List.
-Require Import Syntax.
-Require Import Alphabet.
-Require Import Orders.
-Require Tuples.
-
-(** The terminal and non-terminal alphabets of the grammar. **)
-Module Type Alphs.
- Parameters terminal nonterminal : Type.
- Declare Instance TerminalAlph: Alphabet terminal.
- Declare Instance NonTerminalAlph: Alphabet nonterminal.
-End Alphs.
-
-(** Definition of the alphabet of symbols, given the alphabet of terminals
-    and the alphabet of non-terminals. **)
-Module Symbol(Import A:Alphs).
-
- Inductive symbol :=
- | T: terminal -> symbol
- | NT: nonterminal -> symbol.
-
- Program Instance SymbolAlph : Alphabet symbol :=
- { AlphabetComparable := {| compare := fun x y =>
- match x, y return comparison with
- | T _, NT _ => Gt
- | NT _, T _ => Lt
- | T x, T y => compare x y
- | NT x, NT y => compare x y
- end |};
- AlphabetFinite := {| all_list :=
- map T all_list++map NT all_list |} }.
- Next Obligation.
- destruct x; destruct y; intuition; apply compare_antisym.
- Qed.
- Next Obligation.
- destruct x; destruct y; destruct z; intuition; try discriminate.
- apply (compare_trans _ t0); intuition.
- apply (compare_trans _ n0); intuition.
- Qed.
- Next Obligation.
- intros x y.
- destruct x; destruct y; try discriminate; intros.
- rewrite (compare_eq t t0); intuition.
- rewrite (compare_eq n n0); intuition.
- Qed.
- Next Obligation.
- rewrite in_app_iff.
- destruct x; [left | right]; apply in_map; apply all_list_forall.
- Qed.
-
-End Symbol.
-
-Module Type T.
- Export Tuples.
-
- Include Alphs <+ Symbol.
-
-  (** [symbol_semantic_type] maps a symbol to the type of its semantic
-      values. **)
- Parameter symbol_semantic_type: symbol -> Type.
-
-  (** The type of production identifiers. **)
- Parameter production : Type.
- Declare Instance ProductionAlph : Alphabet production.
-
-  (** Accessors for productions: left-hand side, right-hand side,
-      and semantic action. The semantic actions are given in the form
-      of curried functions that take their arguments in reverse order. **)
- Parameter prod_lhs: production -> nonterminal.
- Parameter prod_rhs_rev: production -> list symbol.
- Parameter prod_action:
- forall p:production,
- arrows_left
- (map symbol_semantic_type (rev (prod_rhs_rev p)))
- (symbol_semantic_type (NT (prod_lhs p))).
-
-End T.
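To make the argument-order convention concrete, a worked sketch with a hypothetical production [p] whose left-hand side is [e] and whose right-hand side is [e PLUS t]: then [prod_rhs_rev p] is [[NT t; T PLUS; NT e]], and [prod_action p] has type

    symbol_semantic_type (NT t)
    -> symbol_semantic_type (T PLUS)
    -> symbol_semantic_type (NT e)
    -> symbol_semantic_type (NT e)

i.e. the rightmost symbol of the right-hand side provides the first argument.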
-
-Module Defs(Import G:T).
-
- (** A token is a terminal and a semantic value for this terminal. **)
- Definition token := {t:terminal & symbol_semantic_type (T t)}.
-
-  (** A grammar creates a relation between words of tokens and semantic values.
-      This relation is parametrized by the head symbol. It defines the
-      "semantics" of the grammar. This relation is defined by a notion of
-      parse tree. **)
- Inductive parse_tree:
- forall (head_symbol:symbol) (word:list token)
- (semantic_value:symbol_semantic_type head_symbol), Type :=
-
- (** A single token has its semantic value as semantic value, for the
- corresponding terminal as head symbol. **)
- | Terminal_pt:
- forall (t:terminal) (sem:symbol_semantic_type (T t)),
- parse_tree (T t)
- [existT (fun t => symbol_semantic_type (T t)) t sem] sem
-
- (** Given a production, if a word has a list of semantic values for the
- right hand side as head symbols, then this word has the semantic value
- given by the semantic action of the production for the left hand side
- as head symbol.**)
- | Non_terminal_pt:
- forall {p:production} {word:list token}
- {semantic_values:tuple (map symbol_semantic_type (rev (prod_rhs_rev p)))},
- parse_tree_list (rev (prod_rhs_rev p)) word semantic_values ->
- parse_tree (NT (prod_lhs p)) word (uncurry (prod_action p) semantic_values)
-
-  (** Basically the same relation as before, but for lists of head symbols
-      (i.e. we are building a forest of syntax trees). It is mutually recursive
-      with the previous relation. **)
- with parse_tree_list:
- forall (head_symbols:list symbol) (word:list token)
- (semantic_values:tuple (map symbol_semantic_type head_symbols)),
- Type :=
-
-  (** The empty word has [()] as semantic value, for [[]] as the list of head
-      symbols. **)
- | Nil_ptl: parse_tree_list [] [] ()
-
- (** The cons of the semantic value for one head symbol and for a list of head
- symbols **)
- | Cons_ptl:
- (** The semantic for the head **)
- forall {head_symbolt:symbol} {wordt:list token}
- {semantic_valuet:symbol_semantic_type head_symbolt},
- parse_tree head_symbolt wordt semantic_valuet ->
-
- (** and the semantic for the tail **)
- forall {head_symbolsq:list symbol} {wordq:list token}
- {semantic_valuesq:tuple (map symbol_semantic_type head_symbolsq)},
- parse_tree_list head_symbolsq wordq semantic_valuesq ->
-
- (** give the semantic of the cons **)
- parse_tree_list
- (head_symbolt::head_symbolsq)
- (wordt++wordq)
- (semantic_valuet, semantic_valuesq).
-
-
- Fixpoint pt_size {head_symbol word sem} (tree:parse_tree head_symbol word sem) :=
- match tree with
- | Terminal_pt _ _ => 1
- | Non_terminal_pt l => S (ptl_size l)
- end
- with ptl_size {head_symbols word sems} (tree:parse_tree_list head_symbols word sems) :=
- match tree with
- | Nil_ptl => 0
- | Cons_ptl t q =>
- pt_size t + ptl_size q
- end.
-End Defs.
diff --git a/cparser/MenhirLib/Interpreter.v b/cparser/MenhirLib/Interpreter.v
deleted file mode 100644
index 4ac02693..00000000
--- a/cparser/MenhirLib/Interpreter.v
+++ /dev/null
@@ -1,228 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import Streams.
-Require Import List.
-Require Import Syntax.
-Require Automaton.
-Require Import Alphabet.
-
-Module Make(Import A:Automaton.T).
-
-(** The error monad **)
-Inductive result (A:Type) :=
- | Err: result A
- | OK: A -> result A.
-
-Arguments Err [A].
-Arguments OK [A].
-
-Definition bind {A B: Type} (f: result A) (g: A -> result B): result B :=
- match f with
- | OK x => g x
- | Err => Err
- end.
-
-Definition bind2 {A B C: Type} (f: result (A * B)) (g: A -> B -> result C):
- result C :=
- match f with
- | OK (x, y) => g x y
- | Err => Err
- end.
-
-Notation "'do' X <- A ; B" := (bind A (fun X => B))
- (at level 200, X ident, A at level 100, B at level 200).
-
-Notation "'do' ( X , Y ) <- A ; B" := (bind2 A (fun X Y => B))
- (at level 200, X ident, Y ident, A at level 100, B at level 200).
-
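A small usage sketch of the error monad and the [do] notations above (the helper is illustrative and used nowhere else):

    Definition sum_results (r1 r2: result nat) : result nat :=
      do x <- r1;
      do y <- r2;
      OK (x + y).
    (* [sum_results (OK 1) (OK 2)] computes to [OK 3];
       if either argument is [Err], so is the result. *)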
-(** Some operations on streams **)
-
-(** Concatenation of a list and a stream **)
-Fixpoint app_str {A:Type} (l:list A) (s:Stream A) :=
- match l with
- | nil => s
- | cons t q => Cons t (app_str q s)
- end.
-
-Infix "++" := app_str (right associativity, at level 60).
-
-Lemma app_str_app_assoc {A:Type} (l1 l2:list A) (s:Stream A) :
- l1 ++ (l2 ++ s) = (l1 ++ l2) ++ s.
-Proof.
-induction l1.
-reflexivity.
-simpl.
-rewrite IHl1.
-reflexivity.
-Qed.
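For instance (a sketch), prepending a two-element list with [app_str] simply unfolds into two stream cells:

    Example app_str_cons (a b: nat) (s: Stream nat) :
      (a :: b :: nil) ++ s = Cons a (Cons b s).
    Proof. reflexivity. Qed.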
-
-(** The type of a non initial state: the type of semantic values associated
- with the last symbol of this state. *)
-Definition noninitstate_type state :=
- symbol_semantic_type (last_symb_of_non_init_state state).
-
-(** The stack of the automaton: it is either empty, or it contains a
-    non-initial state, a semantic value for the symbol associated with this
-    state, and a nested stack. **)
-Definition stack := list (sigT noninitstate_type). (* eg. list {state & state_type state} *)
-
-Section Init.
-
-Variable init : initstate.
-
-(** The top state of a stack **)
-Definition state_of_stack (stack:stack): state :=
- match stack with
- | [] => init
- | existT _ s _::_ => s
- end.
-
-(** [pop] pops some symbols from the stack. It returns the remaining stack
-    together with the result of applying [action] to the popped semantic
-    values, and discards the popped states. **)
-Fixpoint pop (symbols_to_pop:list symbol) (stack_cur:stack):
- forall {A:Type} (action:arrows_right A (map symbol_semantic_type symbols_to_pop)),
- result (stack * A) :=
- match symbols_to_pop return forall {A:Type} (action:arrows_right A (map _ symbols_to_pop)), result (stack * A) with
- | [] => fun A action => OK (stack_cur, action)
- | t::q => fun A action =>
- match stack_cur with
- | existT _ state_cur sem::stack_rec =>
- match compare_eqdec (last_symb_of_non_init_state state_cur) t with
- | left e =>
- let sem_conv := eq_rect _ symbol_semantic_type sem _ e in
- pop q stack_rec (action sem_conv)
- | right _ => Err
- end
- | [] => Err
- end
- end.
-
-(** [step_result] represents the result of one step of the automaton: it can
-    fail, accept or progress. [Fail_sr] means that the input is incorrect.
-    [Accept_sr] means that this is the last step of the automaton, and it
-    returns the semantic value of the input word. [Progress_sr] means that
-    some progress has been made, but further steps are needed in order to
-    accept a word.
-
-    For [Accept_sr] and [Progress_sr], the result contains the new input buffer.
-
-    [Fail_sr] means that the input word is rejected by the automaton. It is
-    different from [Err] (from the error monad), which means that the automaton
-    is bogus and has performed a forbidden action. **)
-Inductive step_result :=
- | Fail_sr: step_result
- | Accept_sr: symbol_semantic_type (NT (start_nt init)) -> Stream token -> step_result
- | Progress_sr: stack -> Stream token -> step_result.
-
-Program Definition prod_action':
- forall p,
- arrows_right (symbol_semantic_type (NT (prod_lhs p)))
- (map symbol_semantic_type (prod_rhs_rev p)):=
- fun p =>
- eq_rect _ (fun x => x) (prod_action p) _ _.
-Next Obligation.
-unfold arrows_left, arrows_right; simpl.
-rewrite <- fold_left_rev_right, <- map_rev, rev_involutive.
-reflexivity.
-Qed.
-
-(** [reduce_step] performs a reduce action:
-    - it pops some elements from the stack,
-    - executes the semantic action of the production,
-    - and follows the goto entry for the produced non-terminal symbol. **)
-Definition reduce_step stack_cur production buffer: result step_result :=
- do (stack_new, sem) <-
- pop (prod_rhs_rev production) stack_cur (prod_action' production);
- match goto_table (state_of_stack stack_new) (prod_lhs production) with
- | Some (exist _ state_new e) =>
- let sem := eq_rect _ _ sem _ e in
- OK (Progress_sr (existT noninitstate_type state_new sem::stack_new) buffer)
- | None =>
- match stack_new with
- | [] =>
- match compare_eqdec (prod_lhs production) (start_nt init) with
- | left e =>
- let sem := eq_rect _ (fun nt => symbol_semantic_type (NT nt)) sem _ e in
- OK (Accept_sr sem buffer)
- | right _ => Err
- end
- | _::_ => Err
- end
- end.
-
-(** One step of parsing. **)
-Definition step stack_cur buffer: result step_result :=
- match action_table (state_of_stack stack_cur) with
- | Default_reduce_act production =>
- reduce_step stack_cur production buffer
- | Lookahead_act awt =>
- match Streams.hd buffer with
- | existT _ term sem =>
- match awt term with
- | Shift_act state_new e =>
- let sem_conv := eq_rect _ symbol_semantic_type sem _ e in
- OK (Progress_sr (existT noninitstate_type state_new sem_conv::stack_cur)
- (Streams.tl buffer))
- | Reduce_act production =>
- reduce_step stack_cur production buffer
- | Fail_action =>
- OK Fail_sr
- end
- end
- end.
-
-(** The parser uses a [nat] parameter [n_steps], so that we do not have to
-    prove termination, which is difficult. The result of parsing is therefore
-    either a failure (the automaton has rejected the input word), or a timeout
-    (the automaton has spent all of the given [n_steps]), or a parsed semantic
-    value together with the rest of the input buffer.
-**)
-Inductive parse_result :=
- | Fail_pr: parse_result
- | Timeout_pr: parse_result
- | Parsed_pr: symbol_semantic_type (NT (start_nt init)) -> Stream token -> parse_result.
-
-Fixpoint parse_fix stack_cur buffer n_steps: result parse_result:=
- match n_steps with
- | O => OK Timeout_pr
- | S it =>
- do r <- step stack_cur buffer;
- match r with
- | Fail_sr => OK Fail_pr
- | Accept_sr t buffer_new => OK (Parsed_pr t buffer_new)
- | Progress_sr s buffer_new => parse_fix s buffer_new it
- end
- end.
-
-Definition parse buffer n_steps: result parse_result :=
- parse_fix [] buffer n_steps.
-
-End Init.
-
-Arguments Fail_sr [init].
-Arguments Accept_sr [init] _ _.
-Arguments Progress_sr [init] _ _.
-
-Arguments Fail_pr [init].
-Arguments Timeout_pr [init].
-Arguments Parsed_pr [init] _ _.
-
-End Make.
-
-Module Type T(A:Automaton.T).
- Include (Make A).
-End T.
diff --git a/cparser/MenhirLib/Interpreter_complete.v b/cparser/MenhirLib/Interpreter_complete.v
deleted file mode 100644
index 2e64b8da..00000000
--- a/cparser/MenhirLib/Interpreter_complete.v
+++ /dev/null
@@ -1,686 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import Streams.
-Require Import ProofIrrelevance.
-Require Import Equality.
-Require Import List.
-Require Import Syntax.
-Require Import Alphabet.
-Require Import Arith.
-Require Grammar.
-Require Automaton.
-Require Interpreter.
-Require Validator_complete.
-
-Module Make(Import A:Automaton.T) (Import Inter:Interpreter.T A).
-Module Import Valid := Validator_complete.Make A.
-
-(** * Completeness Proof **)
-
-Section Completeness_Proof.
-
-Hypothesis complete: complete.
-
-Proposition nullable_stable: nullable_stable.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition first_stable: first_stable.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition start_future: start_future.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition terminal_shift: terminal_shift.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition end_reduce: end_reduce.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition start_goto: start_goto.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition non_terminal_goto: non_terminal_goto.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition non_terminal_closed: non_terminal_closed.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-
-(** If the nullable predicate has been validated, then it is correct. **)
-Lemma nullable_correct:
- forall head sem word, word = [] ->
- parse_tree head word sem -> nullable_symb head = true
-with nullable_correct_list:
- forall heads sems word, word = [] ->
- parse_tree_list heads word sems -> nullable_word heads = true.
-Proof with eauto.
-intros.
-destruct X.
-congruence.
-apply nullable_stable...
-intros.
-destruct X; simpl...
-apply andb_true_intro.
-apply app_eq_nil in H; destruct H; split...
-Qed.
-
-(** If the first predicate has been validated, then it is correct. **)
-Lemma first_correct:
- forall head sem word t q, word = t::q ->
- parse_tree head word sem ->
- TerminalSet.In (projT1 t) (first_symb_set head)
-with first_correct_list:
- forall heads sems word t q, word = t::q ->
- parse_tree_list heads word sems ->
- TerminalSet.In (projT1 t) (first_word_set heads).
-Proof with eauto.
-intros.
-destruct X.
-inversion H; subst.
-apply TerminalSet.singleton_2, compare_refl...
-apply first_stable...
-intros.
-destruct X.
-congruence.
-simpl.
-case_eq wordt; intros.
-erewrite nullable_correct...
-apply TerminalSet.union_3.
-subst...
-rewrite H0 in *; inversion H; destruct H2.
-destruct (nullable_symb head_symbolt)...
-apply TerminalSet.union_2...
-Qed.
-
-Variable init: initstate.
-Variable full_word: list token.
-Variable buffer_end: Stream token.
-Variable full_sem: symbol_semantic_type (NT (start_nt init)).
-
-Inductive pt_zipper:
- forall (hole_symb:symbol) (hole_word:list token)
- (hole_sem:symbol_semantic_type hole_symb), Type :=
-| Top_ptz:
- pt_zipper (NT (start_nt init)) (full_word) (full_sem)
-| Cons_ptl_ptz:
- forall {head_symbolt:symbol}
- {wordt:list token}
- {semantic_valuet:symbol_semantic_type head_symbolt},
-
- forall {head_symbolsq:list symbol}
- {wordq:list token}
- {semantic_valuesq:tuple (map symbol_semantic_type head_symbolsq)},
- parse_tree_list head_symbolsq wordq semantic_valuesq ->
-
- ptl_zipper (head_symbolt::head_symbolsq) (wordt++wordq)
- (semantic_valuet,semantic_valuesq) ->
-
- pt_zipper head_symbolt wordt semantic_valuet
-with ptl_zipper:
- forall (hole_symbs:list symbol) (hole_word:list token)
- (hole_sems:tuple (map symbol_semantic_type hole_symbs)), Type :=
-| Non_terminal_pt_ptlz:
- forall {p:production} {word:list token}
- {semantic_values:tuple (map symbol_semantic_type (rev (prod_rhs_rev p)))},
- pt_zipper (NT (prod_lhs p)) word (uncurry (prod_action p) semantic_values) ->
- ptl_zipper (rev (prod_rhs_rev p)) word semantic_values
-
-| Cons_ptl_ptlz:
- forall {head_symbolt:symbol}
- {wordt:list token}
- {semantic_valuet:symbol_semantic_type head_symbolt},
- parse_tree head_symbolt wordt semantic_valuet ->
-
- forall {head_symbolsq:list symbol}
- {wordq:list token}
- {semantic_valuesq:tuple (map symbol_semantic_type head_symbolsq)},
-
- ptl_zipper (head_symbolt::head_symbolsq) (wordt++wordq)
- (semantic_valuet,semantic_valuesq) ->
-
- ptl_zipper head_symbolsq wordq semantic_valuesq.
-
-Fixpoint ptlz_cost {hole_symbs hole_word hole_sems}
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems) :=
- match ptlz with
- | Non_terminal_pt_ptlz ptz =>
- ptz_cost ptz
- | Cons_ptl_ptlz pt ptlz' =>
- ptlz_cost ptlz'
- end
-with ptz_cost {hole_symb hole_word hole_sem}
- (ptz:pt_zipper hole_symb hole_word hole_sem) :=
- match ptz with
- | Top_ptz => 0
- | Cons_ptl_ptz ptl ptlz' =>
- 1 + ptl_size ptl + ptlz_cost ptlz'
- end.
-
-Inductive pt_dot: Type :=
-| Reduce_ptd: ptl_zipper [] [] () -> pt_dot
-| Shift_ptd:
- forall (term:terminal) (sem: symbol_semantic_type (T term))
- {symbolsq wordq semsq},
- parse_tree_list symbolsq wordq semsq ->
- ptl_zipper (T term::symbolsq) (existT (fun t => symbol_semantic_type (T t)) term sem::wordq) (sem, semsq) ->
- pt_dot.
-
-Definition ptd_cost (ptd:pt_dot) :=
- match ptd with
- | Reduce_ptd ptlz => ptlz_cost ptlz
- | Shift_ptd _ _ ptl ptlz => 1 + ptl_size ptl + ptlz_cost ptlz
- end.
-
-Fixpoint ptlz_buffer {hole_symbs hole_word hole_sems}
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems): Stream token :=
- match ptlz with
- | Non_terminal_pt_ptlz ptz =>
- ptz_buffer ptz
- | Cons_ptl_ptlz _ ptlz' =>
- ptlz_buffer ptlz'
- end
-with ptz_buffer {hole_symb hole_word hole_sem}
- (ptz:pt_zipper hole_symb hole_word hole_sem): Stream token :=
- match ptz with
- | Top_ptz => buffer_end
- | @Cons_ptl_ptz _ _ _ _ wordq _ ptl ptlz' =>
- wordq++ptlz_buffer ptlz'
- end.
-
-Definition ptd_buffer (ptd:pt_dot) :=
- match ptd with
- | Reduce_ptd ptlz => ptlz_buffer ptlz
- | @Shift_ptd term sem _ wordq _ _ ptlz =>
- Cons (existT (fun t => symbol_semantic_type (T t)) term sem)
- (wordq ++ ptlz_buffer ptlz)
- end.
-
-Fixpoint ptlz_prod {hole_symbs hole_word hole_sems}
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems): production :=
- match ptlz with
- | @Non_terminal_pt_ptlz prod _ _ _ => prod
- | Cons_ptl_ptlz _ ptlz' =>
- ptlz_prod ptlz'
- end.
-
-Fixpoint ptlz_past {hole_symbs hole_word hole_sems}
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems): list symbol :=
- match ptlz with
- | Non_terminal_pt_ptlz _ => []
- | @Cons_ptl_ptlz s _ _ _ _ _ _ ptlz' => s::ptlz_past ptlz'
- end.
-
-Lemma ptlz_past_ptlz_prod:
- forall hole_symbs hole_word hole_sems
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems),
- rev_append hole_symbs (ptlz_past ptlz) = prod_rhs_rev (ptlz_prod ptlz).
-Proof.
-fix ptlz_past_ptlz_prod 4.
-destruct ptlz; simpl.
-rewrite <- rev_alt, rev_involutive; reflexivity.
-apply (ptlz_past_ptlz_prod _ _ _ ptlz).
-Qed.
-
-Definition ptlz_state_compat {hole_symbs hole_word hole_sems}
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems)
- (state:state): Prop :=
- state_has_future state (ptlz_prod ptlz) hole_symbs
- (projT1 (Streams.hd (ptlz_buffer ptlz))).
-
-Fixpoint ptlz_stack_compat {hole_symbs hole_word hole_sems}
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems)
- (stack:stack): Prop :=
- ptlz_state_compat ptlz (state_of_stack init stack) /\
- match ptlz with
- | Non_terminal_pt_ptlz ptz =>
- ptz_stack_compat ptz stack
- | @Cons_ptl_ptlz _ _ sem _ _ _ _ ptlz' =>
- match stack with
- | [] => False
- | existT _ _ sem'::stackq =>
- ptlz_stack_compat ptlz' stackq /\
- sem ~= sem'
- end
- end
-with ptz_stack_compat {hole_symb hole_word hole_sem}
- (ptz:pt_zipper hole_symb hole_word hole_sem)
- (stack:stack): Prop :=
- match ptz with
- | Top_ptz => stack = []
- | Cons_ptl_ptz _ ptlz' =>
- ptlz_stack_compat ptlz' stack
- end.
-
-Lemma ptlz_stack_compat_ptlz_state_compat:
- forall hole_symbs hole_word hole_sems
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems)
- (stack:stack),
- ptlz_stack_compat ptlz stack -> ptlz_state_compat ptlz (state_of_stack init stack).
-Proof.
-destruct ptlz; simpl; intuition.
-Qed.
-
-Definition ptd_stack_compat (ptd:pt_dot) (stack:stack): Prop :=
- match ptd with
- | Reduce_ptd ptlz => ptlz_stack_compat ptlz stack
- | Shift_ptd _ _ _ ptlz => ptlz_stack_compat ptlz stack
- end.
-
-Fixpoint build_pt_dot {hole_symbs hole_word hole_sems}
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems)
- :pt_dot :=
- match ptl in parse_tree_list hole_symbs hole_word hole_sems
- return ptl_zipper hole_symbs hole_word hole_sems -> _
- with
- | Nil_ptl => fun ptlz =>
- Reduce_ptd ptlz
- | Cons_ptl pt ptl' =>
- match pt in parse_tree hole_symb hole_word hole_sem
- return ptl_zipper (hole_symb::_) (hole_word++_) (hole_sem,_) -> _
- with
- | Terminal_pt term sem => fun ptlz =>
- Shift_ptd term sem ptl' ptlz
- | Non_terminal_pt ptl'' => fun ptlz =>
- build_pt_dot ptl''
- (Non_terminal_pt_ptlz (Cons_ptl_ptz ptl' ptlz))
- end
- end ptlz.
-
-Lemma build_pt_dot_cost:
- forall hole_symbs hole_word hole_sems
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems),
- ptd_cost (build_pt_dot ptl ptlz) = ptl_size ptl + ptlz_cost ptlz.
-Proof.
-fix build_pt_dot_cost 4.
-destruct ptl; intros.
-reflexivity.
-destruct p.
-reflexivity.
-simpl; rewrite build_pt_dot_cost.
-simpl; rewrite <- plus_n_Sm, Nat.add_assoc; reflexivity.
-Qed.
-
-Lemma build_pt_dot_buffer:
- forall hole_symbs hole_word hole_sems
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems),
- ptd_buffer (build_pt_dot ptl ptlz) = hole_word ++ ptlz_buffer ptlz.
-Proof.
-fix build_pt_dot_buffer 4.
-destruct ptl; intros.
-reflexivity.
-destruct p.
-reflexivity.
-simpl; rewrite build_pt_dot_buffer.
-apply app_str_app_assoc.
-Qed.
-
-Lemma ptd_stack_compat_build_pt_dot:
- forall hole_symbs hole_word hole_sems
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems)
- (stack:stack),
- ptlz_stack_compat ptlz stack ->
- ptd_stack_compat (build_pt_dot ptl ptlz) stack.
-Proof.
-fix ptd_stack_compat_build_pt_dot 4.
-destruct ptl; intros.
-eauto.
-destruct p.
-eauto.
-simpl.
-apply ptd_stack_compat_build_pt_dot.
-split.
-apply ptlz_stack_compat_ptlz_state_compat, non_terminal_closed in H.
-apply H; clear H; eauto.
-destruct wordq.
-right; split.
-eauto.
-eapply nullable_correct_list; eauto.
-left.
-eapply first_correct_list; eauto.
-eauto.
-Qed.
-
-Program Fixpoint pop_ptlz {hole_symbs hole_word hole_sems}
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems):
- { word:_ & { sem:_ &
- (pt_zipper (NT (prod_lhs (ptlz_prod ptlz))) word sem *
- parse_tree (NT (prod_lhs (ptlz_prod ptlz))) word sem)%type } } :=
- match ptlz in ptl_zipper hole_symbs hole_word hole_sems
- return parse_tree_list hole_symbs hole_word hole_sems ->
- { word:_ & { sem:_ &
- (pt_zipper (NT (prod_lhs (ptlz_prod ptlz))) word sem *
- parse_tree (NT (prod_lhs (ptlz_prod ptlz))) word sem)%type } }
- with
- | @Non_terminal_pt_ptlz prod word sem ptz => fun ptl =>
- let sem := uncurry (prod_action prod) sem in
- existT _ word (existT _ sem (ptz, Non_terminal_pt ptl))
- | Cons_ptl_ptlz pt ptlz' => fun ptl =>
- pop_ptlz (Cons_ptl pt ptl) ptlz'
- end ptl.
-
-Lemma pop_ptlz_cost:
- forall hole_symbs hole_word hole_sems
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems),
- let 'existT _ word (existT _ sem (ptz, pt)) := pop_ptlz ptl ptlz in
- ptlz_cost ptlz = ptz_cost ptz.
-Proof.
-fix pop_ptlz_cost 5.
-destruct ptlz.
-reflexivity.
-simpl; apply pop_ptlz_cost.
-Qed.
-
-Lemma pop_ptlz_buffer:
- forall hole_symbs hole_word hole_sems
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems),
- let 'existT _ word (existT _ sem (ptz, pt)) := pop_ptlz ptl ptlz in
- ptlz_buffer ptlz = ptz_buffer ptz.
-Proof.
-fix pop_ptlz_buffer 5.
-destruct ptlz.
-reflexivity.
-simpl; apply pop_ptlz_buffer.
-Qed.
-
-Lemma pop_ptlz_pop_stack_compat_converter:
- forall A hole_symbs hole_word hole_sems (ptlz:ptl_zipper hole_symbs hole_word hole_sems),
- arrows_left (map symbol_semantic_type (rev (prod_rhs_rev (ptlz_prod ptlz)))) A =
- arrows_left (map symbol_semantic_type hole_symbs)
- (arrows_right A (map symbol_semantic_type (ptlz_past ptlz))).
-Proof.
-intros.
-rewrite <- ptlz_past_ptlz_prod.
-unfold arrows_right, arrows_left.
-rewrite rev_append_rev, map_rev, map_app, map_rev, <- fold_left_rev_right, rev_involutive, fold_right_app.
-rewrite fold_left_rev_right.
-reflexivity.
-Qed.
-
-Lemma pop_ptlz_pop_stack_compat:
- forall hole_symbs hole_word hole_sems
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems)
- (stack:stack),
-
- ptlz_stack_compat ptlz stack ->
-
- let action' :=
- eq_rect _ (fun x=>x) (prod_action (ptlz_prod ptlz)) _
- (pop_ptlz_pop_stack_compat_converter _ _ _ _ _)
- in
- let 'existT _ word (existT _ sem (ptz, pt)) := pop_ptlz ptl ptlz in
- match pop (ptlz_past ptlz) stack (uncurry action' hole_sems) with
- | OK (stack', sem') =>
- ptz_stack_compat ptz stack' /\ sem = sem'
- | Err => True
- end.
-Proof.
-Opaque AlphabetComparable AlphabetComparableUsualEq.
-fix pop_ptlz_pop_stack_compat 5.
-destruct ptlz. intros; simpl.
-split.
-apply H.
-eapply (f_equal (fun X => uncurry X semantic_values)).
-apply JMeq_eq, JMeq_sym, JMeq_eqrect with (P:=fun x => x).
-simpl; intros; destruct stack0.
-destruct (proj2 H).
-simpl in H; destruct H.
-destruct s as (state, sem').
-destruct H0.
-specialize (pop_ptlz_pop_stack_compat _ _ _ (Cons_ptl p ptl) ptlz _ H0).
-destruct (pop_ptlz (Cons_ptl p ptl) ptlz) as [word [sem []]].
-destruct (compare_eqdec (last_symb_of_non_init_state state) head_symbolt); intuition.
-eapply JMeq_sym, JMeq_trans, JMeq_sym, JMeq_eq in H1; [|apply JMeq_eqrect with (e:=e)].
-rewrite <- H1.
-simpl in pop_ptlz_pop_stack_compat.
-erewrite proof_irrelevance with (p1:=pop_ptlz_pop_stack_compat_converter _ _ _ _ _).
-apply pop_ptlz_pop_stack_compat.
-Transparent AlphabetComparable AlphabetComparableUsualEq.
-Qed.
-
-Definition next_ptd (ptd:pt_dot): option pt_dot :=
- match ptd with
- | Shift_ptd term sem ptl ptlz =>
- Some (build_pt_dot ptl (Cons_ptl_ptlz (Terminal_pt term sem) ptlz))
- | Reduce_ptd ptlz =>
- let 'existT _ _ (existT _ _ (ptz, pt)) := pop_ptlz Nil_ptl ptlz in
- match ptz in pt_zipper sym _ _ return parse_tree sym _ _ -> _ with
- | Top_ptz => fun pt => None
- | Cons_ptl_ptz ptl ptlz' =>
- fun pt => Some (build_pt_dot ptl (Cons_ptl_ptlz pt ptlz'))
- end pt
- end.
-
-Lemma next_ptd_cost:
- forall ptd,
- match next_ptd ptd with
- | None => ptd_cost ptd = 0
- | Some ptd' => ptd_cost ptd = S (ptd_cost ptd')
- end.
-Proof.
-destruct ptd. unfold next_ptd.
-pose proof (pop_ptlz_cost _ _ _ Nil_ptl p).
-destruct (pop_ptlz Nil_ptl p) as [word[sem[[]]]].
-assumption.
-rewrite build_pt_dot_cost.
-assumption.
-simpl; rewrite build_pt_dot_cost; reflexivity.
-Qed.
-
-Lemma reduce_step_next_ptd:
- forall (ptlz:ptl_zipper [] [] ()) (stack:stack),
- ptlz_stack_compat ptlz stack ->
- match reduce_step init stack (ptlz_prod ptlz) (ptlz_buffer ptlz) with
- | OK Fail_sr =>
- False
- | OK (Accept_sr sem buffer) =>
- sem = full_sem /\ buffer = buffer_end /\ next_ptd (Reduce_ptd ptlz) = None
- | OK (Progress_sr stack buffer) =>
- match next_ptd (Reduce_ptd ptlz) with
- | None => False
- | Some ptd =>
- ptd_stack_compat ptd stack /\ buffer = ptd_buffer ptd
- end
- | Err =>
- True
- end.
-Proof.
-intros.
-unfold reduce_step, next_ptd.
-apply pop_ptlz_pop_stack_compat with (ptl:=Nil_ptl) in H.
-pose proof (pop_ptlz_buffer _ _ _ Nil_ptl ptlz).
-destruct (pop_ptlz Nil_ptl ptlz) as [word [sem [ptz pt]]].
-rewrite H0; clear H0.
-revert H.
-match goal with
- |- match ?p1 with Err => _ | OK _ => _ end -> match bind2 ?p2 _ with Err => _ | OK _ => _ end =>
- replace p1 with p2; [destruct p2 as [|[]]; intros|]
-end.
-assumption.
-simpl.
-destruct H; subst.
-generalize dependent s0.
-generalize (prod_lhs (ptlz_prod ptlz)); clear ptlz stack0.
-dependent destruction ptz; intros.
-simpl in H; subst; simpl.
-pose proof start_goto; unfold Valid.start_goto in H; rewrite H.
-destruct (compare_eqdec (start_nt init) (start_nt init)); intuition.
-apply JMeq_eq, JMeq_eqrect with (P:=fun nt => symbol_semantic_type (NT nt)).
-pose proof (ptlz_stack_compat_ptlz_state_compat _ _ _ _ _ H).
-apply non_terminal_goto in H0.
-destruct (goto_table (state_of_stack init s) n) as [[]|]; intuition.
-apply ptd_stack_compat_build_pt_dot; simpl; intuition.
-symmetry; apply JMeq_eqrect with (P:=symbol_semantic_type).
-symmetry; apply build_pt_dot_buffer.
-destruct s; intuition.
-simpl in H; apply ptlz_stack_compat_ptlz_state_compat in H.
-destruct (H0 _ _ _ H eq_refl).
-generalize (pop_ptlz_pop_stack_compat_converter (symbol_semantic_type (NT (prod_lhs (ptlz_prod ptlz)))) _ _ _ ptlz).
-pose proof (ptlz_past_ptlz_prod _ _ _ ptlz); simpl in H.
-rewrite H; clear H.
-intro; f_equal; simpl.
-apply JMeq_eq.
-etransitivity.
-apply JMeq_eqrect with (P:=fun x => x).
-symmetry.
-apply JMeq_eqrect with (P:=fun x => x).
-Qed.
-
-Lemma step_next_ptd:
- forall (ptd:pt_dot) (stack:stack),
- ptd_stack_compat ptd stack ->
- match step init stack (ptd_buffer ptd) with
- | OK Fail_sr =>
- False
- | OK (Accept_sr sem buffer) =>
- sem = full_sem /\ buffer = buffer_end /\ next_ptd ptd = None
- | OK (Progress_sr stack buffer) =>
- match next_ptd ptd with
- | None => False
- | Some ptd =>
- ptd_stack_compat ptd stack /\ buffer = ptd_buffer ptd
- end
- | Err =>
- True
- end.
-Proof.
-intros.
-destruct ptd.
-pose proof (ptlz_stack_compat_ptlz_state_compat _ _ _ _ _ H).
-apply end_reduce in H0.
-unfold step.
-destruct (action_table (state_of_stack init stack0)).
-rewrite H0 by reflexivity.
-apply reduce_step_next_ptd; assumption.
-simpl; destruct (Streams.hd (ptlz_buffer p)); simpl in H0.
-destruct (l x); intuition; rewrite H1.
-apply reduce_step_next_ptd; assumption.
-pose proof (ptlz_stack_compat_ptlz_state_compat _ _ _ _ _ H).
-apply terminal_shift in H0.
-unfold step.
-destruct (action_table (state_of_stack init stack0)); intuition.
-simpl; destruct (Streams.hd (ptlz_buffer p0)) as [] eqn:?.
-destruct (l term); intuition.
-apply ptd_stack_compat_build_pt_dot; simpl; intuition.
-unfold ptlz_state_compat; simpl; destruct Heqt; assumption.
-symmetry; apply JMeq_eqrect with (P:=symbol_semantic_type).
-rewrite build_pt_dot_buffer; reflexivity.
-Qed.
-
-Lemma parse_fix_complete:
- forall (ptd:pt_dot) (stack:stack) (n_steps:nat),
- ptd_stack_compat ptd stack ->
- match parse_fix init stack (ptd_buffer ptd) n_steps with
- | OK (Parsed_pr sem_res buffer_end_res) =>
- sem_res = full_sem /\ buffer_end_res = buffer_end /\
- S (ptd_cost ptd) <= n_steps
- | OK Fail_pr => False
- | OK Timeout_pr => n_steps < S (ptd_cost ptd)
- | Err => True
- end.
-Proof.
-fix parse_fix_complete 3.
-destruct n_steps; intros; simpl.
-apply Nat.lt_0_succ.
-apply step_next_ptd in H.
-pose proof (next_ptd_cost ptd).
-destruct (step init stack0 (ptd_buffer ptd)) as [|[]]; simpl; intuition.
-rewrite H3 in H0; rewrite H0.
-apply le_n_S, Nat.le_0_l.
-destruct (next_ptd ptd); intuition; subst.
-eapply parse_fix_complete with (n_steps:=n_steps) in H1.
-rewrite H0.
-destruct (parse_fix init s (ptd_buffer p) n_steps) as [|[]]; try assumption.
-apply lt_n_S; assumption.
-destruct H1 as [H1 []]; split; [|split]; try assumption.
-apply le_n_S; assumption.
-Qed.
-
-Variable full_pt: parse_tree (NT (start_nt init)) full_word full_sem.
-
-Definition init_ptd :=
- match full_pt in parse_tree head full_word full_sem return
- pt_zipper head full_word full_sem ->
- match head return Type with | T _ => unit | NT _ => pt_dot end
- with
- | Terminal_pt _ _ => fun _ => ()
- | Non_terminal_pt ptl =>
- fun top => build_pt_dot ptl (Non_terminal_pt_ptlz top)
- end Top_ptz.
-
-Lemma init_ptd_compat:
- ptd_stack_compat init_ptd [].
-Proof.
-unfold init_ptd.
-assert (ptz_stack_compat Top_ptz []) by reflexivity.
-pose proof (start_future init); revert H0.
-generalize dependent Top_ptz.
-generalize dependent full_word.
-generalize full_sem.
-generalize (start_nt init).
-dependent destruction full_pt0.
-intros.
-apply ptd_stack_compat_build_pt_dot; simpl; intuition.
-apply H0; reflexivity.
-Qed.
-
-Lemma init_ptd_cost:
- S (ptd_cost init_ptd) = pt_size full_pt.
-Proof.
-unfold init_ptd.
-assert (ptz_cost Top_ptz = 0) by reflexivity.
-generalize dependent Top_ptz.
-generalize dependent full_word.
-generalize full_sem.
-generalize (start_nt init).
-dependent destruction full_pt0.
-intros.
-rewrite build_pt_dot_cost; simpl.
-rewrite H, Nat.add_0_r; reflexivity.
-Qed.
-
-Lemma init_ptd_buffer:
- ptd_buffer init_ptd = full_word ++ buffer_end.
-Proof.
-unfold init_ptd.
-assert (ptz_buffer Top_ptz = buffer_end) by reflexivity.
-generalize dependent Top_ptz.
-generalize dependent full_word.
-generalize full_sem.
-generalize (start_nt init).
-dependent destruction full_pt0.
-intros.
-rewrite build_pt_dot_buffer; simpl.
-rewrite H; reflexivity.
-Qed.
-
-Theorem parse_complete n_steps:
- match parse init (full_word ++ buffer_end) n_steps with
- | OK (Parsed_pr sem_res buffer_end_res) =>
- sem_res = full_sem /\ buffer_end_res = buffer_end /\
- pt_size full_pt <= n_steps
- | OK Fail_pr => False
- | OK Timeout_pr => n_steps < pt_size full_pt
- | Err => True
- end.
-Proof.
-pose proof (parse_fix_complete init_ptd [] n_steps init_ptd_compat).
-rewrite init_ptd_buffer, init_ptd_cost in H.
-apply H.
-Qed.
-
-End Completeness_Proof.
-
-End Make.
diff --git a/cparser/MenhirLib/Interpreter_correct.v b/cparser/MenhirLib/Interpreter_correct.v
deleted file mode 100644
index 1263d4e3..00000000
--- a/cparser/MenhirLib/Interpreter_correct.v
+++ /dev/null
@@ -1,228 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import Streams.
-Require Import List.
-Require Import Syntax.
-Require Import Equality.
-Require Import Alphabet.
-Require Grammar.
-Require Automaton.
-Require Interpreter.
-
-Module Make(Import A:Automaton.T) (Import Inter:Interpreter.T A).
-
-(** * Correctness of the interpreter **)
-
-(** We prove that, in any case, if the interpreter accepts and returns a
-    semantic value, then this value is a semantic value of the input word. **)
-
-Section Init.
-
-Variable init:initstate.
-
-(** [word_has_stack_semantics] relates a word with a stack, stating that the
-    word is a concatenation of words that have the semantic values stored in
-    the stack. **)
-Inductive word_has_stack_semantics:
- forall (word:list token) (stack:stack), Prop :=
- | Nil_stack_whss: word_has_stack_semantics [] []
- | Cons_stack_whss:
- forall (wordq:list token) (stackq:stack),
- word_has_stack_semantics wordq stackq ->
-
- forall (wordt:list token) (s:noninitstate)
- (semantic_valuet:_),
- inhabited (parse_tree (last_symb_of_non_init_state s) wordt semantic_valuet) ->
-
- word_has_stack_semantics
- (wordq++wordt) (existT noninitstate_type s semantic_valuet::stackq).
-
-Lemma pop_invariant_converter:
- forall A symbols_to_pop symbols_popped,
- arrows_left (map symbol_semantic_type (rev_append symbols_to_pop symbols_popped)) A =
- arrows_left (map symbol_semantic_type symbols_popped)
- (arrows_right A (map symbol_semantic_type symbols_to_pop)).
-Proof.
-intros.
-unfold arrows_right, arrows_left.
-rewrite rev_append_rev, map_app, map_rev, fold_left_app.
-apply f_equal.
-rewrite <- fold_left_rev_right, rev_involutive.
-reflexivity.
-Qed.
-
-(** [pop] preserves the invariant **)
-Lemma pop_invariant:
- forall (symbols_to_pop symbols_popped:list symbol)
- (stack_cur:stack)
- (A:Type)
- (action:arrows_left (map symbol_semantic_type (rev_append symbols_to_pop symbols_popped)) A),
- forall word_stack word_popped,
- forall sem_popped,
- word_has_stack_semantics word_stack stack_cur ->
- inhabited (parse_tree_list symbols_popped word_popped sem_popped) ->
- let action' := eq_rect _ (fun x=>x) action _ (pop_invariant_converter _ _ _) in
- match pop symbols_to_pop stack_cur (uncurry action' sem_popped) with
- | OK (stack_new, sem) =>
- exists word1res word2res sem_full,
- (word_stack = word1res ++ word2res)%list /\
- word_has_stack_semantics word1res stack_new /\
- sem = uncurry action sem_full /\
- inhabited (
- parse_tree_list (rev_append symbols_to_pop symbols_popped) (word2res++word_popped) sem_full)
- | Err => True
- end.
-Proof.
-induction symbols_to_pop; intros; unfold pop; fold pop.
-exists word_stack, ([]:list token), sem_popped; intuition.
-f_equal.
-apply JMeq_eq, JMeq_eqrect with (P:=(fun x => x)).
-destruct stack_cur as [|[]]; eauto.
-destruct (compare_eqdec (last_symb_of_non_init_state x) a); eauto.
-destruct e; simpl.
-dependent destruction H.
-destruct H0, H1. apply (Cons_ptl X), inhabits in X0.
-specialize (IHsymbols_to_pop _ _ _ action0 _ _ _ H X0).
-match goal with
- IHsymbols_to_pop:match ?p1 with Err => _ | OK _ => _ end |- match ?p2 with Err => _ | OK _ => _ end =>
- replace p2 with p1; [destruct p1 as [|[]]|]; intuition
-end.
-destruct IHsymbols_to_pop as [word1res [word2res [sem_full []]]]; intuition; subst.
-exists word1res.
-eexists.
-exists sem_full.
-intuition.
-rewrite <- app_assoc; assumption.
-simpl; f_equal; f_equal.
-apply JMeq_eq.
-etransitivity.
-apply JMeq_eqrect with (P:=(fun x => x)).
-symmetry.
-apply JMeq_eqrect with (P:=(fun x => x)).
-Qed.
-
-(** [reduce_step] preserves the invariant **)
-Lemma reduce_step_invariant (stack:stack) (prod:production):
- forall word buffer, word_has_stack_semantics word stack ->
- match reduce_step init stack prod buffer with
- | OK (Accept_sr sem buffer_new) =>
- buffer = buffer_new /\
- inhabited (parse_tree (NT (start_nt init)) word sem)
- | OK (Progress_sr stack_new buffer_new) =>
- buffer = buffer_new /\
- word_has_stack_semantics word stack_new
- | Err | OK Fail_sr => True
- end.
-Proof with eauto.
-intros.
-unfold reduce_step.
-pose proof (pop_invariant (prod_rhs_rev prod) [] stack (symbol_semantic_type (NT (prod_lhs prod)))).
-revert H0.
-generalize (pop_invariant_converter (symbol_semantic_type (NT (prod_lhs prod))) (prod_rhs_rev prod) []).
-rewrite <- rev_alt.
-intros.
-specialize (H0 (prod_action prod) _ [] () H (inhabits Nil_ptl)).
-match goal with H0:let action' := ?a in _ |- _ => replace a with (prod_action' prod) in H0 end.
-simpl in H0.
-destruct (pop (prod_rhs_rev prod) stack (prod_action' prod)) as [|[]]; intuition.
-destruct H0 as [word1res [word2res [sem_full]]]; intuition.
-destruct H4; apply Non_terminal_pt, inhabits in X.
-match goal with X:inhabited (parse_tree _ _ ?u) |- _ => replace u with s0 in X end.
-clear sem_full H2.
-simpl; destruct (goto_table (state_of_stack init s) (prod_lhs prod)) as [[]|]; intuition; subst.
-rewrite app_nil_r in X; revert s0 X; rewrite e0; intro; simpl.
-constructor...
-destruct s; intuition.
-destruct (compare_eqdec (prod_lhs prod) (start_nt init)); intuition.
-rewrite app_nil_r in X.
-rewrite <- e0.
-inversion H0.
-destruct X; constructor...
-apply JMeq_eq.
-etransitivity.
-apply JMeq_eqrect with (P:=(fun x => x)).
-symmetry.
-apply JMeq_eqrect with (P:=(fun x => x)).
-Qed.
-
-(** [step] preserves the invariant **)
-Lemma step_invariant (stack:stack) (buffer:Stream token):
- forall buffer_tmp,
- (exists word_old,
- buffer = word_old ++ buffer_tmp /\
- word_has_stack_semantics word_old stack) ->
- match step init stack buffer_tmp with
- | OK (Accept_sr sem buffer_new) =>
- exists word_new,
- buffer = word_new ++ buffer_new /\
- inhabited (parse_tree (NT (start_nt init)) word_new sem)
- | OK (Progress_sr stack_new buffer_new) =>
- exists word_new,
- buffer = word_new ++ buffer_new /\
- word_has_stack_semantics word_new stack_new
- | Err | OK Fail_sr => True
- end.
-Proof with eauto.
-intros.
-destruct H, H.
-unfold step.
-destruct (action_table (state_of_stack init stack)).
-pose proof (reduce_step_invariant stack p x buffer_tmp).
-destruct (reduce_step init stack p buffer_tmp) as [|[]]; intuition; subst...
-destruct buffer_tmp.
-unfold Streams.hd.
-destruct t.
-destruct (l x0); intuition.
-exists (x ++ [existT (fun t => symbol_semantic_type (T t)) x0 s])%list.
-split.
-now rewrite <- app_str_app_assoc; intuition.
-apply Cons_stack_whss; intuition.
-destruct e; simpl.
-now exact (inhabits (Terminal_pt _ _)).
-match goal with |- (match reduce_step init stack p ?buff with Err => _ | OK _ => _ end) =>
- pose proof (reduce_step_invariant stack p x buff);
- destruct (reduce_step init stack p buff) as [|[]]; intuition; subst
-end...
-Qed.
-
-(** The interpreter is correct: if it returns a semantic value, then the input
-    word has this semantic value.
-**)
-Theorem parse_correct buffer n_steps:
- match parse init buffer n_steps with
- | OK (Parsed_pr sem buffer_new) =>
- exists word_new,
- buffer = word_new ++ buffer_new /\
- inhabited (parse_tree (NT (start_nt init)) word_new sem)
- | _ => True
- end.
-Proof.
-unfold parse.
-assert (exists w, buffer = w ++ buffer /\ word_has_stack_semantics w []).
-exists ([]:list token); intuition.
-now apply Nil_stack_whss.
-revert H.
-generalize ([]:stack), buffer at 2 3.
-induction n_steps; simpl; intuition.
-pose proof (step_invariant _ _ _ H).
-destruct (step init s buffer0); simpl; intuition.
-destruct s0; intuition.
-apply IHn_steps; intuition.
-Qed.
-
-End Init.
-
-End Make.
diff --git a/cparser/MenhirLib/Interpreter_safe.v b/cparser/MenhirLib/Interpreter_safe.v
deleted file mode 100644
index a1aa35b8..00000000
--- a/cparser/MenhirLib/Interpreter_safe.v
+++ /dev/null
@@ -1,275 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import Streams.
-Require Import Equality.
-Require Import List.
-Require Import Syntax.
-Require Import Alphabet.
-Require Grammar.
-Require Automaton.
-Require Validator_safe.
-Require Interpreter.
-
-Module Make(Import A:Automaton.T) (Import Inter:Interpreter.T A).
-Module Import Valid := Validator_safe.Make A.
-
-(** * A correct automaton does not crash **)
-
-Section Safety_proof.
-
-Hypothesis safe: safe.
-
-Proposition shift_head_symbs: shift_head_symbs.
-Proof. pose proof safe; unfold Valid.safe in H; intuition. Qed.
-Proposition goto_head_symbs: goto_head_symbs.
-Proof. pose proof safe; unfold Valid.safe in H; intuition. Qed.
-Proposition shift_past_state: shift_past_state.
-Proof. pose proof safe; unfold Valid.safe in H; intuition. Qed.
-Proposition goto_past_state: goto_past_state.
-Proof. pose proof safe; unfold Valid.safe in H; intuition. Qed.
-Proposition reduce_ok: reduce_ok.
-Proof. pose proof safe; unfold Valid.safe in H; intuition. Qed.
-
-(** We prove that a correct automaton won't crash: the interpreter will
-    not return [Err]. **)
-
-Variable init : initstate.
-
-(** The stack of states of an automaton stack **)
-Definition state_stack_of_stack (stack:stack) :=
- (List.map
- (fun cell:sigT noninitstate_type => singleton_state_pred (projT1 cell))
- stack ++ [singleton_state_pred init])%list.
-
-(** The stack of symbols of an automaton stack **)
-Definition symb_stack_of_stack (stack:stack) :=
- List.map
- (fun cell:sigT noninitstate_type => last_symb_of_non_init_state (projT1 cell))
- stack.
-
-(** The stack invariant: it basically states that the assumptions on the
-    states are true. **)
-Inductive stack_invariant: stack -> Prop :=
- | stack_invariant_constr:
- forall stack,
- prefix (head_symbs_of_state (state_of_stack init stack))
- (symb_stack_of_stack stack) ->
- prefix_pred (head_states_of_state (state_of_stack init stack))
- (state_stack_of_stack stack) ->
- stack_invariant_next stack ->
- stack_invariant stack
-with stack_invariant_next: stack -> Prop :=
- | stack_invariant_next_nil:
- stack_invariant_next []
- | stack_invariant_next_cons:
- forall state_cur st stack_rec,
- stack_invariant stack_rec ->
- stack_invariant_next (existT _ state_cur st::stack_rec).
-
-(** [pop] preserves the stack invariant and does not crash,
-    under the assumption that we are allowed to pop at this point.
-    Moreover, after a pop, the top state of the stack is allowed. **)
-Lemma pop_stack_invariant_conserved
- (symbols_to_pop:list symbol) (stack_cur:stack) A action:
- stack_invariant stack_cur ->
- prefix symbols_to_pop (head_symbs_of_state (state_of_stack init stack_cur)) ->
- match pop symbols_to_pop stack_cur (A:=A) action with
- | OK (stack_new, _) =>
- stack_invariant stack_new /\
- state_valid_after_pop
- (state_of_stack init stack_new) symbols_to_pop
- (head_states_of_state (state_of_stack init stack_cur))
- | Err => False
- end.
-Proof with eauto.
- intros.
- pose proof H.
- destruct H.
- revert H H0 H1 H2 H3.
- generalize (head_symbs_of_state (state_of_stack init stack0)).
- generalize (head_states_of_state (state_of_stack init stack0)).
- revert stack0 A action.
- induction symbols_to_pop; intros.
- - split...
- destruct l; constructor.
- inversion H2; subst.
- specialize (H7 (state_of_stack init stack0)).
- destruct (f2 (state_of_stack init stack0)) as [] eqn:? ...
- destruct stack0 as [|[]]; simpl in *.
- + inversion H6; subst.
- unfold singleton_state_pred in Heqb0.
- now rewrite compare_refl in Heqb0; discriminate.
- + inversion H6; subst.
- unfold singleton_state_pred in Heqb0.
- now rewrite compare_refl in Heqb0; discriminate.
- - destruct stack0 as [|[]]; unfold pop.
- + inversion H0; subst.
- now inversion H.
- + fold pop.
- destruct (compare_eqdec (last_symb_of_non_init_state x) a).
- * inversion H0; subst. clear H0.
- inversion H; subst. clear H.
- dependent destruction H3; simpl.
- assert (prefix_pred (List.tl l) (state_stack_of_stack stack0)).
- unfold tl; destruct l; [constructor | inversion H2]...
- pose proof H. destruct H3.
- specialize (IHsymbols_to_pop stack0 A (action0 n) _ _ H4 H7 H H0 H6).
- revert IHsymbols_to_pop.
- fold (noninitstate_type x); generalize (pop symbols_to_pop stack0 (action0 n)).
- destruct r as [|[]]; intuition...
- destruct l; constructor...
- * apply n0.
- inversion H0; subst.
- inversion H; subst...
-Qed.
-
-(** [prefix] is transitive **)
-Lemma prefix_ass:
- forall (l1 l2 l3:list symbol), prefix l1 l2 -> prefix l2 l3 -> prefix l1 l3.
-Proof.
-induction l1; intros.
-constructor.
-inversion H; subst.
-inversion H0; subst.
-constructor; eauto.
-Qed.
-
-(** [prefix_pred] is transitive **)
-Lemma prefix_pred_ass:
- forall (l1 l2 l3:list (state->bool)),
- prefix_pred l1 l2 -> prefix_pred l2 l3 -> prefix_pred l1 l3.
-Proof.
-induction l1; intros.
-constructor.
-inversion H; subst.
-inversion H0; subst.
-constructor; eauto.
-intro.
-specialize (H3 x).
-specialize (H4 x).
-destruct (f0 x); simpl in *; intuition.
-rewrite H4 in H3; intuition.
-Qed.
-
-(** If we have the right to reduce at this state, then the stack invariant
-    is preserved by [reduce_step], and [reduce_step] does not crash. **)
-Lemma reduce_step_stack_invariant_conserved stack prod buff:
- stack_invariant stack ->
- valid_for_reduce (state_of_stack init stack) prod ->
- match reduce_step init stack prod buff with
- | OK (Fail_sr | Accept_sr _ _) => True
- | OK (Progress_sr stack_new _) => stack_invariant stack_new
- | Err => False
- end.
-Proof with eauto.
-unfold valid_for_reduce.
-intuition.
-unfold reduce_step.
-pose proof (pop_stack_invariant_conserved (prod_rhs_rev prod) stack _ (prod_action' prod)).
-destruct (pop (prod_rhs_rev prod) stack (prod_action' prod)) as [|[]].
-apply H0...
-destruct H0...
-pose proof (goto_head_symbs (state_of_stack init s) (prod_lhs prod)).
-pose proof (goto_past_state (state_of_stack init s) (prod_lhs prod)).
-unfold bind2.
-destruct H0.
-specialize (H2 _ H3)...
-destruct (goto_table (state_of_stack init stack0) (prod_lhs prod)) as [[]|].
-constructor.
-simpl.
-constructor.
-eapply prefix_ass...
-unfold state_stack_of_stack; simpl; constructor.
-intro; destruct (singleton_state_pred x x0); reflexivity.
-eapply prefix_pred_ass...
-constructor...
-constructor...
-destruct stack0 as [|[]]...
-destruct (compare_eqdec (prod_lhs prod) (start_nt init))...
-apply n, H2, eq_refl.
-apply H2, eq_refl.
-Qed.
-
-(** If the automaton is safe, then the stack invariant is preserved by
- [step] and [step] does not crash. **)
-Lemma step_stack_invariant_conserved (stack:stack) buffer:
- stack_invariant stack ->
- match step init stack buffer with
- | OK (Fail_sr | Accept_sr _ _) => True
- | OK (Progress_sr stack_new _) => stack_invariant stack_new
- | Err => False
- end.
-Proof with eauto.
-intro.
-unfold step.
-pose proof (shift_head_symbs (state_of_stack init stack)).
-pose proof (shift_past_state (state_of_stack init stack)).
-pose proof (reduce_ok (state_of_stack init stack)).
-destruct (action_table (state_of_stack init stack)).
-apply reduce_step_stack_invariant_conserved...
-destruct buffer as [[]]; simpl.
-specialize (H0 x); specialize (H1 x); specialize (H2 x).
-destruct (l x)...
-destruct H.
-constructor.
-unfold state_of_stack.
-constructor.
-eapply prefix_ass...
-unfold state_stack_of_stack; simpl; constructor.
-intro; destruct (singleton_state_pred s0 x0)...
-eapply prefix_pred_ass...
-constructor...
-constructor...
-apply reduce_step_stack_invariant_conserved...
-Qed.
-
-(** If the automaton is safe, then it does not crash **)
-Theorem parse_no_err buffer n_steps:
- parse init buffer n_steps <> Err.
-Proof with eauto.
-unfold parse.
-assert (stack_invariant []).
-constructor.
-constructor.
-unfold state_stack_of_stack; simpl; constructor.
-intro; destruct (singleton_state_pred init x)...
-constructor.
-constructor.
-revert H.
-generalize buffer ([]:stack).
-induction n_steps; simpl.
-now discriminate.
-intros.
-pose proof (step_stack_invariant_conserved s buffer0 H).
-destruct (step init s buffer0) as [|[]]; simpl...
-discriminate.
-discriminate.
-Qed.
-
-(** A version of [parse] that uses safety in order to return a
- [parse_result] rather than a [result parse_result]: we have proved that
- parsing never returns [Err]. **)
-Definition parse_with_safe (buffer:Stream token) (n_steps:nat):
- parse_result init.
-Proof with eauto.
-pose proof (parse_no_err buffer n_steps).
-destruct (parse init buffer n_steps)...
-congruence.
-Defined.
-
-End Safety_proof.
-
-End Make.
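
The proof of [parse_no_err] above follows a generic shape: an invariant holds of the initial stack, and every step either preserves it or is impossible, so no finite iteration can return [Err]. The self-contained Coq sketch below captures that shape; the names ([inv], [step], [iterate]) are hypothetical and the code is an illustration, not part of the CompCert sources.

(* Standalone sketch, hypothetical names; not from the CompCert sources. *)
Inductive result (A : Type) := Err | OK (a : A).
Arguments Err {A}.
Arguments OK {A} a.

Section Iterate.
  Variable state : Type.
  Variable inv : state -> Prop.               (* the stack invariant *)
  Variable step : state -> result state.      (* one parsing step *)
  Hypothesis step_ok :                        (* safety of a single step *)
    forall s, inv s -> match step s with OK s' => inv s' | Err => False end.

  Fixpoint iterate (n : nat) (s : state) : result state :=
    match n with
    | O => OK s
    | S n' => match step s with
              | OK s' => iterate n' s'
              | Err => Err
              end
    end.

  (* The analogue of [parse_no_err]: with the invariant established
     initially, iterating [step] can never produce [Err]. *)
  Lemma iterate_no_err : forall n s, inv s -> iterate n s <> Err.
  Proof.
    induction n as [|n IH]; intros s Hs; simpl.
    - discriminate.
    - generalize (step_ok s Hs).
      destruct (step s) as [|s']; intros Hnext.
      + destruct Hnext.
      + apply IH, Hnext.
  Qed.
End Iterate.
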
diff --git a/cparser/MenhirLib/Main.v b/cparser/MenhirLib/Main.v
deleted file mode 100644
index 1a17e988..00000000
--- a/cparser/MenhirLib/Main.v
+++ /dev/null
@@ -1,92 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Grammar.
-Require Automaton.
-Require Interpreter_safe.
-Require Interpreter_correct.
-Require Interpreter_complete.
-Require Import Syntax.
-
-Module Make(Export Aut:Automaton.T).
-Export Aut.Gram.
-Export Aut.GramDefs.
-
-Module Import Inter := Interpreter.Make Aut.
-Module Safe := Interpreter_safe.Make Aut Inter.
-Module Correct := Interpreter_correct.Make Aut Inter.
-Module Complete := Interpreter_complete.Make Aut Inter.
-
-Definition complete_validator:unit->bool := Complete.Valid.is_complete.
-Definition safe_validator:unit->bool := Safe.Valid.is_safe.
-Definition parse (safe:safe_validator ()=true) init n_steps buffer : parse_result init:=
- Safe.parse_with_safe (Safe.Valid.is_safe_correct safe) init buffer n_steps.
-
-(** Correctness theorem. **)
-Theorem parse_correct
- (safe:safe_validator ()= true) init n_steps buffer:
- match parse safe init n_steps buffer with
- | Parsed_pr sem buffer_new =>
- exists word,
- buffer = word ++ buffer_new /\ inhabited (parse_tree (NT (start_nt init)) word sem)
- | _ => True
- end.
-Proof.
-unfold parse, Safe.parse_with_safe.
-pose proof (Correct.parse_correct init buffer n_steps).
-generalize (Safe.parse_no_err (Safe.Valid.is_safe_correct safe) init buffer n_steps).
-destruct (Inter.parse init buffer n_steps); intros.
-now destruct (n (eq_refl _)).
-now destruct p; trivial.
-Qed.
-
-(** Completeness theorem. **)
-Theorem parse_complete
- (safe:safe_validator () = true) init n_steps word buffer_end sem:
- complete_validator () = true ->
- forall tree:parse_tree (NT (start_nt init)) word sem,
- match parse safe init n_steps (word ++ buffer_end) with
- | Fail_pr => False
- | Parsed_pr sem_res buffer_end_res =>
- sem_res = sem /\ buffer_end_res = buffer_end /\ pt_size tree <= n_steps
- | Timeout_pr => n_steps < pt_size tree
- end.
-Proof.
-intros.
-unfold parse, Safe.parse_with_safe.
-pose proof (Complete.parse_complete (Complete.Valid.is_complete_correct H) init _ buffer_end _ tree n_steps).
-generalize (Safe.parse_no_err (Safe.Valid.is_safe_correct safe) init (word ++ buffer_end) n_steps).
-destruct (Inter.parse init (word ++ buffer_end) n_steps); intros.
-now destruct (n eq_refl).
-now exact H0.
-Qed.
-
-(** Unambiguity theorem. **)
-Theorem unambiguity:
- safe_validator () = true -> complete_validator () = true -> inhabited token ->
- forall init word,
- forall sem1 (tree1:parse_tree (NT (start_nt init)) word sem1),
- forall sem2 (tree2:parse_tree (NT (start_nt init)) word sem2),
- sem1 = sem2.
-Proof.
-intros.
-destruct H1.
-pose proof (parse_complete H init (pt_size tree1) word (Streams.const X) sem1) H0 tree1.
-pose proof (parse_complete H init (pt_size tree1) word (Streams.const X) sem2) H0 tree2.
-destruct (parse H init (pt_size tree1) (word ++ Streams.const X)); intuition.
-rewrite <- H3, H1; reflexivity.
-Qed.
-
-End Make.
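
Note how [parse] above takes the equation [safe_validator () = true] as an ordinary argument: the boolean check is run once and the proof of its success is then threaded through the verified code. The toy, self-contained sketch below illustrates this idiom; the names [validator] and [checked_pred] are hypothetical.

Require Import Bool.

(* A boolean check standing in for [safe_validator] (hypothetical). *)
Definition validator (n : nat) : bool := Nat.ltb 0 n.

(* The proof that the check succeeded is an ordinary argument; it lets the
   function refute the branch that the validator rules out. *)
Definition checked_pred (n : nat) (Hok : validator n = true) : nat :=
  match n as m return validator m = true -> nat with
  | O => fun H0 => False_rect nat (Bool.diff_false_true H0)
  | S k => fun _ => k
  end Hok.

(* At the call site the obligation is discharged by computation. *)
Example checked_pred_3 : checked_pred 3 eq_refl = 2.
Proof. reflexivity. Qed.
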
diff --git a/cparser/MenhirLib/Tuples.v b/cparser/MenhirLib/Tuples.v
deleted file mode 100644
index 3fd2ec03..00000000
--- a/cparser/MenhirLib/Tuples.v
+++ /dev/null
@@ -1,49 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import List.
-Require Import Coq.Program.Syntax.
-Require Import Equality.
-
-(** A curried function with multiple parameters **)
-Definition arrows_left: list Type -> Type -> Type :=
- fold_left (fun A B => B -> A).
-
-(** A curried function with multiple parameters **)
-Definition arrows_right: Type -> list Type -> Type :=
- fold_right (fun A B => A -> B).
-
-(** A tuple is a heterogeneous list. For convenience, we use pairs. **)
-Fixpoint tuple (types : list Type) : Type :=
- match types with
- | nil => unit
- | t::q => prod t (tuple q)
- end.
-
-Fixpoint uncurry {args:list Type} {res:Type}:
- arrows_left args res -> tuple args -> res :=
- match args return forall res, arrows_left args res -> tuple args -> res with
- | [] => fun _ f _ => f
- | t::q => fun res f p => let (d, t) := p in
- (@uncurry q _ f t) d
- end res.
-
-Lemma JMeq_eqrect:
- forall (U:Type) (a b:U) (P:U -> Type) (x:P a) (e:a=b),
- eq_rect a P x b e ~= x.
-Proof.
-destruct e.
-reflexivity.
-Qed.
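
The behaviour of [arrows_left], [tuple] and [uncurry] is easiest to see on a concrete instance. The sketch below restates the three definitions verbatim outside the module so they can be experimented with; only the final [Example] is new.

(* Standalone restatement of the definitions above; only the Example is new. *)
Require Import List.
Import ListNotations.

Definition arrows_left : list Type -> Type -> Type :=
  fold_left (fun A B => B -> A).

Fixpoint tuple (types : list Type) : Type :=
  match types with
  | nil => unit
  | t :: q => prod t (tuple q)
  end.

Fixpoint uncurry {args : list Type} {res : Type} :
  arrows_left args res -> tuple args -> res :=
  match args return forall res, arrows_left args res -> tuple args -> res with
  | [] => fun _ f _ => f
  | t :: q => fun res f p => let (d, t) := p in (@uncurry q _ f t) d
  end res.

(* [arrows_left [nat; bool] nat] unfolds to [bool -> nat -> nat]: the last
   type in the list becomes the first argument of the curried function,
   while the tuple stores the arguments in list order. *)
Example uncurry_demo :
  @uncurry [nat; bool] nat (fun (b : bool) (n : nat) => if b then n else 0)
           (3, (true, tt)) = 3.
Proof. reflexivity. Qed.
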
diff --git a/cparser/MenhirLib/Validator_complete.v b/cparser/MenhirLib/Validator_complete.v
deleted file mode 100644
index a9823278..00000000
--- a/cparser/MenhirLib/Validator_complete.v
+++ /dev/null
@@ -1,542 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Automaton.
-Require Import Alphabet.
-Require Import List.
-Require Import Syntax.
-
-Module Make(Import A:Automaton.T).
-
-(** We instantiate some sets/map. **)
-Module TerminalComparableM <: ComparableM.
- Definition t := terminal.
- Instance tComparable : Comparable t := _.
-End TerminalComparableM.
-Module TerminalOrderedType := OrderedType_from_ComparableM TerminalComparableM.
-Module StateProdPosComparableM <: ComparableM.
- Definition t := (state*production*nat)%type.
- Instance tComparable : Comparable t := _.
-End StateProdPosComparableM.
-Module StateProdPosOrderedType :=
- OrderedType_from_ComparableM StateProdPosComparableM.
-
-Module TerminalSet := FSetAVL.Make TerminalOrderedType.
-Module StateProdPosMap := FMapAVL.Make StateProdPosOrderedType.
-
-(** Nullable predicate for symbols and lists of symbols. **)
-Definition nullable_symb (symbol:symbol) :=
- match symbol with
- | NT nt => nullable_nterm nt
- | _ => false
- end.
-
-Definition nullable_word (word:list symbol) :=
- forallb nullable_symb word.
-
-(** First predicate for non-terminals, symbols and lists of symbols, given as FSets. **)
-Definition first_nterm_set (nterm:nonterminal) :=
- fold_left (fun acc t => TerminalSet.add t acc)
- (first_nterm nterm) TerminalSet.empty.
-
-Definition first_symb_set (symbol:symbol) :=
- match symbol with
- | NT nt => first_nterm_set nt
- | T t => TerminalSet.singleton t
- end.
-
-Fixpoint first_word_set (word:list symbol) :=
- match word with
- | [] => TerminalSet.empty
- | t::q =>
- if nullable_symb t then
- TerminalSet.union (first_symb_set t) (first_word_set q)
- else
- first_symb_set t
- end.
-
-(** Small helper for finding the part of an item that is after the dot. **)
-Definition future_of_prod prod dot_pos : list symbol :=
- (fix loop n lst :=
- match n with
- | O => lst
- | S x => match loop x lst with [] => [] | _::q => q end
- end)
- dot_pos (rev' (prod_rhs_rev prod)).
-
-(** We build a fast map to store all the items of all the states. **)
-Definition items_map (_:unit): StateProdPosMap.t TerminalSet.t :=
- fold_left (fun acc state =>
- fold_left (fun acc item =>
- let key := (state, prod_item item, dot_pos_item item) in
- let data := fold_left (fun acc t => TerminalSet.add t acc)
- (lookaheads_item item) TerminalSet.empty
- in
- let old :=
- match StateProdPosMap.find key acc with
- | Some x => x | None => TerminalSet.empty
- end
- in
- StateProdPosMap.add key (TerminalSet.union data old) acc
- ) (items_of_state state) acc
- ) all_list (StateProdPosMap.empty TerminalSet.t).
-
-(** Accessor. **)
-Definition find_items_map items_map state prod dot_pos : TerminalSet.t :=
- match StateProdPosMap.find (state, prod, dot_pos) items_map with
- | None => TerminalSet.empty
- | Some x => x
- end.
-
-Definition state_has_future state prod (fut:list symbol) (lookahead:terminal) :=
- exists dot_pos:nat,
- fut = future_of_prod prod dot_pos /\
- TerminalSet.In lookahead (find_items_map (items_map ()) state prod dot_pos).
-
-(** Iterator over items. **)
-Definition forallb_items items_map (P:state -> production -> nat -> TerminalSet.t -> bool): bool:=
- StateProdPosMap.fold (fun key set acc =>
- match key with (st, p, pos) => (acc && P st p pos set)%bool end
- ) items_map true.
-
-Lemma forallb_items_spec :
- forall p, forallb_items (items_map ()) p = true ->
- forall st prod fut lookahead, state_has_future st prod fut lookahead ->
- forall P:state -> production -> list symbol -> terminal -> Prop,
- (forall st prod pos set lookahead,
- TerminalSet.In lookahead set -> p st prod pos set = true ->
- P st prod (future_of_prod prod pos) lookahead) ->
- P st prod fut lookahead.
-Proof.
-intros.
-unfold forallb_items in H.
-rewrite StateProdPosMap.fold_1 in H.
-destruct H0; destruct H0.
-specialize (H1 st prod x _ _ H2).
-destruct H0.
-apply H1.
-unfold find_items_map in *.
-pose proof (@StateProdPosMap.find_2 _ (items_map ()) (st, prod, x)).
-destruct (StateProdPosMap.find (st, prod, x) (items_map ())); [ |destruct (TerminalSet.empty_1 H2)].
-specialize (H0 _ (eq_refl _)).
-pose proof (StateProdPosMap.elements_1 H0).
-revert H.
-generalize true at 1.
-induction H3.
-destruct H.
-destruct y.
-simpl in H3; destruct H3.
-pose proof (compare_eq (st, prod, x) k H).
-destruct H3.
-simpl.
-generalize (p st prod x t).
-induction l; simpl; intros.
-rewrite Bool.andb_true_iff in H3.
-intuition.
-destruct a; destruct k; destruct p0.
-simpl in H3.
-replace (b0 && b && p s p0 n t0)%bool with (b0 && p s p0 n t0 && b)%bool in H3.
-apply (IHl _ _ H3).
-destruct b, b0, (p s p0 n t0); reflexivity.
-intro.
-apply IHInA.
-Qed.
-
-(** * Validation for completeness **)
-
-(** The nullable predicate is a fixpoint: it is correct. **)
-Definition nullable_stable:=
- forall p:production,
- nullable_word (rev (prod_rhs_rev p)) = true ->
- nullable_nterm (prod_lhs p) = true.
-
-Definition is_nullable_stable (_:unit) :=
- forallb (fun p:production =>
- implb (nullable_word (rev' (prod_rhs_rev p))) (nullable_nterm (prod_lhs p)))
- all_list.
-
-Property is_nullable_stable_correct :
- is_nullable_stable () = true -> nullable_stable.
-Proof.
-unfold is_nullable_stable, nullable_stable.
-intros.
-rewrite forallb_forall in H.
-specialize (H p (all_list_forall p)).
-unfold rev' in H; rewrite <- rev_alt in H.
-rewrite H0 in H; intuition.
-Qed.
-
-(** The first predicate is a fixpoint: it is correct. **)
-Definition first_stable:=
- forall (p:production),
- TerminalSet.Subset (first_word_set (rev (prod_rhs_rev p)))
- (first_nterm_set (prod_lhs p)).
-
-Definition is_first_stable (_:unit) :=
- forallb (fun p:production =>
- TerminalSet.subset (first_word_set (rev' (prod_rhs_rev p)))
- (first_nterm_set (prod_lhs p)))
- all_list.
-
-Property is_first_stable_correct :
- is_first_stable () = true -> first_stable.
-Proof.
-unfold is_first_stable, first_stable.
-intros.
-rewrite forallb_forall in H.
-specialize (H p (all_list_forall p)).
-unfold rev' in H; rewrite <- rev_alt in H.
-apply TerminalSet.subset_2; intuition.
-Qed.
-
-(** The initial state has all the S=>.u items, where S is the start non-terminal **)
-Definition start_future :=
- forall (init:initstate) (t:terminal) (p:production),
- prod_lhs p = start_nt init ->
- state_has_future init p (rev (prod_rhs_rev p)) t.
-
-Definition is_start_future items_map :=
- forallb (fun init =>
- forallb (fun prod =>
- if compare_eqb (prod_lhs prod) (start_nt init) then
- let lookaheads := find_items_map items_map init prod 0 in
- forallb (fun t => TerminalSet.mem t lookaheads) all_list
- else
- true) all_list) all_list.
-
-Property is_start_future_correct :
- is_start_future (items_map ()) = true -> start_future.
-Proof.
-unfold is_start_future, start_future.
-intros.
-rewrite forallb_forall in H.
-specialize (H init (all_list_forall _)).
-rewrite forallb_forall in H.
-specialize (H p (all_list_forall _)).
-rewrite <- compare_eqb_iff in H0.
-rewrite H0 in H.
-rewrite forallb_forall in H.
-specialize (H t (all_list_forall _)).
-exists 0.
-split.
-apply rev_alt.
-apply TerminalSet.mem_2; eauto.
-Qed.
-
-(** If a state contains an item of the form A->_.av[[b]], where a is a
- terminal, then reading an a performs a [Shift_act] to a state containing
- an item of the form A->_.v[[b]]. **)
-Definition terminal_shift :=
- forall (s1:state) prod fut lookahead,
- state_has_future s1 prod fut lookahead ->
- match fut with
- | T t::q =>
- match action_table s1 with
- | Lookahead_act awp =>
- match awp t with
- | Shift_act s2 _ =>
- state_has_future s2 prod q lookahead
- | _ => False
- end
- | _ => False
- end
- | _ => True
- end.
-
-Definition is_terminal_shift items_map :=
- forallb_items items_map (fun s1 prod pos lset =>
- match future_of_prod prod pos with
- | T t::_ =>
- match action_table s1 with
- | Lookahead_act awp =>
- match awp t with
- | Shift_act s2 _ =>
- TerminalSet.subset lset (find_items_map items_map s2 prod (S pos))
- | _ => false
- end
- | _ => false
- end
- | _ => true
- end).
-
-Property is_terminal_shift_correct :
- is_terminal_shift (items_map ()) = true -> terminal_shift.
-Proof.
-unfold is_terminal_shift, terminal_shift.
-intros.
-apply (forallb_items_spec _ H _ _ _ _ H0 (fun _ _ fut look => _)).
-intros.
-destruct (future_of_prod prod0 pos) as [|[]] eqn:?; intuition.
-destruct (action_table st); intuition.
-destruct (l0 t); intuition.
-exists (S pos).
-split.
-unfold future_of_prod in *.
-rewrite Heql; reflexivity.
-apply (TerminalSet.subset_2 H2); intuition.
-Qed.
-
-(** If a state contains an item of the form A->_.[[a]], then either we do a
- [Default_reduce_act] of the corresponding production, or a is a
- terminal (i.e. there is a lookahead terminal) and reading a does a
- [Reduce_act] of the corresponding production. **)
-Definition end_reduce :=
- forall (s:state) prod fut lookahead,
- state_has_future s prod fut lookahead ->
- fut = [] ->
- match action_table s with
- | Default_reduce_act p => p = prod
- | Lookahead_act awt =>
- match awt lookahead with
- | Reduce_act p => p = prod
- | _ => False
- end
- end.
-
-Definition is_end_reduce items_map :=
- forallb_items items_map (fun s prod pos lset =>
- match future_of_prod prod pos with
- | [] =>
- match action_table s with
- | Default_reduce_act p => compare_eqb p prod
- | Lookahead_act awt =>
- TerminalSet.fold (fun lookahead acc =>
- match awt lookahead with
- | Reduce_act p => (acc && compare_eqb p prod)%bool
- | _ => false
- end) lset true
- end
- | _ => true
- end).
-
-Property is_end_reduce_correct :
- is_end_reduce (items_map ()) = true -> end_reduce.
-Proof.
-unfold is_end_reduce, end_reduce.
-intros.
-revert H1.
-apply (forallb_items_spec _ H _ _ _ _ H0 (fun st prod fut look => _ ->
- match action_table st with
- | Default_reduce_act p => p = prod
- | _ => _
- end)).
-intros.
-rewrite H3 in H2.
-destruct (action_table st); intuition.
-apply compare_eqb_iff; intuition.
-rewrite TerminalSet.fold_1 in H2.
-revert H2.
-generalize true at 1.
-pose proof (TerminalSet.elements_1 H1).
-induction H2.
-pose proof (compare_eq _ _ H2).
-destruct H4.
-simpl.
-assert (fold_left
- (fun (a : bool) (e : TerminalSet.elt) =>
- match l e with
- | Shift_act _ _ => false
- | Reduce_act p => (a && compare_eqb p prod0)%bool
- | Fail_act => false
- end) l0 false = true -> False).
-induction l0; intuition.
-apply IHl0.
-simpl in H4.
-destruct (l a); intuition.
-destruct (l lookahead0); intuition.
-apply compare_eqb_iff.
-destruct (compare_eqb p prod0); intuition.
-destruct b; intuition.
-simpl; intros.
-eapply IHInA; eauto.
-Qed.
-
-(** If a state contains an item of the form A->_.Bv[[b]], where B is a
- non terminal, then the goto table says we have to go to a state containing
- an item of the form A->_.v[[b]]. **)
-Definition non_terminal_goto :=
- forall (s1:state) prod fut lookahead,
- state_has_future s1 prod fut lookahead ->
- match fut with
- | NT nt::q =>
- match goto_table s1 nt with
- | Some (exist _ s2 _) =>
- state_has_future s2 prod q lookahead
- | None =>
- forall prod fut lookahead,
- state_has_future s1 prod fut lookahead ->
- match fut with
- | NT nt'::_ => nt <> nt'
- | _ => True
- end
- end
- | _ => True
- end.
-
-Definition is_non_terminal_goto items_map :=
- forallb_items items_map (fun s1 prod pos lset =>
- match future_of_prod prod pos with
- | NT nt::_ =>
- match goto_table s1 nt with
- | Some (exist _ s2 _) =>
- TerminalSet.subset lset (find_items_map items_map s2 prod (S pos))
- | None => forallb_items items_map (fun s1' prod' pos' _ =>
- (implb (compare_eqb s1 s1')
- match future_of_prod prod' pos' with
- | NT nt' :: _ => negb (compare_eqb nt nt')
- | _ => true
- end)%bool)
- end
- | _ => true
- end).
-
-Property is_non_terminal_goto_correct :
- is_non_terminal_goto (items_map ()) = true -> non_terminal_goto.
-Proof.
-unfold is_non_terminal_goto, non_terminal_goto.
-intros.
-apply (forallb_items_spec _ H _ _ _ _ H0 (fun st prod fut look =>
- match fut with
- | NT nt :: q =>
- match goto_table st nt with
- | Some _ => _
- | None =>
- forall p f l, state_has_future st p f l -> (_:Prop)
- end
- | _ => _
- end)).
-intros.
-destruct (future_of_prod prod0 pos) as [|[]] eqn:?; intuition.
-destruct (goto_table st n) as [[]|].
-exists (S pos).
-split.
-unfold future_of_prod in *.
-rewrite Heql; reflexivity.
-apply (TerminalSet.subset_2 H2); intuition.
-intros.
-remember st in H2; revert Heqs.
-apply (forallb_items_spec _ H2 _ _ _ _ H3 (fun st' prod fut look => s = st' -> match fut return Prop with [] => _ | _ => _ end)); intros.
-rewrite <- compare_eqb_iff in H6; rewrite H6 in H5.
-destruct (future_of_prod prod1 pos0) as [|[]]; intuition.
-rewrite <- compare_eqb_iff in H7; rewrite H7 in H5.
-discriminate.
-Qed.
-
-Definition start_goto :=
- forall (init:initstate), goto_table init (start_nt init) = None.
-
-Definition is_start_goto (_:unit) :=
- forallb (fun (init:initstate) =>
- match goto_table init (start_nt init) with
- | Some _ => false
- | None => true
- end) all_list.
-
-Definition is_start_goto_correct:
- is_start_goto () = true -> start_goto.
-Proof.
-unfold is_start_goto, start_goto.
-rewrite forallb_forall.
-intros.
-specialize (H init (all_list_forall _)).
-destruct (goto_table init (start_nt init)); congruence.
-Qed.
-
-(** Closure property of item sets: if a state contains an item of the form
- A->_.Bv[[b]], then for each production B->u and each terminal a of
- first(vb), the state contains an item of the form B->_.u[[a]]. **)
-Definition non_terminal_closed :=
- forall (s1:state) prod fut lookahead,
- state_has_future s1 prod fut lookahead ->
- match fut with
- | NT nt::q =>
- forall (p:production) (lookahead2:terminal),
- prod_lhs p = nt ->
- TerminalSet.In lookahead2 (first_word_set q) \/
- lookahead2 = lookahead /\ nullable_word q = true ->
- state_has_future s1 p (rev (prod_rhs_rev p)) lookahead2
- | _ => True
- end.
-
-Definition is_non_terminal_closed items_map :=
- forallb_items items_map (fun s1 prod pos lset =>
- match future_of_prod prod pos with
- | NT nt::q =>
- forallb (fun p =>
- if compare_eqb (prod_lhs p) nt then
- let lookaheads := find_items_map items_map s1 p 0 in
- (implb (nullable_word q) (TerminalSet.subset lset lookaheads)) &&
- TerminalSet.subset (first_word_set q) lookaheads
- else true
- )%bool all_list
- | _ => true
- end).
-
-Property is_non_terminal_closed_correct:
- is_non_terminal_closed (items_map ()) = true -> non_terminal_closed.
-Proof.
-unfold is_non_terminal_closed, non_terminal_closed.
-intros.
-apply (forallb_items_spec _ H _ _ _ _ H0 (fun st prod fut look =>
- match fut with
- | NT nt :: q => forall p l, _ -> _ -> state_has_future st _ _ _
- | _ => _
- end)).
-intros.
-destruct (future_of_prod prod0 pos); intuition.
-destruct s; eauto; intros.
-rewrite forallb_forall in H2.
-specialize (H2 p (all_list_forall p)).
-rewrite <- compare_eqb_iff in H3.
-rewrite H3 in H2.
-rewrite Bool.andb_true_iff in H2.
-destruct H2.
-exists 0.
-split.
-apply rev_alt.
-destruct H4 as [|[]]; subst.
-apply (TerminalSet.subset_2 H5); intuition.
-rewrite H6 in H2.
-apply (TerminalSet.subset_2 H2); intuition.
-Qed.
-
-(** The automaton is complete **)
-Definition complete :=
- nullable_stable /\ first_stable /\ start_future /\ terminal_shift
- /\ end_reduce /\ non_terminal_goto /\ start_goto /\ non_terminal_closed.
-
-Definition is_complete (_:unit) :=
- let items_map := items_map () in
- (is_nullable_stable () && is_first_stable () && is_start_future items_map &&
- is_terminal_shift items_map && is_end_reduce items_map && is_start_goto () &&
- is_non_terminal_goto items_map && is_non_terminal_closed items_map)%bool.
-
-Property is_complete_correct:
- is_complete () = true -> complete.
-Proof.
-unfold is_complete, complete.
-repeat rewrite Bool.andb_true_iff.
-intuition.
-apply is_nullable_stable_correct; assumption.
-apply is_first_stable_correct; assumption.
-apply is_start_future_correct; assumption.
-apply is_terminal_shift_correct; assumption.
-apply is_end_reduce_correct; assumption.
-apply is_non_terminal_goto_correct; assumption.
-apply is_start_goto_correct; assumption.
-apply is_non_terminal_closed_correct; assumption.
-Qed.
-
-End Make.
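
All of the [is_*] validators above use the same reflection idiom: a [forallb] check over an exhaustive enumeration ([all_list]), plus a correctness lemma turning the boolean result into a universally quantified proposition. Below is a self-contained toy instance of the pattern; the three-state type and the names are hypothetical, not CompCert code.

(* Standalone sketch of the forallb-over-enumeration reflection pattern. *)
Require Import List Setoid.
Import ListNotations.

Inductive st := S0 | S1 | S2.
Definition all_st : list st := [S0; S1; S2].

Lemma all_st_forall : forall s, In s all_st.
Proof.
  destruct s; simpl; [left | right; left | right; right; left]; reflexivity.
Qed.

(* A boolean validator over the whole (finite) state space... *)
Definition is_ok (p : st -> bool) : bool := forallb p all_st.

(* ...and the lemma converting [is_ok p = true] into the Prop we use in proofs. *)
Lemma is_ok_correct (p : st -> bool) :
  is_ok p = true -> forall s, p s = true.
Proof.
  unfold is_ok; intros H s.
  rewrite forallb_forall in H.
  apply H, all_st_forall.
Qed.
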
diff --git a/cparser/MenhirLib/Validator_safe.v b/cparser/MenhirLib/Validator_safe.v
deleted file mode 100644
index 183d661b..00000000
--- a/cparser/MenhirLib/Validator_safe.v
+++ /dev/null
@@ -1,414 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Automaton.
-Require Import Alphabet.
-Require Import List.
-Require Import Syntax.
-
-Module Make(Import A:Automaton.T).
-
-(** The singleton predicate for states **)
-Definition singleton_state_pred (state:state) :=
- (fun state' => match compare state state' with Eq => true |_ => false end).
-
-(** [past_state_of_non_init_state], extended for all states. **)
-Definition past_state_of_state (state:state) :=
- match state with
- | Init _ => []
- | Ninit nis => past_state_of_non_init_state nis
- end.
-
-(** Concatenations of last and past **)
-Definition head_symbs_of_state (state:state) :=
- match state with
- | Init _ => []
- | Ninit s =>
- last_symb_of_non_init_state s::past_symb_of_non_init_state s
- end.
-Definition head_states_of_state (state:state) :=
- singleton_state_pred state::past_state_of_state state.
-
-(** * Validation for correctness **)
-
-(** Prefix predicate between two lists of symbols. **)
-Inductive prefix: list symbol -> list symbol -> Prop :=
- | prefix_nil: forall l, prefix [] l
- | prefix_cons: forall l1 l2 x, prefix l1 l2 -> prefix (x::l1) (x::l2).
-
-Fixpoint is_prefix (l1 l2:list symbol):=
- match l1, l2 with
- | [], _ => true
- | t1::q1, t2::q2 => (compare_eqb t1 t2 && is_prefix q1 q2)%bool
- | _::_, [] => false
- end.
-
-Property is_prefix_correct (l1 l2:list symbol):
- is_prefix l1 l2 = true -> prefix l1 l2.
-Proof.
-revert l2.
-induction l1; intros.
-apply prefix_nil.
-unfold is_prefix in H.
-destruct l2; intuition; try discriminate.
-rewrite Bool.andb_true_iff in H.
-destruct H.
-rewrite compare_eqb_iff in H.
-destruct H.
-apply prefix_cons.
-apply IHl1; intuition.
-Qed.
-
-(** If we shift, then the known top symbols of the destination state are
- the newly shifted symbol followed by a prefix of the known top symbols
- of the source state. **)
-Definition shift_head_symbs :=
- forall s,
- match action_table s with
- | Lookahead_act awp =>
- forall t, match awp t with
- | Shift_act s2 _ =>
- prefix (past_symb_of_non_init_state s2) (head_symbs_of_state s)
- | _ => True
- end
- | _ => True
- end.
-
-Definition is_shift_head_symbs (_:unit) :=
- forallb (fun s:state =>
- match action_table s with
- | Lookahead_act awp =>
- forallb (fun t =>
- match awp t with
- | Shift_act s2 _ =>
- is_prefix (past_symb_of_non_init_state s2) (head_symbs_of_state s)
- | _ => true
- end)
- all_list
- | _ => true
- end)
- all_list.
-
-Property is_shift_head_symbs_correct:
- is_shift_head_symbs () = true -> shift_head_symbs.
-Proof.
-unfold is_shift_head_symbs, shift_head_symbs.
-intros.
-rewrite forallb_forall in H.
-specialize (H s (all_list_forall s)).
-destruct (action_table s); intuition.
-rewrite forallb_forall in H.
-specialize (H t (all_list_forall t)).
-destruct (l t); intuition.
-apply is_prefix_correct; intuition.
-Qed.
-
-(** When a goto happens, the known top symbols of the destination state
- are the new symbol followed by a prefix of the known top symbols of the
- source state. **)
-Definition goto_head_symbs :=
- forall s nt,
- match goto_table s nt with
- | Some (exist _ s2 _) =>
- prefix (past_symb_of_non_init_state s2) (head_symbs_of_state s)
- | None => True
- end.
-
-Definition is_goto_head_symbs (_:unit) :=
- forallb (fun s:state =>
- forallb (fun nt =>
- match goto_table s nt with
- | Some (exist _ s2 _) =>
- is_prefix (past_symb_of_non_init_state s2) (head_symbs_of_state s)
- | None => true
- end)
- all_list)
- all_list.
-
-Property is_goto_head_symbs_correct:
- is_goto_head_symbs () = true -> goto_head_symbs.
-Proof.
-unfold is_goto_head_symbs, goto_head_symbs.
-intros.
-rewrite forallb_forall in H.
-specialize (H s (all_list_forall s)).
-rewrite forallb_forall in H.
-specialize (H nt (all_list_forall nt)).
-destruct (goto_table s nt); intuition.
-destruct s0.
-apply is_prefix_correct; intuition.
-Qed.
-
-(** We need the same kind of checks for the assumptions about the state
- stack. However, these assumptions are predicates, so we define a notion
- of "prefix" over lists of predicates, which basically means that one
- assumption entails another. **)
-Inductive prefix_pred: list (state->bool) -> list (state->bool) -> Prop :=
- | prefix_pred_nil: forall l, prefix_pred [] l
- | prefix_pred_cons: forall l1 l2 f1 f2,
- (forall x, implb (f2 x) (f1 x) = true) ->
- prefix_pred l1 l2 -> prefix_pred (f1::l1) (f2::l2).
-
-Fixpoint is_prefix_pred (l1 l2:list (state->bool)) :=
- match l1, l2 with
- | [], _ => true
- | f1::q1, f2::q2 =>
- (forallb (fun x => implb (f2 x) (f1 x)) all_list
- && is_prefix_pred q1 q2)%bool
- | _::_, [] => false
- end.
-
-Property is_prefix_pred_correct (l1 l2:list (state->bool)) :
- is_prefix_pred l1 l2 = true -> prefix_pred l1 l2.
-Proof.
-revert l2.
-induction l1.
-intros.
-apply prefix_pred_nil.
-intros.
-destruct l2; intuition; try discriminate.
-unfold is_prefix_pred in H.
-rewrite Bool.andb_true_iff in H.
-rewrite forallb_forall in H.
-apply prefix_pred_cons; intuition.
-apply H0.
-apply all_list_forall.
-Qed.
-
-(** The assumptions about the state stack are preserved when we shift. **)
-Definition shift_past_state :=
- forall s,
- match action_table s with
- | Lookahead_act awp =>
- forall t, match awp t with
- | Shift_act s2 _ =>
- prefix_pred (past_state_of_non_init_state s2)
- (head_states_of_state s)
- | _ => True
- end
- | _ => True
- end.
-
-Definition is_shift_past_state (_:unit) :=
- forallb (fun s:state =>
- match action_table s with
- | Lookahead_act awp =>
- forallb (fun t =>
- match awp t with
- | Shift_act s2 _ =>
- is_prefix_pred
- (past_state_of_non_init_state s2) (head_states_of_state s)
- | _ => true
- end)
- all_list
- | _ => true
- end)
- all_list.
-
-Property is_shift_past_state_correct:
- is_shift_past_state () = true -> shift_past_state.
-Proof.
-unfold is_shift_past_state, shift_past_state.
-intros.
-rewrite forallb_forall in H.
-specialize (H s (all_list_forall s)).
-destruct (action_table s); intuition.
-rewrite forallb_forall in H.
-specialize (H t (all_list_forall t)).
-destruct (l t); intuition.
-apply is_prefix_pred_correct; intuition.
-Qed.
-
-(** The assumptions about the state stack are preserved when we do a goto. **)
-Definition goto_past_state :=
- forall s nt,
- match goto_table s nt with
- | Some (exist _ s2 _) =>
- prefix_pred (past_state_of_non_init_state s2)
- (head_states_of_state s)
- | None => True
- end.
-
-Definition is_goto_past_state (_:unit) :=
- forallb (fun s:state =>
- forallb (fun nt =>
- match goto_table s nt with
- | Some (exist _ s2 _) =>
- is_prefix_pred
- (past_state_of_non_init_state s2) (head_states_of_state s)
- | None => true
- end)
- all_list)
- all_list.
-
-Property is_goto_past_state_correct :
- is_goto_past_state () = true -> goto_past_state.
-Proof.
-unfold is_goto_past_state, goto_past_state.
-intros.
-rewrite forallb_forall in H.
-specialize (H s (all_list_forall s)).
-rewrite forallb_forall in H.
-specialize (H nt (all_list_forall nt)).
-destruct (goto_table s nt); intuition.
-destruct s0.
-apply is_prefix_pred_correct; intuition.
-Qed.
-
-(** What states are possible after having popped these symbols from the
- stack, given the annotation of the current state? **)
-Inductive state_valid_after_pop (s:state):
- list symbol -> list (state -> bool) -> Prop :=
- | state_valid_after_pop_nil1:
- forall p pl, p s = true -> state_valid_after_pop s [] (p::pl)
- | state_valid_after_pop_nil2:
- forall sl, state_valid_after_pop s sl []
- | state_valid_after_pop_cons:
- forall st sq p pl, state_valid_after_pop s sq pl ->
- state_valid_after_pop s (st::sq) (p::pl).
-
-Fixpoint is_state_valid_after_pop
- (state:state) (to_pop:list symbol) annot :=
- match annot, to_pop with
- | [], _ => true
- | p::_, [] => p state
- | p::pl, s::sl => is_state_valid_after_pop state sl pl
- end.
-
-Property is_state_valid_after_pop_complete state sl pl :
- state_valid_after_pop state sl pl -> is_state_valid_after_pop state sl pl = true.
-Proof.
-intro.
-induction H; intuition.
-destruct sl; intuition.
-Qed.
-
-(** A state is valid for reducing a production when:
- - The assumptions on the state are such that we will find the
- right-hand side of the production on the stack.
- - We will be able to do a goto after having popped the right-hand side.
-**)
-Definition valid_for_reduce (state:state) prod :=
- prefix (prod_rhs_rev prod) (head_symbs_of_state state) /\
- forall state_new,
- state_valid_after_pop state_new
- (prod_rhs_rev prod) (head_states_of_state state) ->
- goto_table state_new (prod_lhs prod) = None ->
- match state_new with
- | Init i => prod_lhs prod = start_nt i
- | Ninit _ => False
- end.
-
-Definition is_valid_for_reduce (state:state) prod:=
- (is_prefix (prod_rhs_rev prod) (head_symbs_of_state state) &&
- forallb (fun state_new =>
- if is_state_valid_after_pop state_new (prod_rhs_rev prod)
- (head_states_of_state state) then
- match goto_table state_new (prod_lhs prod) with
- | Some _ => true
- | None =>
- match state_new with
- | Init i => compare_eqb (prod_lhs prod) (start_nt i)
- | Ninit _ => false
- end
- end
- else
- true)
- all_list)%bool.
-
-Property is_valid_for_reduce_correct (state:state) prod:
- is_valid_for_reduce state prod = true -> valid_for_reduce state prod.
-Proof.
-unfold is_valid_for_reduce, valid_for_reduce.
-intros.
-rewrite Bool.andb_true_iff in H.
-split.
-apply is_prefix_correct.
-intuition.
-intros.
-rewrite forallb_forall in H.
-destruct H.
-specialize (H2 state_new (all_list_forall state_new)).
-rewrite is_state_valid_after_pop_complete, H1 in H2.
-destruct state_new; intuition.
-rewrite compare_eqb_iff in H2; intuition.
-intuition.
-Qed.
-
-(** All the states that do a reduce are valid for reduction. **)
-Definition reduce_ok :=
- forall s,
- match action_table s with
- | Lookahead_act awp =>
- forall t, match awp t with
- | Reduce_act p => valid_for_reduce s p
- | _ => True
- end
- | Default_reduce_act p => valid_for_reduce s p
- end.
-
-Definition is_reduce_ok (_:unit) :=
- forallb (fun s =>
- match action_table s with
- | Lookahead_act awp =>
- forallb (fun t =>
- match awp t with
- | Reduce_act p => is_valid_for_reduce s p
- | _ => true
- end)
- all_list
- | Default_reduce_act p => is_valid_for_reduce s p
- end)
- all_list.
-
-Property is_reduce_ok_correct :
- is_reduce_ok () = true -> reduce_ok.
-Proof.
-unfold is_reduce_ok, reduce_ok.
-intros.
-rewrite forallb_forall in H.
-specialize (H s (all_list_forall s)).
-destruct (action_table s).
-apply is_valid_for_reduce_correct; intuition.
-intro.
-rewrite forallb_forall in H.
-specialize (H t (all_list_forall t)).
-destruct (l t); intuition.
-apply is_valid_for_reduce_correct; intuition.
-Qed.
-
-(** The automaton is safe **)
-Definition safe :=
- shift_head_symbs /\ goto_head_symbs /\ shift_past_state /\
- goto_past_state /\ reduce_ok.
-
-Definition is_safe (_:unit) :=
- (is_shift_head_symbs () && is_goto_head_symbs () && is_shift_past_state () &&
- is_goto_past_state () && is_reduce_ok ())%bool.
-
-Property is_safe_correct:
- is_safe () = true -> safe.
-Proof.
-unfold safe, is_safe.
-repeat rewrite Bool.andb_true_iff.
-intuition.
-apply is_shift_head_symbs_correct; assumption.
-apply is_goto_head_symbs_correct; assumption.
-apply is_shift_past_state_correct; assumption.
-apply is_goto_past_state_correct; assumption.
-apply is_reduce_ok_correct; assumption.
-Qed.
-
-End Make.
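
The "prefix over predicate lists" relation defined above is pointwise entailment checked with [implb]: every predicate kept in the shorter list must be implied by the predicate at the same position in the longer list. Below is a standalone toy instance over [nat]; the predicates [small] and [weak] are hypothetical and only the [prefix_pred] definition is copied from the file above.

(* Standalone sketch; hypothetical predicates, not CompCert code. *)
Require Import List PeanoNat.
Import ListNotations.

Inductive prefix_pred {A} : list (A -> bool) -> list (A -> bool) -> Prop :=
  | prefix_pred_nil : forall l, prefix_pred [] l
  | prefix_pred_cons : forall l1 l2 f1 f2,
      (forall x, implb (f2 x) (f1 x) = true) ->
      prefix_pred l1 l2 -> prefix_pred (f1 :: l1) (f2 :: l2).

Definition small (n : nat) : bool := Nat.eqb n 0.   (* stronger predicate *)
Definition weak  (n : nat) : bool := Nat.leb n 5.   (* weaker predicate   *)

(* [small] entails [weak], so a list assuming only [weak] is a
   "predicate prefix" of a longer list assuming [small]. *)
Example entails : prefix_pred [weak] [small; (fun _ => true)].
Proof.
  constructor; [ | constructor].
  intros x; unfold small, weak, implb.
  destruct (Nat.eqb_spec x 0) as [-> | _]; reflexivity.
Qed.
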
diff --git a/cparser/PackedStructs.ml b/cparser/PackedStructs.ml
index 3c27f3a9..4c70c7ae 100644
--- a/cparser/PackedStructs.ml
+++ b/cparser/PackedStructs.ml
@@ -418,4 +418,4 @@ let program p =
| _ -> false
end;
Hashtbl.clear byteswapped_fields;
- transf_globdecls (Builtins.environment()) [] p
+ transf_globdecls (Env.initial()) [] p
diff --git a/cparser/Parse.ml b/cparser/Parse.ml
index 154e3dcf..29245083 100644
--- a/cparser/Parse.ml
+++ b/cparser/Parse.ml
@@ -56,22 +56,21 @@ let preprocessed_file transfs name sourcefile =
let text = read_file sourcefile in
let p =
let t = parse_transformations transfs in
- let rec inf = Datatypes.S inf in
+ let log_fuel = Camlcoq.Nat.of_int 50 in
let ast : Cabs.definition list =
- Obj.magic
(match Timing.time "Parsing"
(* The call to Lexer.tokens_stream results in the pre
 parsing of the entire file. This is non-negligible,
so we cannot use Timing.time2 *)
(fun () ->
- Parser.translation_unit_file inf (Lexer.tokens_stream name text)) ()
+ Parser.translation_unit_file log_fuel (Lexer.tokens_stream name text)) ()
with
- | Parser.Parser.Inter.Fail_pr ->
+ | Parser.MenhirLibParser.Inter.Fail_pr ->
(* Theoretically impossible : implies inconsistencies
between grammars. *)
- Diagnostics.fatal_error Diagnostics.no_loc "internal error while parsing"
- | Parser.Parser.Inter.Timeout_pr -> assert false
- | Parser.Parser.Inter.Parsed_pr (ast, _ ) -> ast) in
+ Diagnostics.fatal_error Diagnostics.no_loc "internal error while parsing"
+ | Parser.MenhirLibParser.Inter.Timeout_pr -> assert false
+ | Parser.MenhirLibParser.Inter.Parsed_pr (ast, _ ) -> ast) in
let p1 = Timing.time "Elaboration" Elab.elab_file ast in
Diagnostics.check_errors ();
Timing.time2 "Emulations" transform_program t p1 name in
diff --git a/cparser/Parser.vy b/cparser/Parser.vy
index 79e3793d..03bfa590 100644
--- a/cparser/Parser.vy
+++ b/cparser/Parser.vy
@@ -15,96 +15,99 @@
%{
-Require Import Cabs.
Require Import List.
+Require Cabs.
%}
-%token<string * cabsloc> VAR_NAME TYPEDEF_NAME OTHER_NAME
-%token<string * cabsloc> PRAGMA
-%token<bool * list char_code * cabsloc> STRING_LITERAL
-%token<constant * cabsloc> CONSTANT
-%token<cabsloc> SIZEOF PTR INC DEC LEFT RIGHT LEQ GEQ EQEQ EQ NEQ LT GT
+%token<Cabs.string * Cabs.loc> VAR_NAME TYPEDEF_NAME OTHER_NAME
+%token<Cabs.string * Cabs.loc> PRAGMA
+%token<bool * list Cabs.char_code * Cabs.loc> STRING_LITERAL
+%token<Cabs.constant * Cabs.loc> CONSTANT
+%token<Cabs.loc> SIZEOF PTR INC DEC LEFT RIGHT LEQ GEQ EQEQ EQ NEQ LT GT
ANDAND BARBAR PLUS MINUS STAR TILDE BANG SLASH PERCENT HAT BAR QUESTION
COLON AND ALIGNOF
-%token<cabsloc> MUL_ASSIGN DIV_ASSIGN MOD_ASSIGN ADD_ASSIGN SUB_ASSIGN
+%token<Cabs.loc> MUL_ASSIGN DIV_ASSIGN MOD_ASSIGN ADD_ASSIGN SUB_ASSIGN
LEFT_ASSIGN RIGHT_ASSIGN AND_ASSIGN XOR_ASSIGN OR_ASSIGN
-%token<cabsloc> LPAREN RPAREN LBRACK RBRACK LBRACE RBRACE DOT COMMA
- SEMICOLON ELLIPSIS TYPEDEF EXTERN STATIC RESTRICT AUTO REGISTER INLINE NORETURN
- CHAR SHORT INT LONG SIGNED UNSIGNED FLOAT DOUBLE CONST VOLATILE VOID
+%token<Cabs.loc> LPAREN RPAREN LBRACK RBRACK LBRACE RBRACE DOT COMMA
+ SEMICOLON ELLIPSIS TYPEDEF EXTERN STATIC RESTRICT AUTO REGISTER INLINE
+ NORETURN CHAR SHORT INT LONG SIGNED UNSIGNED FLOAT DOUBLE CONST VOLATILE VOID
STRUCT UNION ENUM UNDERSCORE_BOOL PACKED ALIGNAS ATTRIBUTE ASM
-%token<cabsloc> CASE DEFAULT IF ELSE SWITCH WHILE DO FOR GOTO CONTINUE BREAK
+%token<Cabs.loc> CASE DEFAULT IF_ ELSE SWITCH WHILE DO FOR GOTO CONTINUE BREAK
RETURN BUILTIN_VA_ARG BUILTIN_OFFSETOF
%token EOF
-%type<expression * cabsloc> primary_expression postfix_expression
+%type<Cabs.expression * Cabs.loc> primary_expression postfix_expression
unary_expression cast_expression multiplicative_expression additive_expression
shift_expression relational_expression equality_expression AND_expression
exclusive_OR_expression inclusive_OR_expression logical_AND_expression
logical_OR_expression conditional_expression assignment_expression
constant_expression expression
-%type<unary_operator * cabsloc> unary_operator
-%type<binary_operator> assignment_operator
-%type<list expression (* Reverse order *)> argument_expression_list
-%type<definition> declaration
-%type<list spec_elem * cabsloc> declaration_specifiers
-%type<list spec_elem> declaration_specifiers_typespec_opt
-%type<list init_name (* Reverse order *)> init_declarator_list
-%type<init_name> init_declarator
-%type<storage * cabsloc> storage_class_specifier
-%type<typeSpecifier * cabsloc> type_specifier struct_or_union_specifier enum_specifier
-%type<structOrUnion * cabsloc> struct_or_union
-%type<list field_group (* Reverse order *)> struct_declaration_list
-%type<field_group> struct_declaration
-%type<list spec_elem * cabsloc> specifier_qualifier_list
-%type<list (option name * option expression) (* Reverse order *)> struct_declarator_list
-%type<option name * option expression> struct_declarator
-%type<list (string * option expression * cabsloc) (* Reverse order *)> enumerator_list
-%type<string * option expression * cabsloc> enumerator
-%type<string * cabsloc> enumeration_constant
-%type<cvspec * cabsloc> type_qualifier type_qualifier_noattr
-%type<funspec * cabsloc> function_specifier
-%type<name> declarator declarator_noattrend direct_declarator
-%type<(decl_type -> decl_type) * cabsloc> pointer
-%type<list cvspec (* Reverse order *)> type_qualifier_list
-%type<list parameter * bool> parameter_type_list
-%type<list parameter (* Reverse order *)> parameter_list
-%type<parameter> parameter_declaration
-%type<list spec_elem * decl_type> type_name
-%type<decl_type> abstract_declarator direct_abstract_declarator
-%type<init_expression> c_initializer
-%type<list (list initwhat * init_expression) (* Reverse order *)> initializer_list
-%type<list initwhat> designation
-%type<list initwhat (* Reverse order *)> designator_list
-%type<initwhat> designator
-%type<statement> statement_dangerous statement_safe
+%type<Cabs.unary_operator * Cabs.loc> unary_operator
+%type<Cabs.binary_operator> assignment_operator
+%type<list Cabs.expression (* Reverse order *)> argument_expression_list
+%type<Cabs.definition> declaration
+%type<list Cabs.spec_elem * Cabs.loc> declaration_specifiers
+%type<list Cabs.spec_elem> declaration_specifiers_typespec_opt
+%type<list Cabs.init_name (* Reverse order *)> init_declarator_list
+%type<Cabs.init_name> init_declarator
+%type<Cabs.storage * Cabs.loc> storage_class_specifier
+%type<Cabs.typeSpecifier * Cabs.loc> type_specifier struct_or_union_specifier enum_specifier
+%type<Cabs.structOrUnion * Cabs.loc> struct_or_union
+%type<list Cabs.field_group (* Reverse order *)> struct_declaration_list
+%type<Cabs.field_group> struct_declaration
+%type<list Cabs.spec_elem * Cabs.loc> specifier_qualifier_list
+%type<list (option Cabs.name * option Cabs.expression) (* Reverse order *)>
+ struct_declarator_list
+%type<option Cabs.name * option Cabs.expression> struct_declarator
+%type<list (Cabs.string * option Cabs.expression * Cabs.loc) (* Reverse order *)>
+ enumerator_list
+%type<Cabs.string * option Cabs.expression * Cabs.loc> enumerator
+%type<Cabs.string * Cabs.loc> enumeration_constant
+%type<Cabs.cvspec * Cabs.loc> type_qualifier type_qualifier_noattr
+%type<Cabs.funspec * Cabs.loc> function_specifier
+%type<Cabs.name> declarator declarator_noattrend direct_declarator
+%type<(Cabs.decl_type -> Cabs.decl_type) * Cabs.loc> pointer
+%type<list Cabs.cvspec (* Reverse order *)> type_qualifier_list
+%type<list Cabs.parameter * bool> parameter_type_list
+%type<list Cabs.parameter (* Reverse order *)> parameter_list
+%type<Cabs.parameter> parameter_declaration
+%type<list Cabs.spec_elem * Cabs.decl_type> type_name
+%type<Cabs.decl_type> abstract_declarator direct_abstract_declarator
+%type<Cabs.init_expression> c_initializer
+%type<list (list Cabs.initwhat * Cabs.init_expression) (* Reverse order *)>
+ initializer_list
+%type<list Cabs.initwhat> designation
+%type<list Cabs.initwhat (* Reverse order *)> designator_list
+%type<Cabs.initwhat> designator
+%type<Cabs.statement> statement_dangerous statement_safe
labeled_statement(statement_safe) labeled_statement(statement_dangerous)
iteration_statement(statement_safe) iteration_statement(statement_dangerous)
compound_statement
-%type<list statement (* Reverse order *)> block_item_list
-%type<statement> block_item expression_statement selection_statement_dangerous
+%type<list Cabs.statement (* Reverse order *)> block_item_list
+%type<Cabs.statement> block_item expression_statement selection_statement_dangerous
selection_statement_safe jump_statement asm_statement
-%type<list definition (* Reverse order *)> translation_unit
-%type<definition> external_declaration function_definition
-%type<list definition> declaration_list
-%type<attribute * cabsloc> attribute_specifier
-%type<list attribute> attribute_specifier_list
-%type<gcc_attribute> gcc_attribute
-%type<list gcc_attribute> gcc_attribute_list
-%type<gcc_attribute_word> gcc_attribute_word
-%type<list string (* Reverse order *)> identifier_list
-%type<list asm_flag> asm_flags
-%type<option string> asm_op_name
-%type<asm_operand> asm_operand
-%type<list asm_operand> asm_operands asm_operands_ne
-%type<list asm_operand * list asm_operand * list asm_flag> asm_arguments
-%type<list cvspec> asm_attributes
-
-%start<list definition> translation_unit_file
+%type<list Cabs.definition (* Reverse order *)> translation_unit
+%type<Cabs.definition> external_declaration function_definition
+%type<list Cabs.definition> declaration_list
+%type<Cabs.attribute * Cabs.loc> attribute_specifier
+%type<list Cabs.attribute> attribute_specifier_list
+%type<Cabs.gcc_attribute> gcc_attribute
+%type<list Cabs.gcc_attribute> gcc_attribute_list
+%type<Cabs.gcc_attribute_word> gcc_attribute_word
+%type<list Cabs.string (* Reverse order *)> identifier_list
+%type<list Cabs.asm_flag> asm_flags
+%type<option Cabs.string> asm_op_name
+%type<Cabs.asm_operand> asm_operand
+%type<list Cabs.asm_operand> asm_operands asm_operands_ne
+%type<list Cabs.asm_operand * list Cabs.asm_operand * list Cabs.asm_flag> asm_arguments
+%type<list Cabs.cvspec> asm_attributes
+
+%start<list Cabs.definition> translation_unit_file
%%
(* Actual grammar *)
@@ -112,12 +115,12 @@ Require Import List.
(* 6.5.1 *)
primary_expression:
| var = VAR_NAME
- { (VARIABLE (fst var), snd var) }
+ { (Cabs.VARIABLE (fst var), snd var) }
| cst = CONSTANT
- { (CONSTANT (fst cst), snd cst) }
+ { (Cabs.CONSTANT (fst cst), snd cst) }
| str = STRING_LITERAL
{ let '((wide, chars), loc) := str in
- (CONSTANT (CONST_STRING wide chars), loc) }
+ (Cabs.CONSTANT (Cabs.CONST_STRING wide chars), loc) }
| loc = LPAREN expr = expression RPAREN
{ (fst expr, loc)}
@@ -126,29 +129,30 @@ postfix_expression:
| expr = primary_expression
{ expr }
| expr = postfix_expression LBRACK index = expression RBRACK
- { (INDEX (fst expr) (fst index), snd expr) }
+ { (Cabs.INDEX (fst expr) (fst index), snd expr) }
| expr = postfix_expression LPAREN args = argument_expression_list RPAREN
- { (CALL (fst expr) (rev' args), snd expr) }
+ { (Cabs.CALL (fst expr) (rev' args), snd expr) }
| expr = postfix_expression LPAREN RPAREN
- { (CALL (fst expr) [], snd expr) }
+ { (Cabs.CALL (fst expr) [], snd expr) }
| loc = BUILTIN_VA_ARG LPAREN expr = assignment_expression COMMA ty = type_name RPAREN
- { (BUILTIN_VA_ARG (fst expr) ty, loc) }
+ { (Cabs.BUILTIN_VA_ARG (fst expr) ty, loc) }
| expr = postfix_expression DOT mem = OTHER_NAME
- { (MEMBEROF (fst expr) (fst mem), snd expr) }
+ { (Cabs.MEMBEROF (fst expr) (fst mem), snd expr) }
| expr = postfix_expression PTR mem = OTHER_NAME
- { (MEMBEROFPTR (fst expr) (fst mem), snd expr) }
+ { (Cabs.MEMBEROFPTR (fst expr) (fst mem), snd expr) }
| expr = postfix_expression INC
- { (UNARY POSINCR (fst expr), snd expr) }
+ { (Cabs.UNARY Cabs.POSINCR (fst expr), snd expr) }
| expr = postfix_expression DEC
- { (UNARY POSDECR (fst expr), snd expr) }
+ { (Cabs.UNARY Cabs.POSDECR (fst expr), snd expr) }
| loc = LPAREN typ = type_name RPAREN LBRACE init = initializer_list RBRACE
- { (CAST typ (COMPOUND_INIT (rev' init)), loc) }
+ { (Cabs.CAST typ (Cabs.COMPOUND_INIT (rev' init)), loc) }
| loc = LPAREN typ = type_name RPAREN LBRACE init = initializer_list COMMA RBRACE
- { (CAST typ (COMPOUND_INIT (rev' init)), loc) }
-| loc = BUILTIN_OFFSETOF LPAREN typ = type_name COMMA id = OTHER_NAME mems = designator_list RPAREN
- { (BUILTIN_OFFSETOF typ ((INFIELD_INIT (fst id))::(rev mems)), loc) }
+ { (Cabs.CAST typ (Cabs.COMPOUND_INIT (rev' init)), loc) }
+| loc = BUILTIN_OFFSETOF LPAREN typ = type_name COMMA id = OTHER_NAME
+ mems = designator_list RPAREN
+ { (Cabs.BUILTIN_OFFSETOF typ ((Cabs.INFIELD_INIT (fst id))::(rev mems)), loc) }
| loc = BUILTIN_OFFSETOF LPAREN typ = type_name COMMA mem = OTHER_NAME RPAREN
- { (BUILTIN_OFFSETOF typ [INFIELD_INIT (fst mem)], loc) }
+ { (Cabs.BUILTIN_OFFSETOF typ [Cabs.INFIELD_INIT (fst mem)], loc) }
(* Semantic value is in reverse order. *)
argument_expression_list:
@@ -162,170 +166,171 @@ unary_expression:
| expr = postfix_expression
{ expr }
| loc = INC expr = unary_expression
- { (UNARY PREINCR (fst expr), loc) }
+ { (Cabs.UNARY Cabs.PREINCR (fst expr), loc) }
| loc = DEC expr = unary_expression
- { (UNARY PREDECR (fst expr), loc) }
+ { (Cabs.UNARY Cabs.PREDECR (fst expr), loc) }
| op = unary_operator expr = cast_expression
- { (UNARY (fst op) (fst expr), snd op) }
+ { (Cabs.UNARY (fst op) (fst expr), snd op) }
| loc = SIZEOF expr = unary_expression
- { (EXPR_SIZEOF (fst expr), loc) }
+ { (Cabs.EXPR_SIZEOF (fst expr), loc) }
| loc = SIZEOF LPAREN typ = type_name RPAREN
- { (TYPE_SIZEOF typ, loc) }
+ { (Cabs.TYPE_SIZEOF typ, loc) }
(* Non-standard *)
| loc = ALIGNOF LPAREN typ = type_name RPAREN
- { (ALIGNOF typ, loc) }
+ { (Cabs.ALIGNOF typ, loc) }
unary_operator:
| loc = AND
- { (ADDROF, loc) }
+ { (Cabs.ADDROF, loc) }
| loc = STAR
- { (MEMOF, loc) }
+ { (Cabs.MEMOF, loc) }
| loc = PLUS
- { (PLUS, loc) }
+ { (Cabs.PLUS, loc) }
| loc = MINUS
- { (MINUS, loc) }
+ { (Cabs.MINUS, loc) }
| loc = TILDE
- { (BNOT, loc) }
+ { (Cabs.BNOT, loc) }
| loc = BANG
- { (NOT, loc) }
+ { (Cabs.NOT, loc) }
(* 6.5.4 *)
cast_expression:
| expr = unary_expression
{ expr }
| loc = LPAREN typ = type_name RPAREN expr = cast_expression
- { (CAST typ (SINGLE_INIT (fst expr)), loc) }
+ { (Cabs.CAST typ (Cabs.SINGLE_INIT (fst expr)), loc) }
(* 6.5.5 *)
multiplicative_expression:
| expr = cast_expression
{ expr }
| expr1 = multiplicative_expression STAR expr2 = cast_expression
- { (BINARY MUL (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.MUL (fst expr1) (fst expr2), snd expr1) }
| expr1 = multiplicative_expression SLASH expr2 = cast_expression
- { (BINARY DIV (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.DIV (fst expr1) (fst expr2), snd expr1) }
| expr1 = multiplicative_expression PERCENT expr2 = cast_expression
- { (BINARY MOD (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.MOD (fst expr1) (fst expr2), snd expr1) }
(* 6.5.6 *)
additive_expression:
| expr = multiplicative_expression
{ expr }
| expr1 = additive_expression PLUS expr2 = multiplicative_expression
- { (BINARY ADD (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.ADD (fst expr1) (fst expr2), snd expr1) }
| expr1 = additive_expression MINUS expr2 = multiplicative_expression
- { (BINARY SUB (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.SUB (fst expr1) (fst expr2), snd expr1) }
(* 6.5.7 *)
shift_expression:
| expr = additive_expression
{ expr }
| expr1 = shift_expression LEFT expr2 = additive_expression
- { (BINARY SHL (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.SHL (fst expr1) (fst expr2), snd expr1) }
| expr1 = shift_expression RIGHT expr2 = additive_expression
- { (BINARY SHR (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.SHR (fst expr1) (fst expr2), snd expr1) }
(* 6.5.8 *)
relational_expression:
| expr = shift_expression
{ expr }
| expr1 = relational_expression LT expr2 = shift_expression
- { (BINARY LT (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.LT (fst expr1) (fst expr2), snd expr1) }
| expr1 = relational_expression GT expr2 = shift_expression
- { (BINARY GT (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.GT (fst expr1) (fst expr2), snd expr1) }
| expr1 = relational_expression LEQ expr2 = shift_expression
- { (BINARY LE (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.LE (fst expr1) (fst expr2), snd expr1) }
| expr1 = relational_expression GEQ expr2 = shift_expression
- { (BINARY GE (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.GE (fst expr1) (fst expr2), snd expr1) }
(* 6.5.9 *)
equality_expression:
| expr = relational_expression
{ expr }
| expr1 = equality_expression EQEQ expr2 = relational_expression
- { (BINARY EQ (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.EQ (fst expr1) (fst expr2), snd expr1) }
| expr1 = equality_expression NEQ expr2 = relational_expression
- { (BINARY NE (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.NE (fst expr1) (fst expr2), snd expr1) }
(* 6.5.10 *)
AND_expression:
| expr = equality_expression
{ expr }
| expr1 = AND_expression AND expr2 = equality_expression
- { (BINARY BAND (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.BAND (fst expr1) (fst expr2), snd expr1) }
(* 6.5.11 *)
exclusive_OR_expression:
| expr = AND_expression
{ expr }
| expr1 = exclusive_OR_expression HAT expr2 = AND_expression
- { (BINARY XOR (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.XOR (fst expr1) (fst expr2), snd expr1) }
(* 6.5.12 *)
inclusive_OR_expression:
| expr = exclusive_OR_expression
{ expr }
| expr1 = inclusive_OR_expression BAR expr2 = exclusive_OR_expression
- { (BINARY BOR (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.BOR (fst expr1) (fst expr2), snd expr1) }
(* 6.5.13 *)
logical_AND_expression:
| expr = inclusive_OR_expression
{ expr }
| expr1 = logical_AND_expression ANDAND expr2 = inclusive_OR_expression
- { (BINARY AND (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.AND (fst expr1) (fst expr2), snd expr1) }
(* 6.5.14 *)
logical_OR_expression:
| expr = logical_AND_expression
{ expr }
| expr1 = logical_OR_expression BARBAR expr2 = logical_AND_expression
- { (BINARY OR (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.OR (fst expr1) (fst expr2), snd expr1) }
(* 6.5.15 *)
conditional_expression:
| expr = logical_OR_expression
{ expr }
-| expr1 = logical_OR_expression QUESTION expr2 = expression COLON expr3 = conditional_expression
- { (QUESTION (fst expr1) (fst expr2) (fst expr3), snd expr1) }
+| expr1 = logical_OR_expression QUESTION expr2 = expression COLON
+ expr3 = conditional_expression
+ { (Cabs.QUESTION (fst expr1) (fst expr2) (fst expr3), snd expr1) }
(* 6.5.16 *)
assignment_expression:
| expr = conditional_expression
{ expr }
| expr1 = unary_expression op = assignment_operator expr2 = assignment_expression
- { (BINARY op (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY op (fst expr1) (fst expr2), snd expr1) }
assignment_operator:
| EQ
- { ASSIGN }
+ { Cabs.ASSIGN }
| MUL_ASSIGN
- { MUL_ASSIGN }
+ { Cabs.MUL_ASSIGN }
| DIV_ASSIGN
- { DIV_ASSIGN }
+ { Cabs.DIV_ASSIGN }
| MOD_ASSIGN
- { MOD_ASSIGN }
+ { Cabs.MOD_ASSIGN }
| ADD_ASSIGN
- { ADD_ASSIGN }
+ { Cabs.ADD_ASSIGN }
| SUB_ASSIGN
- { SUB_ASSIGN }
+ { Cabs.SUB_ASSIGN }
| LEFT_ASSIGN
- { SHL_ASSIGN }
+ { Cabs.SHL_ASSIGN }
| RIGHT_ASSIGN
- { SHR_ASSIGN }
+ { Cabs.SHR_ASSIGN }
| XOR_ASSIGN
- { XOR_ASSIGN }
+ { Cabs.XOR_ASSIGN }
| OR_ASSIGN
- { BOR_ASSIGN }
+ { Cabs.BOR_ASSIGN }
| AND_ASSIGN
- { BAND_ASSIGN }
+ { Cabs.BAND_ASSIGN }
(* 6.5.17 *)
expression:
| expr = assignment_expression
{ expr }
| expr1 = expression COMMA expr2 = assignment_expression
- { (BINARY COMMA (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.COMMA (fst expr1) (fst expr2), snd expr1) }
(* 6.6 *)
constant_expression:
@@ -335,19 +340,19 @@ constant_expression:
(* 6.7 *)
declaration:
| decspec = declaration_specifiers decls = init_declarator_list SEMICOLON
- { DECDEF (fst decspec, rev' decls) (snd decspec) }
+ { Cabs.DECDEF (fst decspec, rev' decls) (snd decspec) }
| decspec = declaration_specifiers SEMICOLON
- { DECDEF (fst decspec, []) (snd decspec) }
+ { Cabs.DECDEF (fst decspec, []) (snd decspec) }
declaration_specifiers_typespec_opt:
| storage = storage_class_specifier rest = declaration_specifiers_typespec_opt
- { SpecStorage (fst storage)::rest }
+ { Cabs.SpecStorage (fst storage)::rest }
| typ = type_specifier rest = declaration_specifiers_typespec_opt
- { SpecType (fst typ)::rest }
+ { Cabs.SpecType (fst typ)::rest }
| qual = type_qualifier rest = declaration_specifiers_typespec_opt
- { SpecCV (fst qual)::rest }
+ { Cabs.SpecCV (fst qual)::rest }
| func = function_specifier rest = declaration_specifiers_typespec_opt
- { SpecFunction (fst func)::rest }
+ { Cabs.SpecFunction (fst func)::rest }
| /* empty */
{ [] }
@@ -357,16 +362,16 @@ declaration_specifiers_typespec_opt:
specifier. *)
declaration_specifiers:
| storage = storage_class_specifier rest = declaration_specifiers
- { (SpecStorage (fst storage)::fst rest, snd storage) }
+ { (Cabs.SpecStorage (fst storage)::fst rest, snd storage) }
| typ = type_specifier rest = declaration_specifiers_typespec_opt
- { (SpecType (fst typ)::rest, snd typ) }
+ { (Cabs.SpecType (fst typ)::rest, snd typ) }
(* We have to inline type_qualifier in order to avoid a conflict. *)
| qual = type_qualifier_noattr rest = declaration_specifiers
- { (SpecCV (fst qual)::fst rest, snd qual) }
+ { (Cabs.SpecCV (fst qual)::fst rest, snd qual) }
| attr = attribute_specifier rest = declaration_specifiers
- { (SpecCV (CV_ATTR (fst attr))::fst rest, snd attr) }
+ { (Cabs.SpecCV (Cabs.CV_ATTR (fst attr))::fst rest, snd attr) }
| func = function_specifier rest = declaration_specifiers
- { (SpecFunction (fst func)::fst rest, snd func) }
+ { (Cabs.SpecFunction (fst func)::fst rest, snd func) }
init_declarator_list:
| init = init_declarator
@@ -376,71 +381,71 @@ init_declarator_list:
init_declarator:
| name = declarator
- { Init_name name NO_INIT }
+ { Cabs.Init_name name Cabs.NO_INIT }
| name = declarator EQ init = c_initializer
- { Init_name name init }
+ { Cabs.Init_name name init }
(* 6.7.1 *)
storage_class_specifier:
| loc = TYPEDEF
- { (TYPEDEF, loc) }
+ { (Cabs.TYPEDEF, loc) }
| loc = EXTERN
- { (EXTERN, loc) }
+ { (Cabs.EXTERN, loc) }
| loc = STATIC
- { (STATIC, loc) }
+ { (Cabs.STATIC, loc) }
| loc = AUTO
- { (AUTO, loc) }
+ { (Cabs.AUTO, loc) }
| loc = REGISTER
- { (REGISTER, loc) }
+ { (Cabs.REGISTER, loc) }
(* 6.7.2 *)
type_specifier:
| loc = VOID
- { (Tvoid, loc) }
+ { (Cabs.Tvoid, loc) }
| loc = CHAR
- { (Tchar, loc) }
+ { (Cabs.Tchar, loc) }
| loc = SHORT
- { (Tshort, loc) }
+ { (Cabs.Tshort, loc) }
| loc = INT
- { (Tint, loc) }
+ { (Cabs.Tint, loc) }
| loc = LONG
- { (Tlong, loc) }
+ { (Cabs.Tlong, loc) }
| loc = FLOAT
- { (Tfloat, loc) }
+ { (Cabs.Tfloat, loc) }
| loc = DOUBLE
- { (Tdouble, loc) }
+ { (Cabs.Tdouble, loc) }
| loc = SIGNED
- { (Tsigned, loc) }
+ { (Cabs.Tsigned, loc) }
| loc = UNSIGNED
- { (Tunsigned, loc) }
+ { (Cabs.Tunsigned, loc) }
| loc = UNDERSCORE_BOOL
- { (T_Bool, loc) }
+ { (Cabs.T_Bool, loc) }
| spec = struct_or_union_specifier
{ spec }
| spec = enum_specifier
{ spec }
| id = TYPEDEF_NAME
- { (Tnamed (fst id), snd id) }
+ { (Cabs.Tnamed (fst id), snd id) }
(* 6.7.2.1 *)
struct_or_union_specifier:
| str_uni = struct_or_union attrs = attribute_specifier_list id = OTHER_NAME
LBRACE decls = struct_declaration_list RBRACE
- { (Tstruct_union (fst str_uni) (Some (fst id)) (Some (rev' decls)) attrs,
+ { (Cabs.Tstruct_union (fst str_uni) (Some (fst id)) (Some (rev' decls)) attrs,
snd str_uni) }
| str_uni = struct_or_union attrs = attribute_specifier_list
LBRACE decls = struct_declaration_list RBRACE
- { (Tstruct_union (fst str_uni) None (Some (rev' decls)) attrs,
+ { (Cabs.Tstruct_union (fst str_uni) None (Some (rev' decls)) attrs,
snd str_uni) }
| str_uni = struct_or_union attrs = attribute_specifier_list id = OTHER_NAME
- { (Tstruct_union (fst str_uni) (Some (fst id)) None attrs,
+ { (Cabs.Tstruct_union (fst str_uni) (Some (fst id)) None attrs,
snd str_uni) }
struct_or_union:
| loc = STRUCT
- { (STRUCT, loc) }
+ { (Cabs.STRUCT, loc) }
| loc = UNION
- { (UNION, loc) }
+ { (Cabs.UNION, loc) }
struct_declaration_list:
| (* empty *)
@@ -450,20 +455,20 @@ struct_declaration_list:
struct_declaration:
| decspec = specifier_qualifier_list decls = struct_declarator_list SEMICOLON
- { Field_group (fst decspec) (rev' decls) (snd decspec) }
+ { Cabs.Field_group (fst decspec) (rev' decls) (snd decspec) }
(* Extension to C99 grammar needed to parse some GNU header files. *)
| decspec = specifier_qualifier_list SEMICOLON
- { Field_group (fst decspec) [(None,None)] (snd decspec) }
+ { Cabs.Field_group (fst decspec) [(None,None)] (snd decspec) }
specifier_qualifier_list:
| typ = type_specifier rest = specifier_qualifier_list
- { (SpecType (fst typ)::fst rest, snd typ) }
+ { (Cabs.SpecType (fst typ)::fst rest, snd typ) }
| typ = type_specifier
- { ([SpecType (fst typ)], snd typ) }
+ { ([Cabs.SpecType (fst typ)], snd typ) }
| qual = type_qualifier rest = specifier_qualifier_list
- { (SpecCV (fst qual)::fst rest, snd qual) }
+ { (Cabs.SpecCV (fst qual)::fst rest, snd qual) }
| qual = type_qualifier
- { ([SpecCV (fst qual)], snd qual) }
+ { ([Cabs.SpecCV (fst qual)], snd qual) }
struct_declarator_list:
| decl = struct_declarator
@@ -483,18 +488,18 @@ struct_declarator:
enum_specifier:
| loc = ENUM attrs = attribute_specifier_list name = OTHER_NAME
LBRACE enum_list = enumerator_list RBRACE
- { (Tenum (Some (fst name)) (Some (rev' enum_list)) attrs, loc) }
+ { (Cabs.Tenum (Some (fst name)) (Some (rev' enum_list)) attrs, loc) }
| loc = ENUM attrs = attribute_specifier_list
LBRACE enum_list = enumerator_list RBRACE
- { (Tenum None (Some (rev' enum_list)) attrs, loc) }
+ { (Cabs.Tenum None (Some (rev' enum_list)) attrs, loc) }
| loc = ENUM attrs = attribute_specifier_list name = OTHER_NAME
LBRACE enum_list = enumerator_list COMMA RBRACE
- { (Tenum (Some (fst name)) (Some (rev' enum_list)) attrs, loc) }
+ { (Cabs.Tenum (Some (fst name)) (Some (rev' enum_list)) attrs, loc) }
| loc = ENUM attrs = attribute_specifier_list
LBRACE enum_list = enumerator_list COMMA RBRACE
- { (Tenum None (Some (rev' enum_list)) attrs, loc) }
+ { (Cabs.Tenum None (Some (rev' enum_list)) attrs, loc) }
| loc = ENUM attrs = attribute_specifier_list name = OTHER_NAME
- { (Tenum (Some (fst name)) None attrs, loc) }
+ { (Cabs.Tenum (Some (fst name)) None attrs, loc) }
enumerator_list:
| enum = enumerator
@@ -515,18 +520,18 @@ enumeration_constant:
(* 6.7.3 *)
type_qualifier_noattr:
| loc = CONST
- { (CV_CONST, loc) }
+ { (Cabs.CV_CONST, loc) }
| loc = RESTRICT
- { (CV_RESTRICT, loc) }
+ { (Cabs.CV_RESTRICT, loc) }
| loc = VOLATILE
- { (CV_VOLATILE, loc) }
+ { (Cabs.CV_VOLATILE, loc) }
type_qualifier:
| qual = type_qualifier_noattr
{ qual }
(* Non-standard *)
| attr = attribute_specifier
- { (CV_ATTR (fst attr), snd attr) }
+ { (Cabs.CV_ATTR (fst attr), snd attr) }
(* Non-standard *)
@@ -538,13 +543,13 @@ attribute_specifier_list:
attribute_specifier:
| loc = ATTRIBUTE LPAREN LPAREN attr = gcc_attribute_list RPAREN RPAREN
- { (GCC_ATTR (rev' attr) loc, loc) }
+ { (Cabs.GCC_ATTR (rev' attr) loc, loc) }
| loc = PACKED LPAREN args = argument_expression_list RPAREN
- { (PACKED_ATTR (rev' args) loc, loc) }
+ { (Cabs.PACKED_ATTR (rev' args) loc, loc) }
| loc = ALIGNAS LPAREN args = argument_expression_list RPAREN
- { (ALIGNAS_ATTR (rev' args) loc, loc) }
+ { (Cabs.ALIGNAS_ATTR (rev' args) loc, loc) }
| loc = ALIGNAS LPAREN typ = type_name RPAREN
- { (ALIGNAS_ATTR [ALIGNOF typ] loc, loc) }
+ { (Cabs.ALIGNAS_ATTR [Cabs.ALIGNOF typ] loc, loc) }
gcc_attribute_list:
| a = gcc_attribute
@@ -554,80 +559,81 @@ gcc_attribute_list:
gcc_attribute:
| /* empty */
- { GCC_ATTR_EMPTY }
+ { Cabs.GCC_ATTR_EMPTY }
| w = gcc_attribute_word
- { GCC_ATTR_NOARGS w }
+ { Cabs.GCC_ATTR_NOARGS w }
| w = gcc_attribute_word LPAREN RPAREN
- { GCC_ATTR_ARGS w [] }
+ { Cabs.GCC_ATTR_ARGS w [] }
| w = gcc_attribute_word LPAREN args = argument_expression_list RPAREN
- { GCC_ATTR_ARGS w (rev' args) }
+ { Cabs.GCC_ATTR_ARGS w (rev' args) }
gcc_attribute_word:
| i = OTHER_NAME
- { GCC_ATTR_IDENT (fst i) }
+ { Cabs.GCC_ATTR_IDENT (fst i) }
| CONST
- { GCC_ATTR_CONST }
+ { Cabs.GCC_ATTR_CONST }
| PACKED
- { GCC_ATTR_PACKED }
+ { Cabs.GCC_ATTR_PACKED }
(* 6.7.4 *)
function_specifier:
| loc = INLINE
- { (INLINE, loc) }
+ { (Cabs.INLINE, loc) }
| loc = NORETURN
- { (NORETURN, loc)}
+ { (Cabs.NORETURN, loc)}
(* 6.7.5 *)
declarator:
| decl = declarator_noattrend attrs = attribute_specifier_list
- { match decl with Name name typ attr loc =>
- Name name typ (List.app attr attrs) loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name typ (List.app attr attrs) loc }
declarator_noattrend:
| decl = direct_declarator
{ decl }
| pt = pointer decl = direct_declarator
- { match decl with Name name typ attr _ =>
- Name name ((fst pt) typ) attr (snd pt) end }
+ { let 'Cabs.Name name typ attr _ := decl in
+ Cabs.Name name ((fst pt) typ) attr (snd pt) }
direct_declarator:
| id = VAR_NAME
- { Name (fst id) JUSTBASE [] (snd id) }
+ { Cabs.Name (fst id) Cabs.JUSTBASE [] (snd id) }
| LPAREN decl = declarator RPAREN
{ decl }
-| decl = direct_declarator LBRACK quallst = type_qualifier_list expr = assignment_expression RBRACK
- { match decl with Name name typ attr loc =>
- Name name (ARRAY typ (rev' quallst) (Some (fst expr))) attr loc end }
+| decl = direct_declarator LBRACK quallst = type_qualifier_list
+ expr = assignment_expression RBRACK
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.ARRAY typ (rev' quallst) (Some (fst expr))) attr loc }
| decl = direct_declarator LBRACK expr = assignment_expression RBRACK
- { match decl with Name name typ attr loc =>
- Name name (ARRAY typ [] (Some (fst expr))) attr loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.ARRAY typ [] (Some (fst expr))) attr loc }
| decl = direct_declarator LBRACK quallst = type_qualifier_list RBRACK
- { match decl with Name name typ attr loc =>
- Name name (ARRAY typ (rev' quallst) None) attr loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.ARRAY typ (rev' quallst) None) attr loc }
| decl = direct_declarator LBRACK RBRACK
- { match decl with Name name typ attr loc =>
- Name name (ARRAY typ [] None) attr loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.ARRAY typ [] None) attr loc }
(*| direct_declarator LBRACK ... STATIC ... RBRACK
| direct_declarator LBRACK STAR RBRACK*)
| decl = direct_declarator LPAREN params = parameter_type_list RPAREN
- { match decl with Name name typ attr loc =>
- Name name (PROTO typ params) attr loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.PROTO typ params) attr loc }
| decl = direct_declarator LPAREN RPAREN
- { match decl with Name name typ attr loc =>
- Name name (PROTO_OLD typ []) attr loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.PROTO_OLD typ []) attr loc }
| decl = direct_declarator LPAREN params = identifier_list RPAREN
- { match decl with Name name typ attr loc =>
- Name name (PROTO_OLD typ (rev' params)) attr loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.PROTO_OLD typ (rev' params)) attr loc }
pointer:
| loc = STAR
- { (fun typ => PTR [] typ, loc) }
+ { (fun typ => Cabs.PTR [] typ, loc) }
| loc = STAR quallst = type_qualifier_list
- { (fun typ => PTR (rev' quallst) typ, loc) }
+ { (fun typ => Cabs.PTR (rev' quallst) typ, loc) }
| loc = STAR pt = pointer
- { (fun typ => PTR [] ((fst pt) typ), loc) }
+ { (fun typ => Cabs.PTR [] ((fst pt) typ), loc) }
| loc = STAR quallst = type_qualifier_list pt = pointer
- { (fun typ => PTR (rev' quallst) ((fst pt) typ), loc) }
+ { (fun typ => Cabs.PTR (rev' quallst) ((fst pt) typ), loc) }
type_qualifier_list:
| qual = type_qualifier
@@ -649,12 +655,12 @@ parameter_list:
parameter_declaration:
| specs = declaration_specifiers decl = declarator
- { match decl with Name name typ attr _ =>
- PARAM (fst specs) (Some name) typ attr (snd specs) end }
+ { match decl with Cabs.Name name typ attr _ =>
+ Cabs.PARAM (fst specs) (Some name) typ attr (snd specs) end }
| specs = declaration_specifiers decl = abstract_declarator
- { PARAM (fst specs) None decl [] (snd specs) }
+ { Cabs.PARAM (fst specs) None decl [] (snd specs) }
| specs = declaration_specifiers
- { PARAM (fst specs) None JUSTBASE [] (snd specs) }
+ { Cabs.PARAM (fst specs) None Cabs.JUSTBASE [] (snd specs) }
identifier_list:
| id = VAR_NAME
@@ -665,13 +671,13 @@ identifier_list:
(* 6.7.6 *)
type_name:
| specqual = specifier_qualifier_list
- { (fst specqual, JUSTBASE) }
+ { (fst specqual, Cabs.JUSTBASE) }
| specqual = specifier_qualifier_list typ = abstract_declarator
{ (fst specqual, typ) }
abstract_declarator:
| pt = pointer
- { (fst pt) JUSTBASE }
+ { (fst pt) Cabs.JUSTBASE }
| pt = pointer typ = direct_abstract_declarator
{ (fst pt) typ }
| typ = direct_abstract_declarator
@@ -680,41 +686,42 @@ abstract_declarator:
direct_abstract_declarator:
| LPAREN typ = abstract_declarator RPAREN
{ typ }
-| typ = direct_abstract_declarator LBRACK cvspec = type_qualifier_list expr = assignment_expression RBRACK
- { ARRAY typ cvspec (Some (fst expr)) }
+| typ = direct_abstract_declarator LBRACK cvspec = type_qualifier_list
+ expr = assignment_expression RBRACK
+ { Cabs.ARRAY typ cvspec (Some (fst expr)) }
| LBRACK cvspec = type_qualifier_list expr = assignment_expression RBRACK
- { ARRAY JUSTBASE cvspec (Some (fst expr)) }
+ { Cabs.ARRAY Cabs.JUSTBASE cvspec (Some (fst expr)) }
| typ = direct_abstract_declarator LBRACK expr = assignment_expression RBRACK
- { ARRAY typ [] (Some (fst expr)) }
+ { Cabs.ARRAY typ [] (Some (fst expr)) }
| LBRACK expr = assignment_expression RBRACK
- { ARRAY JUSTBASE [] (Some (fst expr)) }
+ { Cabs.ARRAY Cabs.JUSTBASE [] (Some (fst expr)) }
| typ = direct_abstract_declarator LBRACK cvspec = type_qualifier_list RBRACK
- { ARRAY typ cvspec None }
+ { Cabs.ARRAY typ cvspec None }
| LBRACK cvspec = type_qualifier_list RBRACK
- { ARRAY JUSTBASE cvspec None }
+ { Cabs.ARRAY Cabs.JUSTBASE cvspec None }
| typ = direct_abstract_declarator LBRACK RBRACK
- { ARRAY typ [] None }
+ { Cabs.ARRAY typ [] None }
| LBRACK RBRACK
- { ARRAY JUSTBASE [] None }
+ { Cabs.ARRAY Cabs.JUSTBASE [] None }
(*| direct_abstract_declarator? LBRACK STAR RBRACK*)
(*| direct_abstract_declarator? LBRACK ... STATIC ... RBRACK*)
| typ = direct_abstract_declarator LPAREN params = parameter_type_list RPAREN
- { PROTO typ params }
+ { Cabs.PROTO typ params }
| LPAREN params = parameter_type_list RPAREN
- { PROTO JUSTBASE params }
+ { Cabs.PROTO Cabs.JUSTBASE params }
| typ = direct_abstract_declarator LPAREN RPAREN
- { PROTO typ ([], false) }
+ { Cabs.PROTO typ ([], false) }
| LPAREN RPAREN
- { PROTO JUSTBASE ([], false) }
+ { Cabs.PROTO Cabs.JUSTBASE ([], false) }
(* 6.7.8 *)
c_initializer:
| expr = assignment_expression
- { SINGLE_INIT (fst expr) }
+ { Cabs.SINGLE_INIT (fst expr) }
| LBRACE init = initializer_list RBRACE
- { COMPOUND_INIT (rev' init) }
+ { Cabs.COMPOUND_INIT (rev' init) }
| LBRACE init = initializer_list COMMA RBRACE
- { COMPOUND_INIT (rev' init) }
+ { Cabs.COMPOUND_INIT (rev' init) }
initializer_list:
| design = designation init = c_initializer
@@ -738,9 +745,9 @@ designator_list:
designator:
| LBRACK expr = constant_expression RBRACK
- { ATINDEX_INIT (fst expr) }
+ { Cabs.ATINDEX_INIT (fst expr) }
| DOT id = OTHER_NAME
- { INFIELD_INIT (fst id) }
+ { Cabs.INFIELD_INIT (fst id) }
(* 6.8 *)
statement_dangerous:
@@ -768,18 +775,18 @@ statement_safe:
(* 6.8.1 *)
labeled_statement(last_statement):
| lbl = OTHER_NAME COLON stmt = last_statement
- { LABEL (fst lbl) stmt (snd lbl) }
+ { Cabs.LABEL (fst lbl) stmt (snd lbl) }
| loc = CASE expr = constant_expression COLON stmt = last_statement
- { CASE (fst expr) stmt loc }
+ { Cabs.CASE (fst expr) stmt loc }
| loc = DEFAULT COLON stmt = last_statement
- { DEFAULT stmt loc }
+ { Cabs.DEFAULT stmt loc }
(* 6.8.2 *)
compound_statement:
| loc = LBRACE lst = block_item_list RBRACE
- { BLOCK (rev' lst) loc }
+ { Cabs.BLOCK (rev' lst) loc }
| loc = LBRACE RBRACE
- { BLOCK [] loc }
+ { Cabs.BLOCK [] loc }
block_item_list:
| stmt = block_item
@@ -789,93 +796,103 @@ block_item_list:
block_item:
| decl = declaration
- { DEFINITION decl }
+ { Cabs.DEFINITION decl }
| stmt = statement_dangerous
{ stmt }
(* Non-standard *)
| p = PRAGMA
- { DEFINITION (PRAGMA (fst p) (snd p)) }
+ { Cabs.DEFINITION (Cabs.PRAGMA (fst p) (snd p)) }
(* 6.8.3 *)
expression_statement:
| expr = expression SEMICOLON
- { COMPUTATION (fst expr) (snd expr) }
+ { Cabs.COMPUTATION (fst expr) (snd expr) }
| loc = SEMICOLON
- { NOP loc }
+ { Cabs.NOP loc }
(* 6.8.4 *)
selection_statement_dangerous:
-| loc = IF LPAREN expr = expression RPAREN stmt = statement_dangerous
- { If (fst expr) stmt None loc }
-| loc = IF LPAREN expr = expression RPAREN stmt1 = statement_safe ELSE stmt2 = statement_dangerous
- { If (fst expr) stmt1 (Some stmt2) loc }
+| loc = IF_ LPAREN expr = expression RPAREN stmt = statement_dangerous
+ { Cabs.If (fst expr) stmt None loc }
+| loc = IF_ LPAREN expr = expression RPAREN stmt1 = statement_safe ELSE
+ stmt2 = statement_dangerous
+ { Cabs.If (fst expr) stmt1 (Some stmt2) loc }
| loc = SWITCH LPAREN expr = expression RPAREN stmt = statement_dangerous
- { SWITCH (fst expr) stmt loc }
+ { Cabs.SWITCH (fst expr) stmt loc }
selection_statement_safe:
-| loc = IF LPAREN expr = expression RPAREN stmt1 = statement_safe ELSE stmt2 = statement_safe
- { If (fst expr) stmt1 (Some stmt2) loc }
+| loc = IF_ LPAREN expr = expression RPAREN stmt1 = statement_safe ELSE
+ stmt2 = statement_safe
+ { Cabs.If (fst expr) stmt1 (Some stmt2) loc }
| loc = SWITCH LPAREN expr = expression RPAREN stmt = statement_safe
- { SWITCH (fst expr) stmt loc }
+ { Cabs.SWITCH (fst expr) stmt loc }
(* 6.8.5 *)
iteration_statement(last_statement):
| loc = WHILE LPAREN expr = expression RPAREN stmt = last_statement
- { WHILE (fst expr) stmt loc }
+ { Cabs.WHILE (fst expr) stmt loc }
| loc = DO stmt = statement_dangerous WHILE LPAREN expr = expression RPAREN SEMICOLON
- { DOWHILE (fst expr) stmt loc }
-| loc = FOR LPAREN expr1 = expression SEMICOLON expr2 = expression SEMICOLON expr3 = expression RPAREN stmt = last_statement
- { FOR (Some (FC_EXP (fst expr1))) (Some (fst expr2)) (Some (fst expr3)) stmt loc }
-| loc = FOR LPAREN decl1 = declaration expr2 = expression SEMICOLON expr3 = expression RPAREN stmt = last_statement
- { FOR (Some (FC_DECL decl1)) (Some (fst expr2)) (Some (fst expr3)) stmt loc }
-| loc = FOR LPAREN SEMICOLON expr2 = expression SEMICOLON expr3 = expression RPAREN stmt = last_statement
- { FOR None (Some (fst expr2)) (Some (fst expr3)) stmt loc }
-| loc = FOR LPAREN expr1 = expression SEMICOLON SEMICOLON expr3 = expression RPAREN stmt = last_statement
- { FOR (Some (FC_EXP (fst expr1))) None (Some (fst expr3)) stmt loc }
-| loc = FOR LPAREN decl1 = declaration SEMICOLON expr3 = expression RPAREN stmt = last_statement
- { FOR (Some (FC_DECL decl1)) None (Some (fst expr3)) stmt loc }
+ { Cabs.DOWHILE (fst expr) stmt loc }
+| loc = FOR LPAREN expr1 = expression SEMICOLON expr2 = expression SEMICOLON
+ expr3 = expression RPAREN stmt = last_statement
+ { Cabs.FOR (Some (Cabs.FC_EXP (fst expr1))) (Some (fst expr2)) (Some (fst expr3)) stmt loc }
+| loc = FOR LPAREN decl1 = declaration expr2 = expression SEMICOLON
+ expr3 = expression RPAREN stmt = last_statement
+ { Cabs.FOR (Some (Cabs.FC_DECL decl1)) (Some (fst expr2)) (Some (fst expr3)) stmt loc }
+| loc = FOR LPAREN SEMICOLON expr2 = expression SEMICOLON expr3 = expression RPAREN
+ stmt = last_statement
+ { Cabs.FOR None (Some (fst expr2)) (Some (fst expr3)) stmt loc }
+| loc = FOR LPAREN expr1 = expression SEMICOLON SEMICOLON expr3 = expression RPAREN
+ stmt = last_statement
+ { Cabs.FOR (Some (Cabs.FC_EXP (fst expr1))) None (Some (fst expr3)) stmt loc }
+| loc = FOR LPAREN decl1 = declaration SEMICOLON expr3 = expression RPAREN
+ stmt = last_statement
+ { Cabs.FOR (Some (Cabs.FC_DECL decl1)) None (Some (fst expr3)) stmt loc }
| loc = FOR LPAREN SEMICOLON SEMICOLON expr3 = expression RPAREN stmt = last_statement
- { FOR None None (Some (fst expr3)) stmt loc }
-| loc = FOR LPAREN expr1 = expression SEMICOLON expr2 = expression SEMICOLON RPAREN stmt = last_statement
- { FOR (Some (FC_EXP (fst expr1))) (Some (fst expr2)) None stmt loc }
-| loc = FOR LPAREN decl1 = declaration expr2 = expression SEMICOLON RPAREN stmt = last_statement
- { FOR (Some (FC_DECL decl1)) (Some (fst expr2)) None stmt loc }
+ { Cabs.FOR None None (Some (fst expr3)) stmt loc }
+| loc = FOR LPAREN expr1 = expression SEMICOLON expr2 = expression SEMICOLON RPAREN
+ stmt = last_statement
+ { Cabs.FOR (Some (Cabs.FC_EXP (fst expr1))) (Some (fst expr2)) None stmt loc }
+| loc = FOR LPAREN decl1 = declaration expr2 = expression SEMICOLON RPAREN
+ stmt = last_statement
+ { Cabs.FOR (Some (Cabs.FC_DECL decl1)) (Some (fst expr2)) None stmt loc }
| loc = FOR LPAREN SEMICOLON expr2 = expression SEMICOLON RPAREN stmt = last_statement
- { FOR None (Some (fst expr2)) None stmt loc }
+ { Cabs.FOR None (Some (fst expr2)) None stmt loc }
| loc = FOR LPAREN expr1 = expression SEMICOLON SEMICOLON RPAREN stmt = last_statement
- { FOR (Some (FC_EXP (fst expr1))) None None stmt loc }
+ { Cabs.FOR (Some (Cabs.FC_EXP (fst expr1))) None None stmt loc }
| loc = FOR LPAREN decl1 = declaration SEMICOLON RPAREN stmt = last_statement
- { FOR (Some (FC_DECL decl1)) None None stmt loc }
+ { Cabs.FOR (Some (Cabs.FC_DECL decl1)) None None stmt loc }
| loc = FOR LPAREN SEMICOLON SEMICOLON RPAREN stmt = last_statement
- { FOR None None None stmt loc }
+ { Cabs.FOR None None None stmt loc }
(* 6.8.6 *)
jump_statement:
| loc = GOTO id = OTHER_NAME SEMICOLON
- { GOTO (fst id) loc }
+ { Cabs.GOTO (fst id) loc }
| loc = CONTINUE SEMICOLON
- { CONTINUE loc }
+ { Cabs.CONTINUE loc }
| loc = BREAK SEMICOLON
- { BREAK loc }
+ { Cabs.BREAK loc }
| loc = RETURN expr = expression SEMICOLON
- { RETURN (Some (fst expr)) loc }
+ { Cabs.RETURN (Some (fst expr)) loc }
| loc = RETURN SEMICOLON
- { RETURN None loc }
+ { Cabs.RETURN None loc }
(* Non-standard *)
asm_statement:
-| loc = ASM attr = asm_attributes LPAREN template = STRING_LITERAL args = asm_arguments RPAREN SEMICOLON
+| loc = ASM attr = asm_attributes LPAREN template = STRING_LITERAL args = asm_arguments
+ RPAREN SEMICOLON
{ let '(wide, chars, _) := template in
let '(outputs, inputs, flags) := args in
- ASM attr wide chars outputs inputs flags loc }
+ Cabs.ASM attr wide chars outputs inputs flags loc }
asm_attributes:
| /* empty */
{ [] }
| CONST attr = asm_attributes
- { CV_CONST :: attr }
+ { Cabs.CV_CONST :: attr }
| VOLATILE attr = asm_attributes
- { CV_VOLATILE :: attr }
+ { Cabs.CV_VOLATILE :: attr }
asm_arguments:
| /* empty */
@@ -897,7 +914,7 @@ asm_operands_ne:
asm_operand:
| n = asm_op_name cstr = STRING_LITERAL LPAREN e = expression RPAREN
- { let '(wide, s, loc) := cstr in ASMOPERAND n wide s (fst e) }
+ { let '(wide, s, loc) := cstr in Cabs.ASMOPERAND n wide s (fst e) }
asm_op_name:
| /* empty */ { None }
@@ -934,7 +951,7 @@ external_declaration:
{ def }
(* Non-standard *)
| p = PRAGMA
- { PRAGMA (fst p) (snd p) }
+ { Cabs.PRAGMA (fst p) (snd p) }
(* 6.9.1 *)
@@ -943,11 +960,11 @@ function_definition:
decl = declarator_noattrend
dlist = declaration_list
stmt = compound_statement
- { FUNDEF (fst specs) decl (List.rev' dlist) stmt (snd specs) }
+ { Cabs.FUNDEF (fst specs) decl (List.rev' dlist) stmt (snd specs) }
| specs = declaration_specifiers
decl = declarator
stmt = compound_statement
- { FUNDEF (fst specs) decl [] stmt (snd specs) }
+ { Cabs.FUNDEF (fst specs) decl [] stmt (snd specs) }
declaration_list:
| d = declaration
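
The Parser.vy hunks above follow two recurring idioms: every Cabs constructor in a semantic action is now written with an explicit Cabs. qualifier, and one-armed match ... end expressions over the single-constructor name type are rewritten as irrefutable let-patterns. The following is a minimal standalone Coq sketch of both idioms, not taken from the diff; it uses a placeholder type with nat fields instead of the real Cabs.name (which carries the identifier, a declarator type, an attribute list, and a source location).

(* Placeholder single-constructor type standing in for Cabs.name. *)
Inductive name : Type :=
  | Name : nat -> nat -> name.

(* Old style: a one-armed match over the single constructor. *)
Definition base_old (d : name) : nat :=
  match d with Name n _ => n end.

(* New style: the irrefutable let-pattern used in the updated actions. *)
Definition base_new (d : name) : nat :=
  let 'Name n _ := d in n.

Both definitions compute the same result; the let-pattern form is simply shorter and avoids the match ... end block inside Menhir semantic actions.
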
diff --git a/cparser/Rename.ml b/cparser/Rename.ml
index eb31eaf0..64412194 100644
--- a/cparser/Rename.ml
+++ b/cparser/Rename.ml
@@ -246,7 +246,7 @@ let rec globdecls env accu = function
(* Reserve names of builtins *)
let reserve_builtins () =
- List.fold_left enter_public empty_env (Builtins.identifiers())
+ List.fold_left enter_public empty_env (Env.initial_identifiers())
(* Reserve global declarations with public visibility *)
diff --git a/cparser/Transform.ml b/cparser/Transform.ml
index 0a2ce3bb..349a3155 100644
--- a/cparser/Transform.ml
+++ b/cparser/Transform.ml
@@ -227,4 +227,4 @@ let program
in
transf_globdecls env' ({g with gdesc = desc'} :: accu) gl
- in transf_globdecls (Builtins.environment()) [] p
+ in transf_globdecls (Env.initial()) [] p
diff --git a/cparser/Unblock.ml b/cparser/Unblock.ml
index 66b497cc..d25f70c6 100644
--- a/cparser/Unblock.ml
+++ b/cparser/Unblock.ml
@@ -399,4 +399,4 @@ let rec unblock_glob env accu = function
let program p =
next_scope_id := 0;
{gloc = no_loc; gdesc = Gdecl(Storage_extern, debug_id, debug_ty, None)} ::
- unblock_glob (Builtins.environment()) [] p
+ unblock_glob (Env.initial()) [] p
diff --git a/cparser/pre_parser.mly b/cparser/pre_parser.mly
index 71eaf419..669ecf5e 100644
--- a/cparser/pre_parser.mly
+++ b/cparser/pre_parser.mly
@@ -43,13 +43,13 @@
%}
%token<string> PRE_NAME
-%token<string * Pre_parser_aux.identifier_type ref * Cabs.cabsloc>
+%token<string * Pre_parser_aux.identifier_type ref * Cabs.loc>
VAR_NAME TYPEDEF_NAME
-%token<Cabs.constant * Cabs.cabsloc> CONSTANT
-%token<bool * int64 list * Cabs.cabsloc> STRING_LITERAL
-%token<string * Cabs.cabsloc> PRAGMA
+%token<Cabs.constant * Cabs.loc> CONSTANT
+%token<bool * int64 list * Cabs.loc> STRING_LITERAL
+%token<string * Cabs.loc> PRAGMA
-%token<Cabs.cabsloc> SIZEOF PTR INC DEC LEFT RIGHT LEQ GEQ EQEQ EQ NEQ LT GT
+%token<Cabs.loc> SIZEOF PTR INC DEC LEFT RIGHT LEQ GEQ EQEQ EQ NEQ LT GT
ANDAND BARBAR PLUS MINUS STAR TILDE BANG SLASH PERCENT HAT BAR QUESTION
COLON AND MUL_ASSIGN DIV_ASSIGN MOD_ASSIGN ADD_ASSIGN SUB_ASSIGN LEFT_ASSIGN
RIGHT_ASSIGN AND_ASSIGN XOR_ASSIGN OR_ASSIGN LPAREN RPAREN LBRACK RBRACK