-rw-r--r--  LICENSE  27
-rw-r--r--  Makefile  22
-rw-r--r--  Makefile.extr  4
-rw-r--r--  MenhirLib/Alphabet.v  247
-rw-r--r--  MenhirLib/Automaton.v (renamed from cparser/MenhirLib/Automaton.v)  31
-rw-r--r--  MenhirLib/Grammar.v  162
-rw-r--r--  MenhirLib/Interpreter.v  453
-rw-r--r--  MenhirLib/Interpreter_complete.v  825
-rw-r--r--  MenhirLib/Interpreter_correct.v  175
-rw-r--r--  MenhirLib/Main.v  79
-rw-r--r--  MenhirLib/Validator_classes.v  74
-rw-r--r--  MenhirLib/Validator_complete.v  394
-rw-r--r--  MenhirLib/Validator_safe.v  234
-rwxr-xr-x  configure  16
-rw-r--r--  cparser/Cabs.v  54
-rw-r--r--  cparser/Cabshelper.ml  13
-rw-r--r--  cparser/Elab.ml  4
-rw-r--r--  cparser/Elab.mli  4
-rw-r--r--  cparser/Lexer.mll  247
-rw-r--r--  cparser/MenhirLib/Alphabet.v  320
-rw-r--r--  cparser/MenhirLib/Grammar.v  166
-rw-r--r--  cparser/MenhirLib/Interpreter.v  228
-rw-r--r--  cparser/MenhirLib/Interpreter_complete.v  686
-rw-r--r--  cparser/MenhirLib/Interpreter_correct.v  228
-rw-r--r--  cparser/MenhirLib/Interpreter_safe.v  275
-rw-r--r--  cparser/MenhirLib/Main.v  92
-rw-r--r--  cparser/MenhirLib/Tuples.v  49
-rw-r--r--  cparser/MenhirLib/Validator_complete.v  542
-rw-r--r--  cparser/MenhirLib/Validator_safe.v  414
-rw-r--r--  cparser/Parse.ml  13
-rw-r--r--  cparser/Parser.vy  595
-rw-r--r--  cparser/pre_parser.mly  10
-rw-r--r--  extraction/extraction.v  12
-rw-r--r--  lib/Camlcoq.ml  51
34 files changed, 3163 insertions, 3583 deletions
diff --git a/LICENSE b/LICENSE
index 5c7d7294..5a7ae79f 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
All files in this distribution are part of the CompCert verified compiler.
-The CompCert verified compiler is Copyright by Institut National de
+The CompCert verified compiler is Copyright by Institut National de
Recherche en Informatique et en Automatique (INRIA) and
AbsInt Angewandte Informatik GmbH.
@@ -9,12 +9,12 @@ INRIA Non-Commercial License Agreement given below or under the terms
of a Software Usage Agreement of AbsInt Angewandte Informatik GmbH.
The latter is a separate contract document.
-The INRIA Non-Commercial License Agreement is a non-free license that
-grants you the right to use the CompCert verified compiler for
-educational, research or evaluation purposes only, but prohibits
+The INRIA Non-Commercial License Agreement is a non-free license that
+grants you the right to use the CompCert verified compiler for
+educational, research or evaluation purposes only, but prohibits
any commercial use.
-For commercial use you need a Software Usage Agreement from
+For commercial use you need a Software Usage Agreement from
AbsInt Angewandte Informatik GmbH.
The following files in this distribution are dual-licensed both under
@@ -38,7 +38,7 @@ option) any later version:
cfrontend/Ctyping.v
cfrontend/PrintClight.ml
cfrontend/PrintCsyntax.ml
-
+
backend/Cminor.v
backend/PrintCminor.ml
@@ -46,7 +46,7 @@ option) any later version:
all files in the exportclight/ directory
- the Archi.v, CBuiltins.ml, and extractionMachdep.v files
+ the Archi.v, CBuiltins.ml, and extractionMachdep.v files
in directories arm, powerpc, riscV, x86, x86_32, x86_64
extraction/extraction.v
@@ -64,11 +64,14 @@ non-commercial contexts, subject to the terms of the GNU General
Public License.
The files contained in the flocq/ directory and its subdirectories are
-taken from the Flocq project, http://flocq.gforge.inria.fr/
-These files are Copyright 2010-2017 INRIA and distributed under the
-terms of the GNU Lesser General Public Licence, either version 3 of
-the licence, or (at your option) any later version. A copy of the GNU
-Lesser General Public Licence version 3 is included below.
+taken from the Flocq project, http://flocq.gforge.inria.fr/. The files
+contained in the MenhirLib directory are taken from the Menhir
+project, http://gallium.inria.fr/~fpottier/menhir/. The files from the
+Flocq project and the files in the MenhirLib directory are Copyright
+2010-2019 INRIA and distributed under the terms of the GNU Lesser
+General Public Licence, either version 3 of the licence, or (at your
+option) any later version. A copy of the GNU Lesser General Public
+Licence version 3 is included below.
The files contained in the runtime/ directory and its subdirectories
are Copyright 2013-2017 INRIA and distributed under the terms of the BSD
diff --git a/Makefile b/Makefile
index 369fd4cd..1e578006 100644
--- a/Makefile
+++ b/Makefile
@@ -23,9 +23,10 @@ endif
DIRS=lib common $(ARCHDIRS) backend cfrontend driver \
flocq/Core flocq/Prop flocq/Calc flocq/IEEE754 \
- exportclight cparser cparser/MenhirLib
+ exportclight MenhirLib cparser
-RECDIRS=lib common $(ARCHDIRS) backend cfrontend driver flocq exportclight cparser
+RECDIRS=lib common $(ARCHDIRS) backend cfrontend driver flocq exportclight \
+ MenhirLib cparser
COQINCLUDES=$(foreach d, $(RECDIRS), -R $(d) compcert.$(d))
@@ -103,16 +104,16 @@ CFRONTEND=Ctypes.v Cop.v Csyntax.v Csem.v Ctyping.v Cstrategy.v Cexec.v \
Cshmgen.v Cshmgenproof.v \
Csharpminor.v Cminorgen.v Cminorgenproof.v
-# LR(1) parser validator
-
-PARSERVALIDATOR=Alphabet.v Interpreter_complete.v Interpreter.v \
- Validator_complete.v Automaton.v Interpreter_correct.v Main.v \
- Validator_safe.v Grammar.v Interpreter_safe.v Tuples.v
-
# Parser
PARSER=Cabs.v Parser.v
+# MenhirLib
+
+MENHIRLIB=Alphabet.v Automaton.v Grammar.v Interpreter_complete.v \
+ Interpreter_correct.v Interpreter.v Main.v Validator_complete.v \
+ Validator_safe.v Validator_classes.v
+
# Putting everything together (in driver/)
DRIVER=Compopts.v Compiler.v Complements.v
@@ -120,7 +121,7 @@ DRIVER=Compopts.v Compiler.v Complements.v
# All source files
FILES=$(VLIB) $(COMMON) $(BACKEND) $(CFRONTEND) $(DRIVER) $(FLOCQ) \
- $(PARSERVALIDATOR) $(PARSER)
+ $(MENHIRLIB) $(PARSER)
# Generated source files
@@ -141,7 +142,6 @@ ifeq ($(CLIGHTGEN),true)
$(MAKE) clightgen
endif
-
proof: $(FILES:.v=.vo)
# Turn off some warnings for compiling Flocq
@@ -225,7 +225,7 @@ driver/Version.ml: VERSION
cparser/Parser.v: cparser/Parser.vy
@rm -f $@
- $(MENHIR) $(MENHIR_FLAGS) --coq cparser/Parser.vy
+ $(MENHIR) --coq --coq-lib-path compcert.MenhirLib --coq-no-version-check cparser/Parser.vy
@chmod a-w $@
depend: $(GENERATED) depend1
diff --git a/Makefile.extr b/Makefile.extr
index a1c2ef7c..d375fd29 100644
--- a/Makefile.extr
+++ b/Makefile.extr
@@ -50,8 +50,8 @@ INCLUDES=$(patsubst %,-I %, $(DIRS))
# Control of warnings:
WARNINGS=-w +a-4-9-27 -strict-sequence -safe-string -warn-error +a #Deprecation warnings return with OCaml 4.03
-extraction/%.cmx: WARNINGS +=-w -20-27-32..34-39-41-44..45
-extraction/%.cmo: WARNINGS +=-w -20-27-32..34-39-41-44..45
+extraction/%.cmx: WARNINGS +=-w -20-27-32..34-39-41-44..45-60
+extraction/%.cmo: WARNINGS +=-w -20-27-32..34-39-41-44..45-60
cparser/pre_parser.cmx: WARNINGS += -w -41
cparser/pre_parser.cmo: WARNINGS += -w -41
diff --git a/MenhirLib/Alphabet.v b/MenhirLib/Alphabet.v
new file mode 100644
index 00000000..29070e3d
--- /dev/null
+++ b/MenhirLib/Alphabet.v
@@ -0,0 +1,247 @@
+(****************************************************************************)
+(* *)
+(* Menhir *)
+(* *)
+(* Jacques-Henri Jourdan, CNRS, LRI, Université Paris Sud *)
+(* *)
+(* Copyright Inria. All rights reserved. This file is distributed under *)
+(* the terms of the GNU Lesser General Public License as published by the *)
+(* Free Software Foundation, either version 3 of the License, or (at your *)
+(* option) any later version, as described in the file LICENSE. *)
+(* *)
+(****************************************************************************)
+
+From Coq Require Import Omega List Syntax Relations RelationClasses.
+
+Local Obligation Tactic := intros.
+
+(** A comparable type is equipped with a [compare] function that defines an
+ order relation. **)
+Class Comparable (A:Type) := {
+ compare : A -> A -> comparison;
+ compare_antisym : forall x y, CompOpp (compare x y) = compare y x;
+ compare_trans : forall x y z c,
+ (compare x y) = c -> (compare y z) = c -> (compare x z) = c
+}.
+
+Theorem compare_refl {A:Type} (C: Comparable A) :
+ forall x, compare x x = Eq.
+Proof.
+intros.
+pose proof (compare_antisym x x).
+destruct (compare x x); intuition; try discriminate.
+Qed.
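
As a quick illustration of this interface (a sketch, not taken from the patch; [bool_compare] and [boolComparable] are made-up names), a type such as [bool] can be given a [Comparable] instance by supplying a comparison function together with the two required properties:

(* Order bool with false < true. *)
Definition bool_compare (x y : bool) : comparison :=
  match x, y with
  | false, false => Eq
  | false, true  => Lt
  | true,  false => Gt
  | true,  true  => Eq
  end.

Lemma bool_compare_antisym x y : CompOpp (bool_compare x y) = bool_compare y x.
Proof. destruct x, y; reflexivity. Qed.

Lemma bool_compare_trans x y z c :
  bool_compare x y = c -> bool_compare y z = c -> bool_compare x z = c.
Proof. destruct x, y, z; simpl; intros H1 H2; congruence. Qed.

(* All three fields are supplied, so no proof obligations remain. *)
Instance boolComparable : Comparable bool :=
  { compare := bool_compare;
    compare_antisym := bool_compare_antisym;
    compare_trans := bool_compare_trans }.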
+
+(** The corresponding order is a strict order. **)
+Definition comparableLt {A:Type} (C: Comparable A) : relation A :=
+ fun x y => compare x y = Lt.
+
+Instance ComparableLtStrictOrder {A:Type} (C: Comparable A) :
+ StrictOrder (comparableLt C).
+Proof.
+apply Build_StrictOrder.
+unfold Irreflexive, Reflexive, complement, comparableLt.
+intros.
+pose proof H.
+rewrite <- compare_antisym in H.
+rewrite H0 in H.
+discriminate H.
+unfold Transitive, comparableLt.
+intros x y z.
+apply compare_trans.
+Qed.
+
+(** nat is comparable. **)
+Program Instance natComparable : Comparable nat :=
+ { compare := Nat.compare }.
+Next Obligation.
+symmetry.
+destruct (Nat.compare x y) as [] eqn:?.
+rewrite Nat.compare_eq_iff in Heqc.
+destruct Heqc.
+rewrite Nat.compare_eq_iff.
+trivial.
+rewrite <- nat_compare_lt in *.
+rewrite <- nat_compare_gt in *.
+trivial.
+rewrite <- nat_compare_lt in *.
+rewrite <- nat_compare_gt in *.
+trivial.
+Qed.
+Next Obligation.
+destruct c.
+rewrite Nat.compare_eq_iff in *; destruct H; assumption.
+rewrite <- nat_compare_lt in *.
+apply (Nat.lt_trans _ _ _ H H0).
+rewrite <- nat_compare_gt in *.
+apply (gt_trans _ _ _ H H0).
+Qed.
+
+(** A pair of comparable types is comparable. **)
+Program Instance PairComparable {A:Type} (CA:Comparable A) {B:Type} (CB:Comparable B) :
+ Comparable (A*B) :=
+ { compare := fun x y =>
+ let (xa, xb) := x in let (ya, yb) := y in
+ match compare xa ya return comparison with
+ | Eq => compare xb yb
+ | x => x
+ end }.
+Next Obligation.
+destruct x, y.
+rewrite <- (compare_antisym a a0).
+rewrite <- (compare_antisym b b0).
+destruct (compare a a0); intuition.
+Qed.
+Next Obligation.
+destruct x, y, z.
+destruct (compare a a0) as [] eqn:?, (compare a0 a1) as [] eqn:?;
+try (rewrite <- H0 in H; discriminate);
+try (destruct (compare a a1) as [] eqn:?;
+ try (rewrite <- compare_antisym in Heqc0;
+ rewrite CompOpp_iff in Heqc0;
+ rewrite (compare_trans _ _ _ _ Heqc0 Heqc2) in Heqc1;
+ discriminate);
+ try (rewrite <- compare_antisym in Heqc1;
+ rewrite CompOpp_iff in Heqc1;
+ rewrite (compare_trans _ _ _ _ Heqc2 Heqc1) in Heqc0;
+ discriminate);
+ assumption);
+rewrite (compare_trans _ _ _ _ Heqc0 Heqc1);
+try assumption.
+apply (compare_trans _ _ _ _ H H0).
+Qed.
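
For example (again a sketch, not from the patch), the derived order on pairs is lexicographic: the first components decide, and the second components only break ties.

Example pair_compare_fst : compare (1, 2) (2, 1) = Lt.
Proof. reflexivity. Qed.

Example pair_compare_snd : compare (1, 2) (1, 1) = Gt.
Proof. reflexivity. Qed.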
+
+(** Special case of comparable, where equality is Leibniz equality. **)
+Class ComparableLeibnizEq {A:Type} (C: Comparable A) :=
+ compare_eq : forall x y, compare x y = Eq -> x = y.
+
+(** Boolean equality for a [Comparable]. **)
+Definition compare_eqb {A:Type} {C:Comparable A} (x y:A) :=
+ match compare x y with
+ | Eq => true
+ | _ => false
+ end.
+
+Theorem compare_eqb_iff {A:Type} {C:Comparable A} {U:ComparableLeibnizEq C} :
+ forall x y, compare_eqb x y = true <-> x = y.
+Proof.
+unfold compare_eqb.
+intuition.
+apply compare_eq.
+destruct (compare x y); intuition; discriminate.
+destruct H.
+rewrite compare_refl; intuition.
+Qed.
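
Concretely (a sketch, not from the patch), on [nat] the boolean test computes as expected:

Example compare_eqb_nat_eq : compare_eqb 41 41 = true.
Proof. reflexivity. Qed.

Example compare_eqb_nat_neq : compare_eqb 41 42 = false.
Proof. reflexivity. Qed.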
+
+Instance NComparableLeibnizEq : ComparableLeibnizEq natComparable := Nat.compare_eq.
+
+(** A pair of ComparableLeibnizEq types is ComparableLeibnizEq. **)
+Instance PairComparableLeibnizEq
+ {A:Type} {CA:Comparable A} (UA:ComparableLeibnizEq CA)
+ {B:Type} {CB:Comparable B} (UB:ComparableLeibnizEq CB) :
+ ComparableLeibnizEq (PairComparable CA CB).
+Proof.
+intros x y; destruct x, y; simpl.
+pose proof (compare_eq a a0); pose proof (compare_eq b b0).
+destruct (compare a a0); try discriminate.
+intuition.
+destruct H2, H0.
+reflexivity.
+Qed.
+
+(** A [Finite] type is a type equipped with the list of all its elements. **)
+Class Finite (A:Type) := {
+ all_list : list A;
+ all_list_forall : forall x:A, In x all_list
+}.
+
+(** An alphabet is both [ComparableLeibnizEq] and [Finite]. **)
+Class Alphabet (A:Type) := {
+ AlphabetComparable :> Comparable A;
+ AlphabetComparableLeibnizEq :> ComparableLeibnizEq AlphabetComparable;
+ AlphabetFinite :> Finite A
+}.
+
+(** The [Numbered] class provides a convenient way to build [Alphabet] instances
+ with good computational complexity. It is essentially an injection from the
+ type to [positive]. **)
+Class Numbered (A:Type) := {
+ inj : A -> positive;
+ surj : positive -> A;
+ surj_inj_compat : forall x, surj (inj x) = x;
+ inj_bound : positive;
+ inj_bound_spec : forall x, (inj x < Pos.succ inj_bound)%positive
+}.
+
+Program Instance NumberedAlphabet {A:Type} (N:Numbered A) : Alphabet A :=
+ { AlphabetComparable := {| compare := fun x y => Pos.compare (inj x) (inj y) |};
+ AlphabetFinite :=
+ {| all_list := fst (Pos.iter
+ (fun '(l, p) => (surj p::l, Pos.succ p))
+ ([], 1%positive) inj_bound) |} }.
+Next Obligation. simpl. now rewrite <- Pos.compare_antisym. Qed.
+Next Obligation.
+ match goal with c : comparison |- _ => destruct c end.
+ - rewrite Pos.compare_eq_iff in *. congruence.
+ - rewrite Pos.compare_lt_iff in *. eauto using Pos.lt_trans.
+ - rewrite Pos.compare_gt_iff in *. eauto using Pos.lt_trans.
+Qed.
+Next Obligation.
+ intros x y. unfold compare. intros Hxy.
+ assert (Hxy' : inj x = inj y).
+ (* We do not use [Pos.compare_eq_iff] directly to make sure the
+ proof is executable. *)
+ { destruct (Pos.eq_dec (inj x) (inj y)) as [|[]]; [now auto|].
+ now apply Pos.compare_eq_iff. }
+ (* Using rewrite here leads to non-executable proofs. *)
+ transitivity (surj (inj x)).
+ { apply eq_sym, surj_inj_compat. }
+ transitivity (surj (inj y)); cycle 1.
+ { apply surj_inj_compat. }
+ apply f_equal, Hxy'.
+Defined.
+Next Obligation.
+ rewrite <-(surj_inj_compat x).
+ generalize (inj_bound_spec x). generalize (inj x). clear x. intros x.
+ match goal with |- ?Hx -> In ?s (fst ?p) =>
+ assert ((Hx -> In s (fst p)) /\ snd p = Pos.succ inj_bound); [|now intuition] end.
+ rewrite Pos.lt_succ_r.
+ induction inj_bound as [|y [IH1 IH2]] using Pos.peano_ind;
+ (split; [intros Hx|]); simpl.
+ - rewrite (Pos.le_antisym _ _ Hx); auto using Pos.le_1_l.
+ - auto.
+ - rewrite Pos.iter_succ. destruct Pos.iter; simpl in *. subst.
+ rewrite Pos.le_lteq in Hx. destruct Hx as [?%Pos.lt_succ_r| ->]; now auto.
+ - rewrite Pos.iter_succ. destruct Pos.iter. simpl in IH2. subst. reflexivity.
+Qed.
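
To show how a client is expected to use [Numbered] (a sketch, not taken from the patch; all [demo_*] names are hypothetical), a small enumerated type only needs an injection into [positive], a bound, and two proofs by case analysis; [NumberedAlphabet] then provides the whole [Alphabet] structure:

Inductive demo_tok := DemoA | DemoB | DemoC.

Definition demo_inj (x : demo_tok) : positive :=
  match x with DemoA => 1%positive | DemoB => 2%positive | DemoC => 3%positive end.

Definition demo_surj (p : positive) : demo_tok :=
  if Pos.eqb p 1%positive then DemoA
  else if Pos.eqb p 2%positive then DemoB
  else DemoC.

Lemma demo_surj_inj x : demo_surj (demo_inj x) = x.
Proof. destruct x; reflexivity. Qed.

Lemma demo_inj_bound x : (demo_inj x < Pos.succ 3)%positive.
Proof. destruct x; unfold Pos.lt; reflexivity. Qed.

Instance demoNumbered : Numbered demo_tok :=
  { inj := demo_inj;
    surj := demo_surj;
    surj_inj_compat := demo_surj_inj;
    inj_bound := 3%positive;
    inj_bound_spec := demo_inj_bound }.

(* Comparison, Leibniz equality and finiteness then all come for free. *)
Definition demoAlphabet : Alphabet demo_tok := NumberedAlphabet demoNumbered.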
+
+(** Definitions of [FSet]/[FMap] from [Comparable] **)
+Require Import OrderedTypeAlt.
+Require FSetAVL.
+Require FMapAVL.
+Import OrderedType.
+
+Module Type ComparableM.
+ Parameter t : Type.
+ Declare Instance tComparable : Comparable t.
+End ComparableM.
+
+Module OrderedTypeAlt_from_ComparableM (C:ComparableM) <: OrderedTypeAlt.
+ Definition t := C.t.
+ Definition compare : t -> t -> comparison := compare.
+
+ Infix "?=" := compare (at level 70, no associativity).
+
+ Lemma compare_sym x y : (y?=x) = CompOpp (x?=y).
+ Proof. exact (Logic.eq_sym (compare_antisym x y)). Qed.
+ Lemma compare_trans c x y z :
+ (x?=y) = c -> (y?=z) = c -> (x?=z) = c.
+ Proof.
+ apply compare_trans.
+ Qed.
+End OrderedTypeAlt_from_ComparableM.
+
+Module OrderedType_from_ComparableM (C:ComparableM) <: OrderedType.
+ Module Alt := OrderedTypeAlt_from_ComparableM C.
+ Include (OrderedType_from_Alt Alt).
+End OrderedType_from_ComparableM.
diff --git a/cparser/MenhirLib/Automaton.v b/MenhirLib/Automaton.v
index fc995298..c310fc61 100644
--- a/cparser/MenhirLib/Automaton.v
+++ b/MenhirLib/Automaton.v
@@ -1,23 +1,20 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
+(****************************************************************************)
+(* *)
+(* Menhir *)
+(* *)
+(* Jacques-Henri Jourdan, CNRS, LRI, Université Paris Sud *)
+(* *)
+(* Copyright Inria. All rights reserved. This file is distributed under *)
+(* the terms of the GNU Lesser General Public License as published by the *)
+(* Free Software Foundation, either version 3 of the License, or (at your *)
+(* option) any later version, as described in the file LICENSE. *)
+(* *)
+(****************************************************************************)
Require Grammar.
-Require Import Orders.
Require Export Alphabet.
-Require Export List.
-Require Export Syntax.
+From Coq Require Import Orders.
+From Coq Require Export List Syntax.
Module Type AutInit.
(** The grammar of the automaton. **)
diff --git a/MenhirLib/Grammar.v b/MenhirLib/Grammar.v
new file mode 100644
index 00000000..a371318b
--- /dev/null
+++ b/MenhirLib/Grammar.v
@@ -0,0 +1,162 @@
+(****************************************************************************)
+(* *)
+(* Menhir *)
+(* *)
+(* Jacques-Henri Jourdan, CNRS, LRI, Université Paris Sud *)
+(* *)
+(* Copyright Inria. All rights reserved. This file is distributed under *)
+(* the terms of the GNU Lesser General Public License as published by the *)
+(* Free Software Foundation, either version 3 of the License, or (at your *)
+(* option) any later version, as described in the file LICENSE. *)
+(* *)
+(****************************************************************************)
+
+From Coq Require Import List Syntax Orders.
+Require Import Alphabet.
+
+(** The terminal and non-terminal alphabets of the grammar. **)
+Module Type Alphs.
+ Parameters terminal nonterminal : Type.
+ Declare Instance TerminalAlph: Alphabet terminal.
+ Declare Instance NonTerminalAlph: Alphabet nonterminal.
+End Alphs.
+
+(** Definition of the alphabet of symbols, given the alphabet of terminals
+ and the alphabet of non-terminals. **)
+Module Symbol(Import A:Alphs).
+
+ Inductive symbol :=
+ | T: terminal -> symbol
+ | NT: nonterminal -> symbol.
+
+ Program Instance SymbolAlph : Alphabet symbol :=
+ { AlphabetComparable := {| compare := fun x y =>
+ match x, y return comparison with
+ | T _, NT _ => Gt
+ | NT _, T _ => Lt
+ | T x, T y => compare x y
+ | NT x, NT y => compare x y
+ end |};
+ AlphabetFinite := {| all_list :=
+ map T all_list++map NT all_list |} }.
+ Next Obligation.
+ destruct x; destruct y; intuition; apply compare_antisym.
+ Qed.
+ Next Obligation.
+ destruct x; destruct y; destruct z; intuition; try discriminate.
+ apply (compare_trans _ t0); intuition.
+ apply (compare_trans _ n0); intuition.
+ Qed.
+ Next Obligation.
+ intros x y.
+ destruct x; destruct y; try discriminate; intros.
+ rewrite (compare_eq t t0); now intuition.
+ rewrite (compare_eq n n0); now intuition.
+ Defined.
+ Next Obligation.
+ rewrite in_app_iff.
+ destruct x; [left | right]; apply in_map; apply all_list_forall.
+ Qed.
+
+End Symbol.
+
+(** The type of a curried function with multiple parameters. **)
+Definition arrows_right: Type -> list Type -> Type :=
+ fold_right (fun A B => A -> B).
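
For instance (a sketch, not from the patch), applying [arrows_right] to a result type and a list of argument types unfolds to the corresponding curried function type:

Example arrows_right_demo :
  arrows_right bool (nat :: unit :: nil) = (nat -> unit -> bool).
Proof. reflexivity. Qed.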
+
+Module Type T.
+ Include Alphs <+ Symbol.
+
+ (** [symbol_semantic_type] maps a symbol to the type of its semantic
+ values. **)
+ Parameter symbol_semantic_type: symbol -> Type.
+
+ (** The type of production identifiers. **)
+ Parameter production : Type.
+ Declare Instance ProductionAlph : Alphabet production.
+
+ (** Accessors for productions: left-hand side, right-hand side,
+ and semantic action. The semantic actions are given in the form
+ of curried functions that take their arguments in reverse order. **)
+ Parameter prod_lhs: production -> nonterminal.
+ (* The RHS of a production is given in reversed order, so that symbols *)
+ Parameter prod_rhs_rev: production -> list symbol.
+ Parameter prod_action:
+ forall p:production,
+ arrows_right
+ (symbol_semantic_type (NT (prod_lhs p)))
+ (map symbol_semantic_type (prod_rhs_rev p)).
+
+ (** Tokens are the atomic elements of the input stream: they contain
+ a terminal and a semantic value of the type corresponding to this
+ terminal. *)
+ Parameter token : Type.
+ Parameter token_term : token -> terminal.
+ Parameter token_sem :
+ forall tok : token, symbol_semantic_type (T (token_term tok)).
+End T.
+
+Module Defs(Import G:T).
+
+ (** The semantics of a grammar is defined in two stages. First, we
+ define the notion of parse tree, which represents one way of
+ recognizing a word with a head symbol. Semantic values are stored
+ at the leaves.
+
+ This notion is defined in two mutually recursive flavours:
+ either for a single head symbol, or for a list of head symbols. *)
+ Inductive parse_tree:
+ forall (head_symbol:symbol) (word:list token), Type :=
+
+ (** Parse tree for a terminal symbol. *)
+ | Terminal_pt:
+ forall (tok:token), parse_tree (T (token_term tok)) [tok]
+
+ (** Parse tree for a non-terminal symbol. *)
+ | Non_terminal_pt:
+ forall (prod:production) {word:list token},
+ parse_tree_list (prod_rhs_rev prod) word ->
+ parse_tree (NT (prod_lhs prod)) word
+
+ (* Note : the list head_symbols_rev is reversed. *)
+ with parse_tree_list:
+ forall (head_symbols_rev:list symbol) (word:list token), Type :=
+
+ | Nil_ptl: parse_tree_list [] []
+
+ | Cons_ptl:
+ forall {head_symbolsq:list symbol} {wordq:list token},
+ parse_tree_list head_symbolsq wordq ->
+
+ forall {head_symbolt:symbol} {wordt:list token},
+ parse_tree head_symbolt wordt ->
+
+ parse_tree_list (head_symbolt::head_symbolsq) (wordq++wordt).
+
+ (** We can now finish the definition of the semantics of a grammar
+ by giving the semantic value associated with a parse tree. *)
+ Fixpoint pt_sem {head_symbol word} (tree:parse_tree head_symbol word) :
+ symbol_semantic_type head_symbol :=
+ match tree with
+ | Terminal_pt tok => token_sem tok
+ | Non_terminal_pt prod ptl => ptl_sem ptl (prod_action prod)
+ end
+ with ptl_sem {A head_symbols word} (tree:parse_tree_list head_symbols word) :
+ arrows_right A (map symbol_semantic_type head_symbols) -> A :=
+ match tree with
+ | Nil_ptl => fun act => act
+ | Cons_ptl q t => fun act => ptl_sem q (act (pt_sem t))
+ end.
+
+ Fixpoint pt_size {head_symbol word} (tree:parse_tree head_symbol word) :=
+ match tree with
+ | Terminal_pt _ => 1
+ | Non_terminal_pt _ l => S (ptl_size l)
+ end
+ with ptl_size {head_symbols word} (tree:parse_tree_list head_symbols word) :=
+ match tree with
+ | Nil_ptl => 0
+ | Cons_ptl q t =>
+ pt_size t + ptl_size q
+ end.
+End Defs.
diff --git a/MenhirLib/Interpreter.v b/MenhirLib/Interpreter.v
new file mode 100644
index 00000000..568597ba
--- /dev/null
+++ b/MenhirLib/Interpreter.v
@@ -0,0 +1,453 @@
+(****************************************************************************)
+(* *)
+(* Menhir *)
+(* *)
+(* Jacques-Henri Jourdan, CNRS, LRI, Université Paris Sud *)
+(* *)
+(* Copyright Inria. All rights reserved. This file is distributed under *)
+(* the terms of the GNU Lesser General Public License as published by the *)
+(* Free Software Foundation, either version 3 of the License, or (at your *)
+(* option) any later version, as described in the file LICENSE. *)
+(* *)
+(****************************************************************************)
+
+From Coq Require Import List Syntax.
+From Coq.ssr Require Import ssreflect.
+Require Automaton.
+Require Import Alphabet Grammar Validator_safe.
+
+Module Make(Import A:Automaton.T).
+Module Import ValidSafe := Validator_safe.Make A.
+
+(** A few helpers for dependent types. *)
+
+(** Decidable propositions. *)
+Class Decidable (P : Prop) := decide : {P} + {~P}.
+Arguments decide _ {_}.
+
+(** A [Comparable] type has decidable equality. *)
+Instance comparable_decidable_eq T `{ComparableLeibnizEq T} (x y : T) :
+ Decidable (x = y).
+Proof.
+ unfold Decidable.
+ destruct (compare x y) eqn:EQ; [left; apply compare_eq; intuition | ..];
+ right; intros ->; by rewrite compare_refl in EQ.
+Defined.
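
For example (a sketch, not from the patch), equality on [nat] is decidable through the [Comparable] instances of Alphabet.v:

Example nat_eq_dec_demo (x y : nat) : Decidable (x = y).
Proof. exact _. Defined.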
+
+Instance list_decidable_eq T :
+ (forall x y : T, Decidable (x = y)) ->
+ (forall l1 l2 : list T, Decidable (l1 = l2)).
+Proof. unfold Decidable. decide equality. Defined.
+
+Ltac subst_existT :=
+ repeat
+ match goal with
+ | _ => progress subst
+ | H : @existT ?A ?P ?x ?y1 = @existT ?A ?P ?x ?y2 |- _ =>
+ let DEC := fresh in
+ assert (DEC : forall u1 u2 : A, Decidable (u1 = u2)) by apply _;
+ apply Eqdep_dec.inj_pair2_eq_dec in H; [|by apply DEC];
+ clear DEC
+ end.
+
+(** The interpreter is written using dependent types. In order to
+ avoid reducing proof terms while executing the parser, we thunk all
+ the propositions behind an arrow.
+ Note that thunkP is still in Prop so that it is erased by
+ extraction.
+ *)
+Definition thunkP (P : Prop) : Prop := True -> P.
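
A tiny illustration of the idea (a sketch, not from the patch; the names are made up): a thunked fact is only forced when applied to [I], and since it lives in [Prop] it is erased by extraction.

Definition thunked_fact : thunkP (1 + 1 = 2) := fun _ => eq_refl.
Definition forced_fact : 1 + 1 = 2 := thunked_fact I.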
+
+(** Sometimes, we actually need a reduced proof in a program (for
+ example when using an equality to cast a value). In that case,
+ instead of reducing the proof we already have, we reprove the
+ assertion by using decidability. *)
+Definition reprove {P} `{Decidable P} (p : thunkP P) : P :=
+ match decide P with
+ | left p => p
+ | right np => False_ind _ (np (p I))
+ end.
+
+(** Combination of reprove with eq_rect. *)
+Definition cast {T : Type} (F : T -> Type) {x y : T} (eq : thunkP (x = y))
+ {DEC : unit -> Decidable (x = y)}:
+ F x -> F y :=
+ fun a => eq_rect x F a y (@reprove _ (DEC ()) eq).
+
+Lemma cast_eq T F (x : T) (eq : thunkP (x = x)) `{forall x y, Decidable (x = y)} a :
+ cast F eq a = a.
+Proof. by rewrite /cast -Eqdep_dec.eq_rect_eq_dec. Qed.
+
+(** Input buffers and operations on them. **)
+CoInductive buffer : Type :=
+ Buf_cons { buf_head : token; buf_tail : buffer }.
+
+Delimit Scope buffer_scope with buf.
+Bind Scope buffer_scope with buffer.
+
+Infix "::" := Buf_cons (at level 60, right associativity) : buffer_scope.
+
+(** Concatenation of a list and an input buffer **)
+Fixpoint app_buf (l:list token) (buf:buffer) :=
+ match l with
+ | nil => buf
+ | cons t q => (t :: app_buf q buf)%buf
+ end.
+Infix "++" := app_buf (at level 60, right associativity) : buffer_scope.
+
+Lemma app_buf_assoc (l1 l2:list token) (buf:buffer) :
+ (l1 ++ (l2 ++ buf) = (l1 ++ l2) ++ buf)%buf.
+Proof. induction l1 as [|?? IH]=>//=. rewrite IH //. Qed.
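
An editorial sketch (not part of the patch; [repeat_buf] and [buffer_of_list] are made-up names): since [buffer] is coinductive, the input stream is necessarily infinite, so a finite list of tokens is typically padded with a repeated end-of-stream token before being handed to the parser.

CoFixpoint repeat_buf (tok : token) : buffer :=
  (tok :: repeat_buf tok)%buf.

Definition buffer_of_list (l : list token) (eos : token) : buffer :=
  (l ++ repeat_buf eos)%buf.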
+
+(** The type of a non-initial state: the type of semantic values associated
+ with the last symbol of this state. *)
+Definition noninitstate_type state :=
+ symbol_semantic_type (last_symb_of_non_init_state state).
+
+(** The stack of the automaton: it is either nil or contains a non-initial
+ state, a semantic value for the symbol associated with this state,
+ and a nested stack. **)
+Definition stack := list (sigT noninitstate_type). (* i.e. list {st : noninitstate & noninitstate_type st} *)
+
+Section Interpreter.
+
+Hypothesis safe: safe.
+
+(* Properties of the automaton deduced from safety validation. *)
+Proposition shift_head_symbs: shift_head_symbs.
+Proof. pose proof safe; unfold ValidSafe.safe in H; intuition. Qed.
+Proposition goto_head_symbs: goto_head_symbs.
+Proof. pose proof safe; unfold ValidSafe.safe in H; intuition. Qed.
+Proposition shift_past_state: shift_past_state.
+Proof. pose proof safe; unfold ValidSafe.safe in H; intuition. Qed.
+Proposition goto_past_state: goto_past_state.
+Proof. pose proof safe; unfold ValidSafe.safe in H; intuition. Qed.
+Proposition reduce_ok: reduce_ok.
+Proof. pose proof safe; unfold ValidSafe.safe in H; intuition. Qed.
+
+Variable init : initstate.
+
+(** The top state of a stack **)
+Definition state_of_stack (stack:stack): state :=
+ match stack with
+ | [] => init
+ | existT _ s _::_ => s
+ end.
+
+(** The stack of states of an automaton stack **)
+Definition state_stack_of_stack (stack:stack) :=
+ (List.map
+ (fun cell:sigT noninitstate_type => singleton_state_pred (projT1 cell))
+ stack ++ [singleton_state_pred init])%list.
+
+(** The stack of symbols of an automaton stack **)
+Definition symb_stack_of_stack (stack:stack) :=
+ List.map
+ (fun cell:sigT noninitstate_type => last_symb_of_non_init_state (projT1 cell))
+ stack.
+
+(** The stack invariant : it basically states that the assumptions on the
+ states are true. **)
+Inductive stack_invariant: stack -> Prop :=
+ | stack_invariant_constr:
+ forall stack,
+ prefix (head_symbs_of_state (state_of_stack stack))
+ (symb_stack_of_stack stack) ->
+ prefix_pred (head_states_of_state (state_of_stack stack))
+ (state_stack_of_stack stack) ->
+ stack_invariant_next stack ->
+ stack_invariant stack
+with stack_invariant_next: stack -> Prop :=
+ | stack_invariant_next_nil:
+ stack_invariant_next []
+ | stack_invariant_next_cons:
+ forall state_cur st stack_rec,
+ stack_invariant stack_rec ->
+ stack_invariant_next (existT _ state_cur st::stack_rec).
+
+(** [pop] pops some symbols from the stack. It discards the popped states and
+ feeds the popped semantic values to the given semantic action, returning
+ the remaining stack together with the action's result. **)
+Fixpoint pop (symbols_to_pop:list symbol) {A:Type} (stk:stack) :
+ thunkP (prefix symbols_to_pop (symb_stack_of_stack stk)) ->
+ forall (action:arrows_right A (map symbol_semantic_type symbols_to_pop)),
+ stack * A.
+unshelve refine
+ (match symbols_to_pop
+ return
+ (thunkP (prefix symbols_to_pop (symb_stack_of_stack stk))) ->
+ forall (action:arrows_right A (map _ symbols_to_pop)), stack * A
+ with
+ | [] => fun _ action => (stk, action)
+ | t::q => fun Hp action =>
+ match stk
+ return thunkP (prefix (t::q) (symb_stack_of_stack stk)) -> stack * A
+ with
+ | existT _ state_cur sem::stack_rec => fun Hp =>
+ let sem_conv := cast symbol_semantic_type _ sem in
+ pop q _ stack_rec _ (action sem_conv)
+ | [] => fun Hp => False_rect _ _
+ end Hp
+ end).
+Proof.
+ - simpl in Hp. clear -Hp. abstract (intros _ ; specialize (Hp I); now inversion Hp).
+ - clear -Hp. abstract (specialize (Hp I); now inversion Hp).
+ - simpl in Hp. clear -Hp. abstract (intros _ ; specialize (Hp I); now inversion Hp).
+Defined.
+
+(* Equivalent declarative specification for pop, so that we avoid
+ (part of) the dependent types nightmare. *)
+Inductive pop_spec {A:Type} :
+ forall (symbols_to_pop:list symbol) (stk : stack)
+ (action : arrows_right A (map symbol_semantic_type symbols_to_pop))
+ (stk' : stack) (sem : A),
+ Prop :=
+ | Nil_pop_spec stk sem : pop_spec [] stk sem stk sem
+ | Cons_pop_spec symbols_to_pop st stk action sem stk' res :
+ pop_spec symbols_to_pop stk (action sem) stk' res ->
+ pop_spec (last_symb_of_non_init_state st::symbols_to_pop)
+ (existT _ st sem :: stk) action stk' res.
+
+Lemma pop_spec_ok {A:Type} symbols_to_pop stk Hp action stk' res:
+ pop symbols_to_pop stk Hp action = (stk', res) <->
+ pop_spec (A:=A) symbols_to_pop stk action stk' res.
+Proof.
+ revert stk Hp action.
+ induction symbols_to_pop as [|t symbols_to_pop IH]=>stk Hp action /=.
+ - split.
+ + intros [= <- <-]. constructor.
+ + intros H. inversion H. by subst_existT.
+ - destruct stk as [|[st sem]]=>/=; [by destruct pop_subproof0|].
+ remember (pop_subproof t symbols_to_pop stk st Hp) as EQ eqn:eq. clear eq.
+ generalize EQ. revert Hp action. rewrite <-(EQ I)=>Hp action ?.
+ rewrite cast_eq. rewrite IH. split.
+ + intros. by constructor.
+ + intros H. inversion H. by subst_existT.
+Qed.
+
+
+Lemma pop_preserves_invariant symbols_to_pop stk Hp A action :
+ stack_invariant stk ->
+ stack_invariant (fst (pop symbols_to_pop stk Hp (A:=A) action)).
+Proof.
+ revert stk Hp A action. induction symbols_to_pop as [|t q IH]=>//=.
+ intros stk Hp A action Hi.
+ destruct Hi as [stack Hp' Hpp [|state st stk']].
+ - destruct pop_subproof0.
+ - now apply IH.
+Qed.
+
+Lemma pop_state_valid symbols_to_pop stk Hp A action lpred :
+ prefix_pred lpred (state_stack_of_stack stk) ->
+ let stk' := fst (pop symbols_to_pop stk Hp (A:=A) action) in
+ state_valid_after_pop (state_of_stack stk') symbols_to_pop lpred.
+Proof.
+ revert stk Hp A action lpred. induction symbols_to_pop as [|t q IH]=>/=.
+ - intros stk Hp A a lpred Hpp. destruct lpred as [|pred lpred]; constructor.
+ inversion Hpp as [|? lpred' ? pred' Himpl Hpp' eq1 eq2]; subst.
+ specialize (Himpl (state_of_stack stk)).
+ destruct (pred' (state_of_stack stk)) as [] eqn:Heqpred'=>//.
+ destruct stk as [|[]]; simpl in *.
+ + inversion eq2; subst; clear eq2.
+ unfold singleton_state_pred in Heqpred'.
+ now rewrite compare_refl in Heqpred'; discriminate.
+ + inversion eq2; subst; clear eq2.
+ unfold singleton_state_pred in Heqpred'.
+ now rewrite compare_refl in Heqpred'; discriminate.
+ - intros stk Hp A a lpred Hpp. destruct stk as [|[] stk]=>//=.
+ + destruct pop_subproof0.
+ + destruct lpred as [|pred lpred]; [by constructor|].
+ constructor. apply IH. by inversion Hpp.
+Qed.
+
+(** [step_result] represents the result of one step of the automaton : it can
+ fail, accept or progress. [Fail_sr] means that the input is incorrect.
+ [Accept_sr] means that this is the last step of the automaton, and it
+ returns the semantic value of the input word. [Progress_sr] means that
+ some progress has been made, but new steps are needed in order to accept
+ a word.
+
+ For [Accept_sr] and [Progress_sr], the result contains the new input buffer.
+
+ [Fail_sr] means that the input word is rejected by the automaton. It is
+ different from [Err] (from the error monad), which means that the automaton
+ is bogus and has performed a forbidden action. **)
+Inductive step_result :=
+ | Fail_sr: step_result
+ | Accept_sr: symbol_semantic_type (NT (start_nt init)) -> buffer -> step_result
+ | Progress_sr: stack -> buffer -> step_result.
+
+(** [reduce_step] performs a reduce action:
+ - pops some elements from the stack
+ - executes the semantic action of the production
+ - follows the goto for the produced non-terminal symbol **)
+Definition reduce_step stk prod (buffer : buffer)
+ (Hval : thunkP (valid_for_reduce (state_of_stack stk) prod))
+ (Hi : thunkP (stack_invariant stk))
+ : step_result.
+refine
+ ((let '(stk', sem) as ss := pop (prod_rhs_rev prod) stk _ (prod_action prod)
+ return thunkP (state_valid_after_pop (state_of_stack (fst ss)) _
+ (head_states_of_state (state_of_stack stk))) -> _
+ in fun Hval' =>
+ match goto_table (state_of_stack stk') (prod_lhs prod) as goto
+ return (thunkP (goto = None ->
+ match state_of_stack stk' with
+ | Init i => prod_lhs prod = start_nt i
+ | Ninit _ => False
+ end)) -> _
+ with
+ | Some (exist _ state_new e) => fun _ =>
+ let sem := eq_rect _ _ sem _ e in
+ Progress_sr (existT noninitstate_type state_new sem::stk') buffer
+ | None => fun Hval =>
+ let sem := cast symbol_semantic_type _ sem in
+ Accept_sr sem buffer
+ end (fun _ => _))
+ (fun _ => pop_state_valid _ _ _ _ _ _ _)).
+Proof.
+ - clear -Hi Hval.
+ abstract (intros _; destruct Hi=>//; eapply prefix_trans; [by apply Hval|eassumption]).
+ - clear -Hval.
+ abstract (intros _; f_equal; specialize (Hval I eq_refl); destruct stk' as [|[]]=>//).
+ - simpl in Hval'. clear -Hval Hval'.
+ abstract (move : Hval => /(_ I) [_ /(_ _ (Hval' I))] Hval2 Hgoto; by rewrite Hgoto in Hval2).
+ - clear -Hi. abstract by destruct Hi.
+Defined.
+
+Lemma reduce_step_stack_invariant_preserved stk prod buffer Hv Hi stk' buffer':
+ reduce_step stk prod buffer Hv Hi = Progress_sr stk' buffer' ->
+ stack_invariant stk'.
+Proof.
+ unfold reduce_step.
+ match goal with
+ | |- context [pop ?symbols_to_pop stk ?Hp ?action] =>
+ assert (Hi':=pop_preserves_invariant symbols_to_pop stk Hp _ action (Hi I));
+ generalize (pop_state_valid symbols_to_pop stk Hp _ action)
+ end.
+ destruct pop as [stk0 sem]=>/=. simpl in Hi'. intros Hv'.
+ assert (Hgoto1:=goto_head_symbs (state_of_stack stk0) (prod_lhs prod)).
+ assert (Hgoto2:=goto_past_state (state_of_stack stk0) (prod_lhs prod)).
+ match goal with | |- context [fun _ : True => ?X] => generalize X end.
+ destruct goto_table as [[state_new e]|] eqn:EQgoto=>//.
+ intros _ [= <- <-]. constructor=>/=.
+ - constructor. eapply prefix_trans. apply Hgoto1. by destruct Hi'.
+ - unfold state_stack_of_stack; simpl; constructor.
+ + intros ?. by destruct singleton_state_pred.
+ + eapply prefix_pred_trans. apply Hgoto2. by destruct Hi'.
+ - by constructor.
+Qed.
+
+(** One step of parsing. **)
+Definition step stk buffer (Hi : thunkP (stack_invariant stk)): step_result :=
+ match action_table (state_of_stack stk) as a return
+ thunkP
+ match a return Prop with
+ | Default_reduce_act prod => _
+ | Lookahead_act awt => forall t : terminal,
+ match awt t with
+ | Reduce_act p => _
+ | _ => True
+ end
+ end -> _
+ with
+ | Default_reduce_act prod => fun Hv =>
+ reduce_step stk prod buffer Hv Hi
+ | Lookahead_act awt => fun Hv =>
+ match buf_head buffer with
+ | tok =>
+ match awt (token_term tok) as a return
+ thunkP match a return Prop with Reduce_act p => _ | _ => _ end -> _
+ with
+ | Shift_act state_new e => fun _ =>
+ let sem_conv := eq_rect _ symbol_semantic_type (token_sem tok) _ e in
+ Progress_sr (existT noninitstate_type state_new sem_conv::stk)
+ (buf_tail buffer)
+ | Reduce_act prod => fun Hv =>
+ reduce_step stk prod buffer Hv Hi
+ | Fail_act => fun _ =>
+ Fail_sr
+ end (fun _ => Hv I (token_term tok))
+ end
+ end (fun _ => reduce_ok _).
+
+Lemma step_stack_invariant_preserved stk buffer Hi stk' buffer':
+ step stk buffer Hi = Progress_sr stk' buffer' ->
+ stack_invariant stk'.
+Proof.
+ unfold step.
+ generalize (reduce_ok (state_of_stack stk))=>Hred.
+ assert (Hshift1 := shift_head_symbs (state_of_stack stk)).
+ assert (Hshift2 := shift_past_state (state_of_stack stk)).
+ destruct action_table as [prod|awt]=>/=.
+ - eauto using reduce_step_stack_invariant_preserved.
+ - set (term := token_term (buf_head buffer)).
+ generalize (Hred term). clear Hred. intros Hred.
+ specialize (Hshift1 term). specialize (Hshift2 term).
+ destruct (awt term) as [state_new e|prod|]=>//.
+ + intros [= <- <-]. constructor=>/=.
+ * constructor. eapply prefix_trans. apply Hshift1. by destruct Hi.
+ * unfold state_stack_of_stack; simpl; constructor.
+ -- intros ?. by destruct singleton_state_pred.
+ -- eapply prefix_pred_trans. apply Hshift2. by destruct Hi.
+ * constructor; by apply Hi.
+ + eauto using reduce_step_stack_invariant_preserved.
+Qed.
+
+(** The parser uses a [nat] fuel parameter [log_n_steps], so that we
+ do not have to prove termination, which is difficult.
+
+ Note that [log_n_steps] is *not* fuel in the conventional
+ sense: this parameter is the logarithm (in base 2) of the
+ number of steps to perform, i.e. [2^log_n_steps] steps are made.
+ Hence, a value of, e.g., 50 is usually enough to ensure termination. *)
+Fixpoint parse_fix stk buffer (log_n_steps : nat) (Hi : thunkP (stack_invariant stk)):
+ { sr : step_result |
+ forall stk' buffer', sr = Progress_sr stk' buffer' -> stack_invariant stk' } :=
+ match log_n_steps with
+ | O => exist _ (step stk buffer Hi)
+ (step_stack_invariant_preserved _ _ Hi)
+ | S log_n_steps =>
+ match parse_fix stk buffer log_n_steps Hi with
+ | exist _ (Progress_sr stk buffer) Hi' =>
+ parse_fix stk buffer log_n_steps (fun _ => Hi' _ buffer eq_refl)
+ | sr => sr
+ end
+ end.
+
+(** The final result of parsing is either a failure (the automaton
+ has rejected the input word), a timeout (the automaton has
+ used up the given [2^log_n_steps] steps), or a parsed semantic value
+ together with the rest of the input buffer.
+
+ Note that we do not make parse_result depend on start_nt for the
+ result type, so that this inductive is extracted without the use
+ of Obj.t in OCaml. **)
+Inductive parse_result {A : Type} :=
+ | Fail_pr: parse_result
+ | Timeout_pr: parse_result
+ | Parsed_pr: A -> buffer -> parse_result.
+Global Arguments parse_result _ : clear implicits.
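
A client typically just discriminates on the three constructors; for instance (a sketch, not from the patch, with a made-up name):

Definition is_parsed {A : Type} (r : parse_result A) : bool :=
  match r with
  | Parsed_pr _ _ => true
  | Fail_pr | Timeout_pr => false
  end.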
+
+Definition parse (buffer : buffer) (log_n_steps : nat):
+ parse_result (symbol_semantic_type (NT (start_nt init))).
+refine (match proj1_sig (parse_fix [] buffer log_n_steps _) with
+ | Fail_sr => Fail_pr
+ | Accept_sr sem buffer' => Parsed_pr sem buffer'
+ | Progress_sr _ _ => Timeout_pr
+ end).
+Proof.
+ abstract (repeat constructor; intros; by destruct singleton_state_pred).
+Defined.
+
+End Interpreter.
+
+Arguments Fail_sr {init}.
+Arguments Accept_sr {init} _ _.
+Arguments Progress_sr {init} _ _.
+
+End Make.
+
+Module Type T(A:Automaton.T).
+ Include (Make A).
+End T.
diff --git a/MenhirLib/Interpreter_complete.v b/MenhirLib/Interpreter_complete.v
new file mode 100644
index 00000000..ec69592b
--- /dev/null
+++ b/MenhirLib/Interpreter_complete.v
@@ -0,0 +1,825 @@
+(****************************************************************************)
+(* *)
+(* Menhir *)
+(* *)
+(* Jacques-Henri Jourdan, CNRS, LRI, Université Paris Sud *)
+(* *)
+(* Copyright Inria. All rights reserved. This file is distributed under *)
+(* the terms of the GNU Lesser General Public License as published by the *)
+(* Free Software Foundation, either version 3 of the License, or (at your *)
+(* option) any later version, as described in the file LICENSE. *)
+(* *)
+(****************************************************************************)
+
+From Coq Require Import List Syntax Arith.
+From Coq.ssr Require Import ssreflect.
+Require Import Alphabet Grammar.
+Require Automaton Interpreter Validator_complete.
+
+Module Make(Import A:Automaton.T) (Import Inter:Interpreter.T A).
+Module Import Valid := Validator_complete.Make A.
+
+(** * Completeness Proof **)
+
+Section Completeness_Proof.
+
+Hypothesis safe: Inter.ValidSafe.safe.
+Hypothesis complete: complete.
+
+(* Properties of the automaton deduced from completeness validation. *)
+Proposition nullable_stable: nullable_stable.
+Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
+Proposition first_stable: first_stable.
+Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
+Proposition start_future: start_future.
+Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
+Proposition terminal_shift: terminal_shift.
+Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
+Proposition end_reduce: end_reduce.
+Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
+Proposition start_goto: start_goto.
+Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
+Proposition non_terminal_goto: non_terminal_goto.
+Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
+Proposition non_terminal_closed: non_terminal_closed.
+Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
+
+(** If the nullable predicate has been validated, then it is correct. **)
+Lemma nullable_correct head word :
+ word = [] -> parse_tree head word -> nullable_symb head = true
+with nullable_correct_list heads word :
+ word = [] ->
+ parse_tree_list heads word -> nullable_word heads = true.
+Proof.
+ - destruct 2=>//. assert (Hnull := nullable_stable prod).
+ erewrite nullable_correct_list in Hnull; eauto.
+ - intros Hword. destruct 1=>//=. destruct (app_eq_nil _ _ Hword).
+ eauto using andb_true_intro.
+Qed.
+
+(** Auxiliary lemma for first_correct. *)
+Lemma first_word_set_app t word1 word2 :
+ TerminalSet.In t (first_word_set (word1 ++ word2)) <->
+ TerminalSet.In t (first_word_set word1) \/
+ TerminalSet.In t (first_word_set word2) /\ nullable_word (rev word1) = true.
+Proof.
+ induction word1 as [|s word1 IH]=>/=.
+ - split; [tauto|]. move=>[/TerminalSet.empty_1 ?|[? _]]//.
+ - rewrite /nullable_word forallb_app /=. destruct nullable_symb=>/=.
+ + rewrite Bool.andb_true_r. split.
+ * move=>/TerminalSet.union_1. rewrite IH.
+ move=>[?|[?|[??]]]; auto using TerminalSet.union_2, TerminalSet.union_3.
+ * destruct IH.
+ move=>[/TerminalSet.union_1 [?|?]|[??]];
+ auto using TerminalSet.union_2, TerminalSet.union_3.
+ + rewrite Bool.andb_false_r. by intuition.
+Qed.
+
+(** If the first predicate has been validated, then it is correct. **)
+Lemma first_correct head word t q :
+ word = t::q ->
+ parse_tree head word ->
+ TerminalSet.In (token_term t) (first_symb_set head)
+with first_correct_list heads word t q :
+ word = t::q ->
+ parse_tree_list heads word ->
+ TerminalSet.In (token_term t) (first_word_set (rev' heads)).
+Proof.
+ - intros Hword. destruct 1=>//.
+ + inversion Hword. subst. apply TerminalSet.singleton_2, compare_refl.
+ + eapply first_stable. eauto.
+ - intros Hword. destruct 1 as [|symq wordq ptl symt wordt pt]=>//=.
+ rewrite /rev' -rev_alt /= first_word_set_app /= rev_involutive rev_alt.
+ destruct wordq; [right|left].
+ + destruct nullable_symb; eauto using TerminalSet.union_2, nullable_correct_list.
+ + inversion Hword. subst. fold (rev' symq). eauto.
+Qed.
+
+(** A PTL is compatible with a stack if the top of the stack contains
+ data corresponding to this PTL. *)
+Fixpoint ptl_stack_compat {symbs word}
+ (stk0 : stack) (ptl : parse_tree_list symbs word) (stk : stack) : Prop :=
+ match ptl with
+ | Nil_ptl => stk0 = stk
+ | @Cons_ptl _ _ ptl sym _ pt =>
+ match stk with
+ | [] => False
+ | existT _ _ sem::stk =>
+ ptl_stack_compat stk0 ptl stk /\
+ exists e,
+ sem = eq_rect _ symbol_semantic_type (pt_sem pt) _ e
+ end
+ end.
+
+(** ... and when a PTL is compatible with a stack, calling the pop
+ function returns the semantic value of this PTL. *)
+Lemma pop_stack_compat_pop_spec {A symbs word}
+ (ptl:parse_tree_list symbs word) (stk:stack) (stk0:stack) action :
+ ptl_stack_compat stk0 ptl stk ->
+ pop_spec symbs stk action stk0 (ptl_sem (A:=A) ptl action).
+Proof.
+ revert stk. induction ptl=>stk /= Hstk.
+ - subst. constructor.
+ - destruct stk as [|[st sem] stk]=>//. destruct Hstk as [Hstk [??]]. subst.
+ simpl. constructor. eauto.
+Qed.
+
+Variable init: initstate.
+
+(** In order to prove completeness, we first fix a word to be parsed,
+ together with the contents of the buffer at the end of parsing. *)
+Variable full_word: list token.
+Variable buffer_end: buffer.
+
+(** Completeness is proved by following the traversal of the parse
+ tree which is performed by the parser. Each step of parsing
+ corresponds to one step of traversal. In order to represent the state
+ of the traversal, we define the notion of "dotted" parse tree, which
+ is a parse tree with one dot on one of its node. The place of the
+ dot represents the place of the next action to be executed.
+
+ Such a dotted parse tree is decomposed into two parts: a "regular"
+ parse tree, which is the parse tree placed under the dot, and a
+ "parse tree zipper", which is the part of the parse tree placed
+ above the dot. Therefore, a parse tree zipper is a parse tree with a
+ hole. Moreover, for easier manipulation, a parse tree zipper is
+ represented "upside down". That is, the root of the parse tree is
+ actually a leaf of the zipper, while the root of the zipper is the
+ hole.
+ *)
+Inductive pt_zipper:
+ forall (hole_symb:symbol) (hole_word:list token), Type :=
+| Top_ptz:
+ pt_zipper (NT (start_nt init)) full_word
+| Cons_ptl_ptz:
+ forall {head_symbolsq:list symbol} {wordq:list token},
+ parse_tree_list head_symbolsq wordq ->
+
+ forall {head_symbolt:symbol} {wordt:list token},
+
+ ptl_zipper (head_symbolt::head_symbolsq) (wordq++wordt) ->
+ pt_zipper head_symbolt wordt
+with ptl_zipper:
+ forall (hole_symbs:list symbol) (hole_word:list token), Type :=
+| Non_terminal_pt_ptlz:
+ forall {p:production} {word:list token},
+ pt_zipper (NT (prod_lhs p)) word ->
+ ptl_zipper (prod_rhs_rev p) word
+
+| Cons_ptl_ptlz:
+ forall {head_symbolsq:list symbol} {wordq:list token},
+
+ forall {head_symbolt:symbol} {wordt:list token},
+ parse_tree head_symbolt wordt ->
+
+ ptl_zipper (head_symbolt::head_symbolsq) (wordq++wordt) ->
+
+ ptl_zipper head_symbolsq wordq.
+
+(** A dotted parse tree is the combination of a parse tree zipper with
+ a parse tree. It comes in two flavors, depending on the next
+ action to be executed (shift or reduce). *)
+Inductive pt_dot: Type :=
+| Reduce_ptd: forall {prod word},
+ parse_tree_list (prod_rhs_rev prod) word ->
+ pt_zipper (NT (prod_lhs prod)) word ->
+ pt_dot
+| Shift_ptd: forall (tok : token) {symbolsq wordq},
+ parse_tree_list symbolsq wordq ->
+ ptl_zipper (T (token_term tok)::symbolsq) (wordq++[tok]) ->
+ pt_dot.
+
+(** We can compute the full semantic value of a parse tree when it is
+ represented as a dotted parse tree ([pt_dot]). *)
+
+Fixpoint ptlz_sem {hole_symbs hole_word}
+ (ptlz:ptl_zipper hole_symbs hole_word) :
+ (forall A, arrows_right A (map symbol_semantic_type hole_symbs) -> A) ->
+ (symbol_semantic_type (NT (start_nt init))) :=
+ match ptlz with
+ | @Non_terminal_pt_ptlz prod _ ptz =>
+ fun k => ptz_sem ptz (k _ (prod_action prod))
+ | Cons_ptl_ptlz pt ptlz =>
+ fun k => ptlz_sem ptlz (fun _ f => k _ (f (pt_sem pt)))
+ end
+with ptz_sem {hole_symb hole_word}
+ (ptz:pt_zipper hole_symb hole_word):
+ symbol_semantic_type hole_symb -> symbol_semantic_type (NT (start_nt init)) :=
+ match ptz with
+ | Top_ptz => fun sem => sem
+ | Cons_ptl_ptz ptl ptlz =>
+ fun sem => ptlz_sem ptlz (fun _ f => ptl_sem ptl (f sem))
+ end.
+
+Definition ptd_sem (ptd : pt_dot) :=
+ match ptd with
+ | @Reduce_ptd prod _ ptl ptz =>
+ ptz_sem ptz (ptl_sem ptl (prod_action prod))
+ | Shift_ptd tok ptl ptlz =>
+ ptlz_sem ptlz (fun _ f => ptl_sem ptl (f (token_sem tok)))
+ end.
+
+(** The buffer associated with a dotted parse tree corresponds to the
+ buffer left to be read by the parser when at the state represented
+ by the dotted parse tree. *)
+Fixpoint ptlz_buffer {hole_symbs hole_word}
+ (ptlz:ptl_zipper hole_symbs hole_word): buffer :=
+ match ptlz with
+ | Non_terminal_pt_ptlz ptz =>
+ ptz_buffer ptz
+ | @Cons_ptl_ptlz _ _ _ wordt _ ptlz' =>
+ wordt ++ ptlz_buffer ptlz'
+ end
+with ptz_buffer {hole_symb hole_word}
+ (ptz:pt_zipper hole_symb hole_word): buffer :=
+ match ptz with
+ | Top_ptz => buffer_end
+ | Cons_ptl_ptz _ ptlz =>
+ ptlz_buffer ptlz
+ end.
+
+Definition ptd_buffer (ptd:pt_dot) :=
+ match ptd with
+ | Reduce_ptd _ ptz => ptz_buffer ptz
+ | @Shift_ptd tok _ wordq _ ptlz => (tok::ptlz_buffer ptlz)%buf
+ end.
+
+(** We are now ready to define the main invariant of the proof of
+ completeness: we need to specify when a stack is compatible with a
+ dotted parse tree. Informally, a stack is compatible with a dotted
+ parse tree when it is the concatenation of stack fragments which are
+ compatible with each of the partially recognized productions
+ appearing in the parse tree zipper. Moreover, the head of each of
+ these stack fragments contains a state which has an item predicted by
+ the corresponding zipper.
+
+ More formally, the compatibility relation first needs the following
+ auxiliary definitions: *)
+Fixpoint ptlz_prod {hole_symbs hole_word}
+ (ptlz:ptl_zipper hole_symbs hole_word): production :=
+ match ptlz with
+ | @Non_terminal_pt_ptlz prod _ _ => prod
+ | Cons_ptl_ptlz _ ptlz' => ptlz_prod ptlz'
+ end.
+
+Fixpoint ptlz_future {hole_symbs hole_word}
+ (ptlz:ptl_zipper hole_symbs hole_word): list symbol :=
+ match ptlz with
+ | Non_terminal_pt_ptlz _ => []
+ | @Cons_ptl_ptlz _ _ s _ _ ptlz' => s::ptlz_future ptlz'
+ end.
+
+Fixpoint ptlz_lookahead {hole_symbs hole_word}
+ (ptlz:ptl_zipper hole_symbs hole_word) : terminal :=
+ match ptlz with
+ | Non_terminal_pt_ptlz ptz => token_term (buf_head (ptz_buffer ptz))
+ | Cons_ptl_ptlz _ ptlz' => ptlz_lookahead ptlz'
+ end.
+
+Fixpoint ptz_stack_compat {hole_symb hole_word}
+ (stk : stack) (ptz : pt_zipper hole_symb hole_word) : Prop :=
+ match ptz with
+ | Top_ptz => stk = []
+ | Cons_ptl_ptz ptl ptlz =>
+ exists stk0,
+ state_has_future (state_of_stack init stk) (ptlz_prod ptlz)
+ (hole_symb::ptlz_future ptlz) (ptlz_lookahead ptlz) /\
+ ptl_stack_compat stk0 ptl stk /\
+ ptlz_stack_compat stk0 ptlz
+ end
+with ptlz_stack_compat {hole_symbs hole_word}
+ (stk : stack) (ptlz : ptl_zipper hole_symbs hole_word) : Prop :=
+ match ptlz with
+ | Non_terminal_pt_ptlz ptz => ptz_stack_compat stk ptz
+ | Cons_ptl_ptlz _ ptlz => ptlz_stack_compat stk ptlz
+ end.
+
+Definition ptd_stack_compat (ptd:pt_dot) (stk:stack): Prop :=
+ match ptd with
+ | @Reduce_ptd prod _ ptl ptz =>
+ exists stk0,
+ state_has_future (state_of_stack init stk) prod []
+ (token_term (buf_head (ptz_buffer ptz))) /\
+ ptl_stack_compat stk0 ptl stk /\
+ ptz_stack_compat stk0 ptz
+ | Shift_ptd tok ptl ptlz =>
+ exists stk0,
+ state_has_future (state_of_stack init stk) (ptlz_prod ptlz)
+ (T (token_term tok) :: ptlz_future ptlz) (ptlz_lookahead ptlz) /\
+ ptl_stack_compat stk0 ptl stk /\
+ ptlz_stack_compat stk0 ptlz
+ end.
+
+Lemma ptz_stack_compat_cons_state_has_future {symbsq wordq symbt wordt} stk
+ (ptl : parse_tree_list symbsq wordq)
+ (ptlz : ptl_zipper (symbt :: symbsq) (wordq ++ wordt)) :
+ ptz_stack_compat stk (Cons_ptl_ptz ptl ptlz) ->
+ state_has_future (state_of_stack init stk) (ptlz_prod ptlz)
+ (symbt::ptlz_future ptlz) (ptlz_lookahead ptlz).
+Proof. move=>[stk0 [? [? ?]]] //. Qed.
+
+Lemma ptlz_future_ptlz_prod hole_symbs hole_word
+ (ptlz:ptl_zipper hole_symbs hole_word) :
+ rev_append (ptlz_future ptlz) hole_symbs = prod_rhs_rev (ptlz_prod ptlz).
+Proof. induction ptlz=>//=. Qed.
+
+Lemma ptlz_future_first {symbs word} (ptlz : ptl_zipper symbs word) :
+ TerminalSet.In (token_term (buf_head (ptlz_buffer ptlz)))
+ (first_word_set (ptlz_future ptlz)) \/
+ token_term (buf_head (ptlz_buffer ptlz)) = ptlz_lookahead ptlz /\
+ nullable_word (ptlz_future ptlz) = true.
+Proof.
+ induction ptlz as [|??? [|tok] pt ptlz IH]; [by auto| |]=>/=.
+ - rewrite (nullable_correct _ _ eq_refl pt).
+ destruct IH as [|[??]]; [left|right]=>/=; auto using TerminalSet.union_3.
+ - left. destruct nullable_symb; eauto using TerminalSet.union_2, first_correct.
+Qed.
+
+(** We now define the next dotted parse tree to be handled after one
+ action. Such a dotted parse tree is built in two steps: not only do
+ we have to perform the action by completing the parse tree, but we
+ also have to prepare for the following step by moving the dot down
+ to place it in front of the next action to be performed.
+*)
+Fixpoint build_pt_dot_from_pt {symb word}
+ (pt : parse_tree symb word) (ptz : pt_zipper symb word)
+ : pt_dot :=
+ match pt in parse_tree symb word
+ return pt_zipper symb word -> pt_dot
+ with
+ | Terminal_pt tok =>
+ fun ptz =>
+ let X :=
+ match ptz in pt_zipper symb word
+ return match symb with T term => True | NT _ => False end ->
+ { symbsq : list symbol &
+ { wordq : list token &
+ (parse_tree_list symbsq wordq *
+ ptl_zipper (symb :: symbsq) (wordq ++ word))%type } }
+ with
+ | Top_ptz => fun F => False_rect _ F
+ | Cons_ptl_ptz ptl ptlz => fun _ =>
+ existT _ _ (existT _ _ (ptl, ptlz))
+ end I
+ in
+ Shift_ptd tok (fst (projT2 (projT2 X))) (snd (projT2 (projT2 X)))
+ | Non_terminal_pt prod ptl => fun ptz =>
+ let is_notnil :=
+ match ptl in parse_tree_list w _
+ return option (match w return Prop with [] => False | _ => True end)
+ with
+ | Nil_ptl => None
+ | _ => Some I
+ end
+ in
+ match is_notnil with
+ | None => Reduce_ptd ptl ptz
+ | Some H => build_pt_dot_from_pt_rec ptl H (Non_terminal_pt_ptlz ptz)
+ end
+ end ptz
+with build_pt_dot_from_pt_rec {symbs word}
+ (ptl : parse_tree_list symbs word)
+ (Hsymbs : match symbs with [] => False | _ => True end)
+ (ptlz : ptl_zipper symbs word)
+ : pt_dot :=
+ match ptl in parse_tree_list symbs word
+ return match symbs with [] => False | _ => True end ->
+ ptl_zipper symbs word ->
+ pt_dot
+ with
+ | Nil_ptl => fun Hsymbs _ => False_rect _ Hsymbs
+ | Cons_ptl ptl' pt => fun _ =>
+ match ptl' in parse_tree_list symbsq wordq
+ return parse_tree_list symbsq wordq ->
+ ptl_zipper (_ :: symbsq) (wordq ++ _) ->
+ pt_dot
+ with
+ | Nil_ptl => fun _ ptlz =>
+ build_pt_dot_from_pt pt (Cons_ptl_ptz Nil_ptl ptlz)
+ | _ => fun ptl' ptlz =>
+ build_pt_dot_from_pt_rec ptl' I (Cons_ptl_ptlz pt ptlz)
+ end ptl'
+ end Hsymbs ptlz.
+
+Definition build_pt_dot_from_ptl {symbs word}
+ (ptl : parse_tree_list symbs word)
+ (ptlz : ptl_zipper symbs word)
+ : pt_dot :=
+ match ptlz in ptl_zipper symbs word
+ return parse_tree_list symbs word -> pt_dot
+ with
+ | Non_terminal_pt_ptlz ptz => fun ptl =>
+ Reduce_ptd ptl ptz
+ | Cons_ptl_ptlz pt ptlz => fun ptl =>
+ build_pt_dot_from_pt pt (Cons_ptl_ptz ptl ptlz)
+ end ptl.
+
+Definition next_ptd (ptd:pt_dot) : option pt_dot :=
+ match ptd with
+ | Shift_ptd tok ptl ptlz =>
+ Some (build_pt_dot_from_ptl (Cons_ptl ptl (Terminal_pt tok)) ptlz)
+ | Reduce_ptd ptl ptz =>
+ match ptz in pt_zipper symb word
+ return parse_tree symb word -> _
+ with
+ | Top_ptz => fun _ => None
+ | Cons_ptl_ptz ptl' ptlz => fun pt =>
+ Some (build_pt_dot_from_ptl (Cons_ptl ptl' pt) ptlz)
+ end (Non_terminal_pt _ ptl)
+ end.
+
+Fixpoint next_ptd_iter (ptd:pt_dot) (log_n_steps:nat) : option pt_dot :=
+ match log_n_steps with
+ | O => next_ptd ptd
+ | S log_n_steps =>
+ match next_ptd_iter ptd log_n_steps with
+ | None => None
+ | Some ptd => next_ptd_iter ptd log_n_steps
+ end
+ end.
+
+(** We prove that these functions behave well w.r.t. semantic values. *)
+Lemma sem_build_from_pt {symb word}
+ (pt : parse_tree symb word) (ptz : pt_zipper symb word) :
+ ptz_sem ptz (pt_sem pt)
+ = ptd_sem (build_pt_dot_from_pt pt ptz)
+with sem_build_from_pt_rec {symbs word}
+ (ptl : parse_tree_list symbs word) (ptlz : ptl_zipper symbs word)
+ Hsymbs :
+ ptlz_sem ptlz (fun _ f => ptl_sem ptl f)
+ = ptd_sem (build_pt_dot_from_pt_rec ptl Hsymbs ptlz).
+Proof.
+ - destruct pt as [tok|prod word ptl]=>/=.
+ + revert ptz. generalize [tok].
+ generalize (token_sem tok). generalize I.
+ change True with (match T (token_term tok) with T _ => True | NT _ => False end) at 1.
+ generalize (T (token_term tok)) => symb HT sem word ptz. by destruct ptz.
+ + match goal with
+ | |- context [match ?X with Some H => _ | None => _ end] => destruct X=>//
+ end.
+ by rewrite -sem_build_from_pt_rec.
+ - destruct ptl; [contradiction|].
+ specialize (sem_build_from_pt_rec _ _ ptl)=>/=. destruct ptl.
+ + by rewrite -sem_build_from_pt.
+ + by rewrite -sem_build_from_pt_rec.
+Qed.
+
+Lemma sem_build_from_ptl {symbs word}
+ (ptl : parse_tree_list symbs word) (ptlz : ptl_zipper symbs word) :
+ ptlz_sem ptlz (fun _ f => ptl_sem ptl f)
+ = ptd_sem (build_pt_dot_from_ptl ptl ptlz).
+Proof. destruct ptlz=>//=. by rewrite -sem_build_from_pt. Qed.
+
+Lemma sem_next_ptd (ptd : pt_dot) :
+ match next_ptd ptd with
+ | None => True
+ | Some ptd' => ptd_sem ptd = ptd_sem ptd'
+ end.
+Proof.
+ destruct ptd as [prod word ptl ptz|tok symbs word ptl ptlz] =>/=.
+ - change (ptl_sem ptl (prod_action prod))
+ with (pt_sem (Non_terminal_pt prod ptl)).
+ generalize (Non_terminal_pt prod ptl). clear ptl.
+ destruct ptz as [|?? ptl ?? ptlz]=>// pt. by rewrite -sem_build_from_ptl.
+ - by rewrite -sem_build_from_ptl.
+Qed.
+
+Lemma sem_next_ptd_iter (ptd : pt_dot) (log_n_steps : nat) :
+ match next_ptd_iter ptd log_n_steps with
+ | None => True
+ | Some ptd' => ptd_sem ptd = ptd_sem ptd'
+ end.
+Proof.
+ revert ptd.
+ induction log_n_steps as [|log_n_steps IH]; [by apply sem_next_ptd|]=>/= ptd.
+ assert (IH1 := IH ptd). destruct next_ptd_iter as [ptd'|]=>//.
+ specialize (IH ptd'). destruct next_ptd_iter=>//. congruence.
+Qed.
+
+(** We prove that these functions behave well w.r.t. the [ptd_buffer],
+    [ptz_buffer] and [ptlz_buffer] functions. *)
+Lemma ptd_buffer_build_from_pt {symb word}
+ (pt : parse_tree symb word) (ptz : pt_zipper symb word) :
+ (word ++ ptz_buffer ptz)%buf = ptd_buffer (build_pt_dot_from_pt pt ptz)
+with ptd_buffer_build_from_pt_rec {symbs word}
+ (ptl : parse_tree_list symbs word) (ptlz : ptl_zipper symbs word)
+ Hsymbs :
+ (word ++ ptlz_buffer ptlz)%buf = ptd_buffer (build_pt_dot_from_pt_rec ptl Hsymbs ptlz).
+Proof.
+ - destruct pt as [tok|prod word ptl]=>/=.
+ + f_equal. revert ptz. generalize [tok].
+ generalize (token_sem tok). generalize I.
+ change True with (match T (token_term tok) with T _ => True | NT _ => False end) at 1.
+ generalize (T (token_term tok)) => symb HT sem word ptz. by destruct ptz.
+ + match goal with
+ | |- context [match ?X with Some H => _ | None => _ end] => destruct X eqn:EQ
+ end.
+ * by rewrite -ptd_buffer_build_from_pt_rec.
+ * rewrite [X in (X ++ _)%buf](_ : word = []) //. clear -EQ. by destruct ptl.
+ - destruct ptl as [|?? ptl ?? pt]; [contradiction|].
+ specialize (ptd_buffer_build_from_pt_rec _ _ ptl).
+ destruct ptl.
+ + by rewrite /= -ptd_buffer_build_from_pt.
+ + by rewrite -ptd_buffer_build_from_pt_rec //= app_buf_assoc.
+Qed.
+
+Lemma ptd_buffer_build_from_ptl {symbs word}
+ (ptl : parse_tree_list symbs word) (ptlz : ptl_zipper symbs word) :
+ ptlz_buffer ptlz = ptd_buffer (build_pt_dot_from_ptl ptl ptlz).
+Proof.
+ destruct ptlz as [|???? pt]=>//=. by rewrite -ptd_buffer_build_from_pt.
+Qed.
+
+(** We prove that these functions behave well w.r.t. the [ptd_stack_compat],
+    [ptz_stack_compat] and [ptlz_stack_compat] predicates. *)
+Lemma ptd_stack_compat_build_from_pt {symb word}
+ (pt : parse_tree symb word) (ptz : pt_zipper symb word)
+ (stk: stack) :
+ ptz_stack_compat stk ptz ->
+ ptd_stack_compat (build_pt_dot_from_pt pt ptz) stk
+with ptd_stack_compat_build_from_pt_rec {symbs word}
+ (ptl : parse_tree_list symbs word) (ptlz : ptl_zipper symbs word)
+ (stk : stack) Hsymbs :
+ ptlz_stack_compat stk ptlz ->
+ state_has_future (state_of_stack init stk) (ptlz_prod ptlz)
+ (rev' (prod_rhs_rev (ptlz_prod ptlz))) (ptlz_lookahead ptlz) ->
+ ptd_stack_compat (build_pt_dot_from_pt_rec ptl Hsymbs ptlz) stk.
+Proof.
+ - intros Hstk. destruct pt as [tok|prod word ptl]=>/=.
+ + revert ptz Hstk. generalize [tok]. generalize (token_sem tok). generalize I.
+ change True with (match T (token_term tok) with T _ => True | NT _ => False end) at 1.
+ generalize (T (token_term tok)) => symb HT sem word ptz. by destruct ptz.
+ + assert (state_has_future (state_of_stack init stk) prod
+ (rev' (prod_rhs_rev prod)) (token_term (buf_head (ptz_buffer ptz)))).
+ { revert ptz Hstk. remember (NT (prod_lhs prod)) eqn:EQ=>ptz.
+ destruct ptz as [|?? ptl0 ?? ptlz0].
+ - intros ->. apply start_future. congruence.
+ - subst. intros (stk0 & Hfut & _). apply non_terminal_closed in Hfut.
+ specialize (Hfut prod eq_refl).
+ destruct (ptlz_future_first ptlz0) as [Hfirst|[Hfirst Hnull]].
+ + destruct Hfut as [_ Hfut]. auto.
+ + destruct Hfut as [Hfut _]. by rewrite Hnull -Hfirst in Hfut. }
+ match goal with
+ | |- context [match ?X with Some H => _ | None => _ end] => destruct X eqn:EQ
+ end.
+ * by apply ptd_stack_compat_build_from_pt_rec.
+ * exists stk. destruct ptl=>//.
+ - intros Hstk Hfut. destruct ptl as [|?? ptl ?? pt]; [contradiction|].
+ specialize (ptd_stack_compat_build_from_pt_rec _ _ ptl). destruct ptl.
+ + eapply ptd_stack_compat_build_from_pt=>//. exists stk.
+ split; [|split]=>//; [].
+ by rewrite -ptlz_future_ptlz_prod rev_append_rev /rev' -rev_alt
+ rev_app_distr rev_involutive in Hfut.
+ + by apply ptd_stack_compat_build_from_pt_rec.
+Qed.
+
+Lemma ptd_stack_compat_build_from_ptl {symbs word}
+ (ptl : parse_tree_list symbs word) (ptlz : ptl_zipper symbs word)
+ (stk stk0: stack) :
+ ptlz_stack_compat stk0 ptlz ->
+ ptl_stack_compat stk0 ptl stk ->
+ state_has_future (state_of_stack init stk) (ptlz_prod ptlz)
+ (ptlz_future ptlz) (ptlz_lookahead ptlz) ->
+ ptd_stack_compat (build_pt_dot_from_ptl ptl ptlz) stk.
+Proof.
+ intros Hstk0 Hstk Hfut. destruct ptlz=>/=.
+ - eauto.
+ - apply ptd_stack_compat_build_from_pt=>/=. eauto.
+Qed.
+
+(** We can now proceed by proving that the invariant is preserved by
+ each step of parsing. We also prove that each step of parsing
+ follows next_ptd.
+
+ We start with reduce steps: *)
+Lemma reduce_step_next_ptd (prod : production) (word : list token)
+ (ptl : parse_tree_list (prod_rhs_rev prod) word)
+ (ptz : pt_zipper (NT (prod_lhs prod)) word)
+ (stk : stack)
+ Hval Hi :
+ ptd_stack_compat (Reduce_ptd ptl ptz) stk ->
+ match next_ptd (Reduce_ptd ptl ptz) with
+ | None =>
+ reduce_step init stk prod (ptz_buffer ptz) Hval Hi =
+ Accept_sr (ptd_sem (Reduce_ptd ptl ptz)) buffer_end
+ | Some ptd =>
+ exists stk',
+ reduce_step init stk prod (ptz_buffer ptz) Hval Hi =
+ Progress_sr stk' (ptd_buffer ptd) /\
+ ptd_stack_compat ptd stk'
+ end.
+Proof.
+ intros (stk0 & _ & Hstk & Hstk0).
+ apply pop_stack_compat_pop_spec with (action := prod_action prod) in Hstk.
+ rewrite <-pop_spec_ok with (Hp := reduce_step_subproof init stk prod Hval Hi) in Hstk.
+ unfold reduce_step.
+ match goal with
+ | |- context [pop_state_valid init ?A stk ?B ?C ?D ?E ?F] =>
+ generalize (pop_state_valid init A stk B C D E F)
+ end.
+ rewrite Hstk /=. intros Hv.
+ generalize (reduce_step_subproof1 init stk prod Hval stk0 (fun _ : True => Hv)).
+ clear Hval Hstk Hi Hv stk.
+ assert (Hgoto := fun fut prod' =>
+ non_terminal_goto (state_of_stack init stk0) prod' (NT (prod_lhs prod)::fut)).
+ simpl in Hgoto.
+ destruct goto_table as [[st Hst]|] eqn:Hgoto'.
+ - intros _.
+ assert (match ptz with Top_ptz => False | _ => True end).
+ { revert ptz Hst Hstk0 Hgoto'.
+ generalize (eq_refl (NT (prod_lhs prod))).
+ generalize (NT (prod_lhs prod)) at 1 3 5.
+ intros nt Hnt ptz. destruct ptz=>//. injection Hnt=> <- /= Hst -> /= Hg.
+ assert (Hsg := start_goto init). by rewrite Hg in Hsg. }
+ clear Hgoto'.
+
+ change (ptl_sem ptl (prod_action prod))
+ with (pt_sem (Non_terminal_pt prod ptl)).
+ generalize (Non_terminal_pt prod ptl). clear ptl.
+ destruct ptz as [|?? ptl ? ? ptlz]=>// pt.
+
+ subst=>/=. eexists _. split.
+ + f_equal. apply ptd_buffer_build_from_ptl.
+ + destruct Hstk0 as (stk0' & Hfut & Hstk0' & Hstk0).
+ apply (ptd_stack_compat_build_from_ptl _ _ _ stk0'); auto; [].
+ split=>//. by exists eq_refl.
+ - intros Hv. generalize (reduce_step_subproof0 _ prod _ (fun _ => Hv)).
+ intros EQnt. clear Hv Hgoto'.
+
+ change (ptl_sem ptl (prod_action prod))
+ with (pt_sem (Non_terminal_pt prod ptl)).
+ generalize (Non_terminal_pt prod ptl). clear ptl. destruct ptz.
+ + intros pt. f_equal. by rewrite cast_eq.
+ + edestruct Hgoto. eapply ptz_stack_compat_cons_state_has_future, Hstk0.
+Qed.
+
+Lemma step_next_ptd (ptd : pt_dot) (stk : stack) Hi :
+ ptd_stack_compat ptd stk ->
+ match next_ptd ptd with
+ | None =>
+ step safe init stk (ptd_buffer ptd) Hi =
+ Accept_sr (ptd_sem ptd) buffer_end
+ | Some ptd' =>
+ exists stk',
+ step safe init stk (ptd_buffer ptd) Hi =
+ Progress_sr stk' (ptd_buffer ptd') /\
+ ptd_stack_compat ptd' stk'
+ end.
+Proof.
+ intros Hstk. unfold step.
+ generalize (reduce_ok safe (state_of_stack init stk)).
+ destruct ptd as [prod word ptl ptz|tok symbs word ptl ptlz].
+ - assert (Hfut : state_has_future (state_of_stack init stk) prod []
+ (token_term (buf_head (ptz_buffer ptz)))).
+ { destruct Hstk as (? & ? & ?)=>//. }
+ assert (Hact := end_reduce _ _ _ _ Hfut).
+ destruct action_table as [?|awt]=>Hval /=.
+ + subst. by apply reduce_step_next_ptd.
+ + set (term := token_term (buf_head (ptz_buffer ptz))) in *.
+ generalize (Hval term). clear Hval. destruct (awt term)=>//. subst.
+ intros Hval. by apply reduce_step_next_ptd.
+ - destruct Hstk as (stk0 & Hfut & Hstk & Hstk0).
+ assert (Hact := terminal_shift _ _ _ _ Hfut). simpl in Hact. clear Hfut.
+ destruct action_table as [?|awt]=>//= /(_ (token_term tok)).
+ destruct awt as [st' EQ| |]=>// _. eexists. split.
+ + f_equal. rewrite -ptd_buffer_build_from_ptl //.
+ + apply (ptd_stack_compat_build_from_ptl _ _ _ stk0); simpl; eauto.
+Qed.
+
+(** We prove the completeness of the parser main loop. *)
+Lemma parse_fix_next_ptd_iter (ptd : pt_dot) (stk : stack) (log_n_steps : nat) Hi :
+ ptd_stack_compat ptd stk ->
+ match next_ptd_iter ptd log_n_steps with
+ | None =>
+ proj1_sig (parse_fix safe init stk (ptd_buffer ptd) log_n_steps Hi) =
+ Accept_sr (ptd_sem ptd) buffer_end
+ | Some ptd' =>
+ exists stk',
+ proj1_sig (parse_fix safe init stk (ptd_buffer ptd) log_n_steps Hi) =
+ Progress_sr stk' (ptd_buffer ptd') /\
+ ptd_stack_compat ptd' stk'
+ end.
+Proof.
+ revert ptd stk Hi.
+ induction log_n_steps as [|log_n_steps IH]; [by apply step_next_ptd|].
+ move => /= ptd stk Hi Hstk. assert (IH1 := IH ptd stk Hi Hstk).
+ assert (EQsem := sem_next_ptd_iter ptd log_n_steps).
+ destruct parse_fix as [sr Hi']. simpl in IH1.
+ destruct next_ptd_iter as [ptd'|].
+ - rewrite EQsem. destruct IH1 as (stk' & -> & Hstk'). by apply IH.
+ - by subst.
+Qed.
+
+(** The parser is defined by recursion over a fuel parameter. In the
+   completeness proof, we need to predict how much fuel is going to be
+   needed in order to prove that enough fuel leads to a successful
+   parse.
+
+   To do so, we define the cost of a dotted parse tree, which is the
+   number of actions that remain to be executed to complete parsing
+   when the current state is represented by that dotted parse tree. *)
+Fixpoint ptlz_cost {hole_symbs hole_word}
+ (ptlz:ptl_zipper hole_symbs hole_word) :=
+ match ptlz with
+ | Non_terminal_pt_ptlz ptz => ptz_cost ptz
+ | Cons_ptl_ptlz pt ptlz' => pt_size pt + ptlz_cost ptlz'
+ end
+with ptz_cost {hole_symb hole_word} (ptz:pt_zipper hole_symb hole_word) :=
+ match ptz with
+ | Top_ptz => 0
+ | Cons_ptl_ptz ptl ptlz' => 1 + ptlz_cost ptlz'
+ end.
+
+Definition ptd_cost (ptd:pt_dot) :=
+ match ptd with
+ | Reduce_ptd ptl ptz => ptz_cost ptz
+ | Shift_ptd _ ptl ptlz => 1 + ptlz_cost ptlz
+ end.
+
+Lemma ptd_cost_build_from_pt {symb word}
+ (pt : parse_tree symb word) (ptz : pt_zipper symb word) :
+ pt_size pt + ptz_cost ptz = S (ptd_cost (build_pt_dot_from_pt pt ptz))
+with ptd_cost_build_from_pt_rec {symbs word}
+ (ptl : parse_tree_list symbs word) (ptlz : ptl_zipper symbs word)
+ Hsymbs :
+ ptl_size ptl + ptlz_cost ptlz = ptd_cost (build_pt_dot_from_pt_rec ptl Hsymbs ptlz).
+Proof.
+ - destruct pt as [tok|prod word ptl']=>/=.
+ + revert ptz. generalize [tok]. generalize (token_sem tok). generalize I.
+ change True with (match T (token_term tok) with T _ => True | NT _ => False end) at 1.
+ generalize (T (token_term tok)) => symb HT sem word ptz. by destruct ptz.
+ + match goal with
+ | |- context [match ?X with Some H => _ | None => _ end] => destruct X eqn:EQ
+ end.
+ * rewrite -ptd_cost_build_from_pt_rec /= plus_n_Sm //.
+ * simpl. by destruct ptl'.
+ - destruct ptl as [|?? ptl ?? pt]; [contradiction|].
+ specialize (ptd_cost_build_from_pt_rec _ _ ptl). destruct ptl.
+ + apply eq_add_S. rewrite -ptd_cost_build_from_pt /=. ring.
+ + rewrite -ptd_cost_build_from_pt_rec //=. ring.
+Qed.
+
+Lemma ptd_cost_build_from_ptl {symbs word}
+ (ptl : parse_tree_list symbs word) (ptlz : ptl_zipper symbs word) :
+ ptlz_cost ptlz = ptd_cost (build_pt_dot_from_ptl ptl ptlz).
+Proof.
+ destruct ptlz=>//. apply eq_add_S. rewrite -ptd_cost_build_from_pt /=. ring.
+Qed.
+
+Lemma next_ptd_cost ptd:
+ match next_ptd ptd with
+ | None => ptd_cost ptd = 0
+ | Some ptd' => ptd_cost ptd = S (ptd_cost ptd')
+ end.
+Proof.
+ destruct ptd as [prod word ptl ptz|tok symbq wordq ptl ptlz] =>/=.
+ - generalize (Non_terminal_pt prod ptl). clear ptl.
+ destruct ptz as [|?? ptl ?? ptlz]=>// pt. by rewrite -ptd_cost_build_from_ptl.
+ - by rewrite -ptd_cost_build_from_ptl.
+Qed.
+
+Lemma next_ptd_iter_cost ptd log_n_steps :
+ match next_ptd_iter ptd log_n_steps with
+ | None => ptd_cost ptd < 2^log_n_steps
+ | Some ptd' => ptd_cost ptd = 2^log_n_steps + ptd_cost ptd'
+ end.
+Proof.
+ revert ptd. induction log_n_steps as [|log_n_steps IH]=>ptd /=.
+ - assert (Hptd := next_ptd_cost ptd). destruct next_ptd=>//. by rewrite Hptd.
+ - rewrite Nat.add_0_r. assert (IH1 := IH ptd). destruct next_ptd_iter as [ptd'|].
+ + specialize (IH ptd'). destruct next_ptd_iter as [ptd''|].
+ * by rewrite IH1 IH -!plus_assoc.
+ * rewrite IH1. by apply plus_lt_compat_l.
+ + by apply lt_plus_trans.
+Qed.
+
+(** We now prove the completeness of the top-level parsing function.
+  The only thing left to be done is the initialization. To do so, we
+  define the initial dotted parse tree, depending on a full (top-level)
+  parse tree. *)
+
+Variable full_pt : parse_tree (NT (start_nt init)) full_word.
+
+Theorem parse_complete log_n_steps:
+ match parse safe init (full_word ++ buffer_end) log_n_steps with
+ | Parsed_pr sem buff =>
+ sem = pt_sem full_pt /\ buff = buffer_end /\ pt_size full_pt <= 2^log_n_steps
+ | Timeout_pr => 2^log_n_steps < pt_size full_pt
+ | Fail_pr => False
+ end.
+Proof.
+ assert (Hstk : ptd_stack_compat (build_pt_dot_from_pt full_pt Top_ptz) []) by
+ by apply ptd_stack_compat_build_from_pt.
+ unfold parse.
+ assert (Hparse := parse_fix_next_ptd_iter _ _ log_n_steps (parse_subproof init) Hstk).
+ rewrite -ptd_buffer_build_from_pt -sem_build_from_pt /= in Hparse.
+ assert (Hcost := next_ptd_iter_cost (build_pt_dot_from_pt full_pt Top_ptz) log_n_steps).
+ destruct next_ptd_iter.
+ - destruct Hparse as (? & -> & ?). apply (f_equal S) in Hcost.
+ rewrite -ptd_cost_build_from_pt Nat.add_0_r in Hcost. rewrite Hcost.
+ apply le_lt_n_Sm, le_plus_l.
+ - rewrite Hparse. split; [|split]=>//. apply lt_le_S in Hcost.
+ by rewrite -ptd_cost_build_from_pt Nat.add_0_r in Hcost.
+Qed.
+
+End Completeness_Proof.
+
+End Make.
diff --git a/MenhirLib/Interpreter_correct.v b/MenhirLib/Interpreter_correct.v
new file mode 100644
index 00000000..1325f610
--- /dev/null
+++ b/MenhirLib/Interpreter_correct.v
@@ -0,0 +1,175 @@
+(****************************************************************************)
+(* *)
+(* Menhir *)
+(* *)
+(* Jacques-Henri Jourdan, CNRS, LRI, Université Paris Sud *)
+(* *)
+(* Copyright Inria. All rights reserved. This file is distributed under *)
+(* the terms of the GNU Lesser General Public License as published by the *)
+(* Free Software Foundation, either version 3 of the License, or (at your *)
+(* option) any later version, as described in the file LICENSE. *)
+(* *)
+(****************************************************************************)
+
+From Coq Require Import List Syntax.
+Require Import Alphabet.
+Require Grammar Automaton Interpreter.
+From Coq.ssr Require Import ssreflect.
+
+Module Make(Import A:Automaton.T) (Import Inter:Interpreter.T A).
+
+(** * Correctness of the interpreter **)
+
+(** We prove that, in any case, if the interpreter accepts and returns a
+    semantic value, then this value is a semantic value of the input. **)
+
+Section Init.
+
+Variable init:initstate.
+
+(** [word_has_stack_semantics] relates a word with a stack, stating that the
+ word is a concatenation of words that have the semantic values stored in
+ the stack. **)
+Inductive word_has_stack_semantics:
+ forall (word:list token) (stack:stack), Prop :=
+ | Nil_stack_whss: word_has_stack_semantics [] []
+ | Cons_stack_whss:
+ forall (wordq:list token) (stackq:stack),
+ word_has_stack_semantics wordq stackq ->
+
+ forall (wordt:list token) (s:noninitstate)
+ (pt:parse_tree (last_symb_of_non_init_state s) wordt),
+
+ word_has_stack_semantics
+ (wordq++wordt) (existT noninitstate_type s (pt_sem pt)::stackq).
+
+(** [pop] preserves the invariant **)
+Lemma pop_spec_ptl A symbols_to_pop action word_stk stk (res : A) stk' :
+ pop_spec symbols_to_pop stk action stk' res ->
+ word_has_stack_semantics word_stk stk ->
+ exists word_stk' word_res (ptl:parse_tree_list symbols_to_pop word_res),
+ (word_stk' ++ word_res = word_stk)%list /\
+ word_has_stack_semantics word_stk' stk' /\
+ ptl_sem ptl action = res.
+Proof.
+ intros Hspec. revert word_stk.
+ induction Hspec as [stk sem|symbols_to_pop st stk action sem stk' res Hspec IH];
+ intros word_stk Hword_stk.
+ - exists word_stk, [], Nil_ptl. rewrite -app_nil_end. eauto.
+ - inversion Hword_stk. subst_existT.
+ edestruct IH as (word_stk' & word_res & ptl & ? & Hword_stk'' & ?); [eassumption|].
+ subst. eexists word_stk', (word_res ++ _)%list, (Cons_ptl ptl _).
+ split; [|split]=>//. rewrite app_assoc //.
+Qed.
+
+(** [reduce_step] preserves the invariant **)
+Lemma reduce_step_invariant (stk:stack) (prod:production) Hv Hi word buffer :
+ word_has_stack_semantics word stk ->
+ match reduce_step init stk prod buffer Hv Hi with
+ | Accept_sr sem buffer_new =>
+ exists pt : parse_tree (NT (start_nt init)) word,
+ buffer = buffer_new /\ pt_sem pt = sem
+ | Progress_sr stk' buffer_new =>
+ buffer = buffer_new /\ word_has_stack_semantics word stk'
+ | Fail_sr => True
+ end.
+Proof.
+ intros Hword_stk. unfold reduce_step.
+ match goal with
+ | |- context [pop_state_valid init ?stp stk ?x1 ?x2 ?x3 ?x4 ?x5] =>
+ generalize (pop_state_valid init stp stk x1 x2 x3 x4 x5)
+ end.
+ destruct pop as [stk' sem] eqn:Hpop=>/= Hv'.
+ apply pop_spec_ok in Hpop. apply pop_spec_ptl with (word_stk := word) in Hpop=>//.
+ destruct Hpop as (word1 & word2 & ptl & <- & Hword1 & <-).
+ generalize (reduce_step_subproof1 init stk prod Hv stk' (fun _ : True => Hv')).
+ destruct goto_table as [[st' EQ]|].
+ - intros _. split=>//.
+ change (ptl_sem ptl (prod_action prod)) with (pt_sem (Non_terminal_pt prod ptl)).
+ generalize (Non_terminal_pt prod ptl). rewrite ->EQ. intros pt. by constructor.
+ - intros Hstk'. destruct Hword1; [|by destruct Hstk'].
+ generalize (reduce_step_subproof0 init prod [] (fun _ : True => Hstk')).
+ simpl in Hstk'. rewrite -Hstk' // => EQ. rewrite cast_eq.
+ exists (Non_terminal_pt prod ptl). by split.
+Qed.
+
+(** [step] preserves the invariant **)
+Lemma step_invariant stk word buffer safe Hi :
+ word_has_stack_semantics word stk ->
+ match step safe init stk buffer Hi with
+ | Accept_sr sem buffer_new =>
+ exists word_new (pt:parse_tree (NT (start_nt init)) word_new),
+ (word ++ buffer = word_new ++ buffer_new)%buf /\
+ pt_sem pt = sem
+ | Progress_sr stk_new buffer_new =>
+ exists word_new,
+ (word ++ buffer = word_new ++ buffer_new)%buf /\
+ word_has_stack_semantics word_new stk_new
+ | Fail_sr => True
+ end.
+Proof.
+ intros Hword_stk. unfold step.
+ generalize (reduce_ok safe (state_of_stack init stk)).
+ destruct action_table as [prod|awt].
+ - intros Hv.
+ apply (reduce_step_invariant stk prod (fun _ => Hv) Hi word buffer) in Hword_stk.
+ destruct reduce_step=>//.
+ + destruct Hword_stk as (pt & <- & <-); eauto.
+ + destruct Hword_stk as [<- ?]; eauto.
+ - destruct buffer as [tok buffer]=>/=.
+ move=> /(_ (token_term tok)) Hv. destruct (awt (token_term tok)) as [st EQ|prod|]=>//.
+ + eexists _. split; [by apply app_buf_assoc with (l2 := [_])|].
+ change (token_sem tok) with (pt_sem (Terminal_pt tok)).
+ generalize (Terminal_pt tok). generalize [tok].
+ rewrite -> EQ=>word' pt /=. by constructor.
+ + apply (reduce_step_invariant stk prod (fun _ => Hv) Hi word (tok::buffer))
+ in Hword_stk.
+ destruct reduce_step=>//.
+ * destruct Hword_stk as (pt & <- & <-); eauto.
+ * destruct Hword_stk as [<- ?]; eauto.
+Qed.
+
+(** [parse_fix] preserves the invariant **)
+Lemma parse_fix_invariant stk word buffer safe log_n_steps Hi :
+ word_has_stack_semantics word stk ->
+ match proj1_sig (parse_fix safe init stk buffer log_n_steps Hi) with
+ | Accept_sr sem buffer_new =>
+ exists word_new (pt:parse_tree (NT (start_nt init)) word_new),
+ (word ++ buffer = word_new ++ buffer_new)%buf /\
+ pt_sem pt = sem
+ | Progress_sr stk_new buffer_new =>
+ exists word_new,
+ (word ++ buffer = word_new ++ buffer_new)%buf /\
+ word_has_stack_semantics word_new stk_new
+ | Fail_sr => True
+ end.
+Proof.
+ revert stk word buffer Hi.
+ induction log_n_steps as [|log_n_steps IH]=>/= stk word buffer Hi Hstk;
+ [by apply step_invariant|].
+ assert (IH1 := IH stk word buffer Hi Hstk).
+ destruct parse_fix as [[] Hi']=>/=; try by apply IH1.
+ destruct IH1 as (word' & -> & Hstk')=>//. by apply IH.
+Qed.
+
+(** The interpreter is correct: if it returns a semantic value, then the input
+ word has this semantic value.
+**)
+Theorem parse_correct safe buffer log_n_steps:
+ match parse safe init buffer log_n_steps with
+ | Parsed_pr sem buffer_new =>
+ exists word_new (pt:parse_tree (NT (start_nt init)) word_new),
+ buffer = (word_new ++ buffer_new)%buf /\
+ pt_sem pt = sem
+ | _ => True
+ end.
+Proof.
+ unfold parse.
+ assert (Hparse := parse_fix_invariant [] [] buffer safe log_n_steps
+ (parse_subproof init)).
+ destruct proj1_sig=>//. apply Hparse. constructor.
+Qed.
+
+End Init.
+
+End Make.
diff --git a/MenhirLib/Main.v b/MenhirLib/Main.v
new file mode 100644
index 00000000..f6158074
--- /dev/null
+++ b/MenhirLib/Main.v
@@ -0,0 +1,79 @@
+(****************************************************************************)
+(* *)
+(* Menhir *)
+(* *)
+(* Jacques-Henri Jourdan, CNRS, LRI, Université Paris Sud *)
+(* *)
+(* Copyright Inria. All rights reserved. This file is distributed under *)
+(* the terms of the GNU Lesser General Public License as published by the *)
+(* Free Software Foundation, either version 3 of the License, or (at your *)
+(* option) any later version, as described in the file LICENSE. *)
+(* *)
+(****************************************************************************)
+
+Require Grammar Automaton Interpreter_correct Interpreter_complete.
+From Coq Require Import Syntax Arith.
+
+Module Make(Export Aut:Automaton.T).
+Export Aut.Gram.
+Export Aut.GramDefs.
+
+Module Import Inter := Interpreter.Make Aut.
+Module Correct := Interpreter_correct.Make Aut Inter.
+Module Complete := Interpreter_complete.Make Aut Inter.
+
+Definition complete_validator:unit->bool := Complete.Valid.is_complete.
+Definition safe_validator:unit->bool := ValidSafe.is_safe.
+Definition parse (safe:safe_validator ()=true) init log_n_steps buffer :
+ parse_result (symbol_semantic_type (NT (start_nt init))):=
+ parse (ValidSafe.safe_is_validator safe) init buffer log_n_steps.
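+(* Note that the caller has to provide a proof of [safe_validator () = true].
+   Since [safe_validator] is an executable boolean check, such a proof can
+   typically be obtained by computation (e.g. by [eq_refl]) when the check
+   succeeds. *)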
+
+(** Correctness theorem. **)
+Theorem parse_correct
+ (safe:safe_validator ()= true) init log_n_steps buffer:
+ match parse safe init log_n_steps buffer with
+ | Parsed_pr sem buffer_new =>
+ exists word (pt : parse_tree (NT (start_nt init)) word),
+ buffer = (word ++ buffer_new)%buf /\
+ pt_sem pt = sem
+ | _ => True
+ end.
+Proof. apply Correct.parse_correct. Qed.
+
+(** Completeness theorem. **)
+Theorem parse_complete
+ (safe:safe_validator () = true) init log_n_steps word buffer_end:
+ complete_validator () = true ->
+ forall tree:parse_tree (NT (start_nt init)) word,
+ match parse safe init log_n_steps (word ++ buffer_end) with
+ | Fail_pr => False
+ | Parsed_pr sem_res buffer_end_res =>
+ sem_res = pt_sem tree /\ buffer_end_res = buffer_end /\
+ pt_size tree <= 2^log_n_steps
+ | Timeout_pr => 2^log_n_steps < pt_size tree
+ end.
+Proof.
+ intros. now apply Complete.parse_complete, Complete.Valid.complete_is_validator.
+Qed.
+
+(** Unambiguity theorem. **)
+Theorem unambiguity:
+ safe_validator () = true -> complete_validator () = true -> inhabited token ->
+ forall init word,
+ forall (tree1 tree2:parse_tree (NT (start_nt init)) word),
+ pt_sem tree1 = pt_sem tree2.
+Proof.
+ intros Hsafe Hcomp [tok] init word tree1 tree2.
+ pose (buf_end := cofix buf_end := (tok :: buf_end)%buf).
+ assert (Hcomp1 := parse_complete Hsafe init (pt_size tree1) word buf_end
+ Hcomp tree1).
+ assert (Hcomp2 := parse_complete Hsafe init (pt_size tree1) word buf_end
+ Hcomp tree2).
+ destruct parse.
+ - destruct Hcomp1.
+ - exfalso. eapply PeanoNat.Nat.lt_irrefl. etransitivity; [|apply Hcomp1].
+ eapply Nat.pow_gt_lin_r. constructor.
+ - destruct Hcomp1 as [-> _], Hcomp2 as [-> _]. reflexivity.
+Qed.
+
+End Make.
diff --git a/MenhirLib/Validator_classes.v b/MenhirLib/Validator_classes.v
new file mode 100644
index 00000000..c043cb73
--- /dev/null
+++ b/MenhirLib/Validator_classes.v
@@ -0,0 +1,74 @@
+(****************************************************************************)
+(* *)
+(* Menhir *)
+(* *)
+(* Jacques-Henri Jourdan, CNRS, LRI, Université Paris Sud *)
+(* *)
+(* Copyright Inria. All rights reserved. This file is distributed under *)
+(* the terms of the GNU Lesser General Public License as published by the *)
+(* Free Software Foundation, either version 3 of the License, or (at your *)
+(* option) any later version, as described in the file LICENSE. *)
+(* *)
+(****************************************************************************)
+
+From Coq Require Import List.
+From Coq.ssr Require Import ssreflect.
+Require Import Alphabet.
+
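+(** [IsValidator P b] packages an executable boolean check [b] together with
+    a proof that [b = true] entails the specification [P]. The instances and
+    hints below let the typeclass machinery synthesize the boolean validator
+    from the shape of the specification. *)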
+Class IsValidator (P : Prop) (b : bool) :=
+ is_validator : b = true -> P.
+Hint Mode IsValidator + -.
+
+Instance is_validator_true : IsValidator True true.
+Proof. done. Qed.
+
+Instance is_validator_false : IsValidator False false.
+Proof. done. Qed.
+
+Instance is_validator_eq_true b :
+ IsValidator (b = true) b.
+Proof. done. Qed.
+
+Instance is_validator_and P1 b1 P2 b2 `{IsValidator P1 b1} `{IsValidator P2 b2}:
+ IsValidator (P1 /\ P2) (if b1 then b2 else false).
+Proof. split; destruct b1, b2; auto using is_validator. Qed.
+
+Instance is_validator_comparable_leibniz_eq A (C:Comparable A) (x y : A) :
+ ComparableLeibnizEq C ->
+ IsValidator (x = y) (compare_eqb x y).
+Proof. intros ??. by apply compare_eqb_iff. Qed.
+
+Instance is_validator_comparable_eq_impl A `(Comparable A) (x y : A) P b :
+ IsValidator P b ->
+ IsValidator (x = y -> P) (if compare_eqb x y then b else true).
+Proof.
+ intros Hval Val ->. rewrite /compare_eqb compare_refl in Val. auto.
+Qed.
+
+Lemma is_validator_forall_finite A P b `(Finite A) :
+ (forall (x : A), IsValidator (P x) (b x)) ->
+ IsValidator (forall (x : A), P x) (forallb b all_list).
+Proof.
+ move=> ? /forallb_forall.
+ auto using all_list_forall, is_validator, forallb_forall.
+Qed.
+(* We do not use an instance directly here, because we somehow need to
+   force Coq to instantiate b with a lambda. *)
+Hint Extern 2 (IsValidator (forall x : ?A, _) _) =>
+ eapply (is_validator_forall_finite _ _ (fun (x:A) => _))
+ : typeclass_instances.
+
+(* Hint for synthesizing pattern-matching. *)
+Hint Extern 2 (IsValidator (match ?u with _ => _ end) ?b0) =>
+ let b := fresh "b" in
+ unshelve notypeclasses refine (let b : bool := _ in _);
+  [destruct u; intros; shelve|]; (* Synthesize `match .. with` in the validator. *)
+ unify b b0;
+ unfold b; destruct u; clear b
+ : typeclass_instances.
+
+(* Hint for unfolding definitions. This is necessary because many
+ hints for IsValidator use [Hint Extern], which do not automatically
+ unfold identifiers. *)
+Hint Extern 100 (IsValidator ?X _) => unfold X
+ : typeclass_instances.
diff --git a/MenhirLib/Validator_complete.v b/MenhirLib/Validator_complete.v
new file mode 100644
index 00000000..ebb74500
--- /dev/null
+++ b/MenhirLib/Validator_complete.v
@@ -0,0 +1,394 @@
+(****************************************************************************)
+(* *)
+(* Menhir *)
+(* *)
+(* Jacques-Henri Jourdan, CNRS, LRI, Université Paris Sud *)
+(* *)
+(* Copyright Inria. All rights reserved. This file is distributed under *)
+(* the terms of the GNU Lesser General Public License as published by the *)
+(* Free Software Foundation, either version 3 of the License, or (at your *)
+(* option) any later version, as described in the file LICENSE. *)
+(* *)
+(****************************************************************************)
+
+From Coq Require Import List Syntax Derive.
+From Coq.ssr Require Import ssreflect.
+Require Automaton.
+Require Import Alphabet Validator_classes.
+
+Module Make(Import A:Automaton.T).
+
+(** We instantiate some sets/maps. **)
+Module TerminalComparableM <: ComparableM.
+ Definition t := terminal.
+ Instance tComparable : Comparable t := _.
+End TerminalComparableM.
+Module TerminalOrderedType := OrderedType_from_ComparableM TerminalComparableM.
+Module StateProdPosComparableM <: ComparableM.
+ Definition t := (state*production*nat)%type.
+ Instance tComparable : Comparable t := _.
+End StateProdPosComparableM.
+Module StateProdPosOrderedType :=
+ OrderedType_from_ComparableM StateProdPosComparableM.
+
+Module TerminalSet := FSetAVL.Make TerminalOrderedType.
+Module StateProdPosMap := FMapAVL.Make StateProdPosOrderedType.
+
+(** Nullable predicate for symbols and lists of symbols. **)
+Definition nullable_symb (symbol:symbol) :=
+ match symbol with
+ | NT nt => nullable_nterm nt
+ | _ => false
+ end.
+
+Definition nullable_word (word:list symbol) :=
+ forallb nullable_symb word.
+
+(** First predicate for non-terminals, symbols and lists of symbols, given as FSets. **)
+Definition first_nterm_set (nterm:nonterminal) :=
+ fold_left (fun acc t => TerminalSet.add t acc)
+ (first_nterm nterm) TerminalSet.empty.
+
+Definition first_symb_set (symbol:symbol) :=
+ match symbol with
+ | NT nt => first_nterm_set nt
+ | T t => TerminalSet.singleton t
+ end.
+
+Fixpoint first_word_set (word:list symbol) :=
+ match word with
+ | [] => TerminalSet.empty
+ | t::q =>
+ if nullable_symb t then
+ TerminalSet.union (first_symb_set t) (first_word_set q)
+ else
+ first_symb_set t
+ end.
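+(* [first_word_set w] computes the set of terminals that may begin a word
+   derived from the symbol sequence [w], using the nullable and first
+   information above. *)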
+
+(** Small helper for finding the part of an item that is after the dot. **)
+Definition future_of_prod prod dot_pos : list symbol :=
+ (fix loop n lst :=
+ match n with
+ | O => lst
+ | S x => match loop x lst with [] => [] | _::q => q end
+ end)
+ dot_pos (rev' (prod_rhs_rev prod)).
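+(* For instance, if the right-hand side of [prod] is the sequence A B C
+   (so that [prod_rhs_rev prod] is [C; B; A]), then [future_of_prod prod 1]
+   is [B; C]: the symbols remaining once the dot has moved past A. *)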
+
+(** We build a fast map to store all the items of all the states. **)
+Definition items_map (_:unit): StateProdPosMap.t TerminalSet.t :=
+ fold_left (fun acc state =>
+ fold_left (fun acc item =>
+ let key := (state, prod_item item, dot_pos_item item) in
+ let data := fold_left (fun acc t => TerminalSet.add t acc)
+ (lookaheads_item item) TerminalSet.empty
+ in
+ let old :=
+ match StateProdPosMap.find key acc with
+ | Some x => x | None => TerminalSet.empty
+ end
+ in
+ StateProdPosMap.add key (TerminalSet.union data old) acc
+ ) (items_of_state state) acc
+ ) all_list (StateProdPosMap.empty TerminalSet.t).
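+(* The resulting map is keyed by (state, production, dot position), and maps
+   each key to the union of the lookahead sets of the corresponding items. *)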
+
+(** We need to avoid recomputing items_map each time we need it. For that
+   purpose, we declare a typeclass specifying that some map is equal to
+   items_map. *)
+Class IsItemsMap m := is_items_map : m = items_map ().
+
+(** Accessor. **)
+Definition find_items_map items_map state prod dot_pos : TerminalSet.t :=
+ match StateProdPosMap.find (state, prod, dot_pos) items_map with
+ | None => TerminalSet.empty
+ | Some x => x
+ end.
+
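+(** [state_has_future st prod fut lookahead] states that [st] contains an item
+    for production [prod] whose dot is placed so that [fut] is the part of the
+    right-hand side remaining after the dot, with [lookahead] in the
+    corresponding lookahead set. **)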
+Definition state_has_future state prod (fut:list symbol) (lookahead:terminal) :=
+ exists dot_pos:nat,
+ fut = future_of_prod prod dot_pos /\
+ TerminalSet.In lookahead (find_items_map (items_map ()) state prod dot_pos).
+
+(** Iterator over items. **)
+Definition forallb_items items_map (P:state -> production -> nat -> TerminalSet.t -> bool): bool:=
+ StateProdPosMap.fold (fun key set acc =>
+ match key with (st, p, pos) => (acc && P st p pos set)%bool end
+ ) items_map true.
+
+(** Typeclass instances for synthesizing the validator. *)
+
+Instance is_validator_subset S1 S2 :
+ IsValidator (TerminalSet.Subset S1 S2) (TerminalSet.subset S1 S2).
+Proof. intros ?. by apply TerminalSet.subset_2. Qed.
+
+(* While the specification of the validator always quantifies over
+   possible lookahead tokens individually, the validator usually
+   handles lookahead sets directly instead, for better performance.
+
+   For instance, the validator for [state_has_future], which speaks
+   about one single lookahead token, is a subset operation:
+*)
+Lemma is_validator_state_has_future_subset st prod pos lookahead lset im fut :
+ TerminalSet.In lookahead lset ->
+ fut = future_of_prod prod pos ->
+ IsItemsMap im ->
+ IsValidator (state_has_future st prod fut lookahead)
+ (TerminalSet.subset lset (find_items_map im st prod pos)).
+Proof.
+ intros ? -> -> HSS%TerminalSet.subset_2. exists pos. split=>//. by apply HSS.
+Qed.
+(* We do not declare this lemma as an instance, and use [Hint Extern]
+   instead, because the typeclass mechanism has trouble instantiating
+   some evars if we do not explicitly call [eassumption]. *)
+Hint Extern 2 (IsValidator (state_has_future _ _ _ _) _) =>
+ eapply is_validator_state_has_future_subset; [eassumption|eassumption || reflexivity|]
+: typeclass_instances.
+
+(* As said previously, we manipulate lookahead terminal sets instead of
+   individual lookahead tokens. Hence, when we quantify over a lookahead set
+ in the specification, we do not do anything in the executable
+ validator.
+
+ This instance is used for [non_terminal_closed]. *)
+Instance is_validator_forall_lookahead_set lset P b:
+ (forall lookahead, TerminalSet.In lookahead lset -> IsValidator (P lookahead) b) ->
+ IsValidator (forall lookahead, TerminalSet.In lookahead lset -> P lookahead) b.
+Proof. unfold IsValidator. firstorder. Qed.
+
+
+(* Dually, we sometimes still need to explicitly iterate over a
+ lookahead set. This is what this lemma allows.
+ Used only in [end_reduce]. *)
+Lemma is_validator_iterate_lset P b lookahead lset :
+ TerminalSet.In lookahead lset ->
+ IsValidator P (b lookahead) ->
+ IsValidator P (TerminalSet.fold (fun lookahead acc =>
+ if acc then b lookahead else false) lset true).
+Proof.
+ intros Hlset%TerminalSet.elements_1 Hval Val. apply Hval.
+ revert Val. rewrite TerminalSet.fold_1. generalize true at 1. clear -Hlset.
+ induction Hlset as [? l <-%compare_eq|? l ? IH]=> /= b' Val.
+ - destruct (b lookahead). by destruct b'. exfalso. by induction l; destruct b'.
+ - eauto.
+Qed.
+Hint Extern 100 (IsValidator _ _) =>
+ match goal with
+ | H : TerminalSet.In ?lookahead ?lset |- _ =>
+ eapply (is_validator_iterate_lset _ (fun lookahead => _) _ _ H); clear H
+ end
+: typeclass_instances.
+
+(* We often quantify over all the items of all the states of the
+   automaton. This lemma and the accompanying [Hint Extern]
+   declaration allow generating the corresponding executable
+   validator.
+
+   Note that it turns out that, in all the uses of this pattern, the
+   first thing we do for each item is pattern-matching over the
+   future. This lemma also embeds this pattern-matching, which makes
+   it possible to get the hypothesis [fut' = future_of_prod prod (S pos)]
+   in the non-nil branch.
+
+ Moreover, note, again, that while the specification quantifies over
+ lookahead terminals individually, the code provides lookahead sets
+ instead. *)
+Lemma is_validator_forall_items P1 b1 P2 b2 im :
+ IsItemsMap im ->
+
+ (forall st prod lookahead lset pos,
+ TerminalSet.In lookahead lset ->
+ [] = future_of_prod prod pos ->
+ IsValidator (P1 st prod lookahead) (b1 st prod pos lset)) ->
+
+ (forall st prod pos lookahead lset s fut',
+ TerminalSet.In lookahead lset ->
+ fut' = future_of_prod prod (S pos) ->
+ IsValidator (P2 st prod lookahead s fut') (b2 st prod pos lset s fut')) ->
+
+ IsValidator (forall st prod fut lookahead,
+ state_has_future st prod fut lookahead ->
+ match fut with
+ | [] => P1 st prod lookahead
+ | s :: fut' => P2 st prod lookahead s fut'
+ end)
+ (forallb_items im (fun st prod pos lset =>
+ match future_of_prod prod pos with
+ | [] => b1 st prod pos lset
+ | s :: fut' => b2 st prod pos lset s fut'
+ end)).
+Proof.
+ intros -> Hval1 Hval2 Val st prod fut lookahead (pos & -> & Hlookahead).
+ rewrite /forallb_items StateProdPosMap.fold_1 in Val.
+ assert (match future_of_prod prod pos with
+ | [] => b1 st prod pos (find_items_map (items_map ()) st prod pos)
+ | s :: fut' => b2 st prod pos (find_items_map (items_map ()) st prod pos) s fut'
+ end = true).
+ - unfold find_items_map in *.
+ assert (Hfind := @StateProdPosMap.find_2 _ (items_map ()) (st, prod, pos)).
+ destruct StateProdPosMap.find as [lset|]; [|by edestruct (TerminalSet.empty_1); eauto].
+ specialize (Hfind _ eq_refl). apply StateProdPosMap.elements_1 in Hfind.
+ revert Val. generalize true at 1.
+ induction Hfind as [[? ?] l [?%compare_eq ?]|??? IH]=>?.
+ + simpl in *; subst.
+ match goal with |- _ -> ?X = true => destruct X end; [done|].
+ rewrite Bool.andb_false_r. clear. induction l as [|[[[??]?]?] l IH]=>//.
+ + apply IH.
+ - destruct future_of_prod eqn:EQ. by eapply Hval1; eauto.
+ eapply Hval2 with (pos := pos); eauto; [].
+ revert EQ. unfold future_of_prod=>-> //.
+Qed.
+(* We need a hint for explicitly instantiating b1 and b2 with lambdas. *)
+Hint Extern 0 (IsValidator
+ (forall st prod fut lookahead,
+ state_has_future st prod fut lookahead -> _)
+ _) =>
+ eapply (is_validator_forall_items _ (fun st prod pos lset => _)
+ _ (fun st prod pos lset s fut' => _))
+ : typeclass_instances.
+
+(* Used in [start_future] only. *)
+Instance is_validator_forall_state_has_future im st prod :
+ IsItemsMap im ->
+ IsValidator
+ (forall look, state_has_future st prod (rev' (prod_rhs_rev prod)) look)
+ (let lookaheads := find_items_map im st prod 0 in
+ forallb (fun t => TerminalSet.mem t lookaheads) all_list).
+Proof.
+ move=> -> /forallb_forall Val look.
+ specialize (Val look (all_list_forall _)). exists 0. split=>//.
+ by apply TerminalSet.mem_2.
+Qed.
+
+(** * Validation for completeness **)
+
+(** The nullable predicate is a fixpoint: it is correct. **)
+Definition nullable_stable :=
+ forall p:production,
+ if nullable_word (prod_rhs_rev p) then
+ nullable_nterm (prod_lhs p) = true
+ else True.
+
+(** The first predicate is a fixpoint: it is correct. **)
+Definition first_stable:=
+ forall (p:production),
+ TerminalSet.Subset (first_word_set (rev' (prod_rhs_rev p)))
+ (first_nterm_set (prod_lhs p)).
+
+(** The initial state has all the S=>.u items, where S is the start non-terminal **)
+Definition start_future :=
+ forall (init:initstate) (p:production),
+ prod_lhs p = start_nt init ->
+ forall (t:terminal),
+ state_has_future init p (rev' (prod_rhs_rev p)) t.
+
+(** If a state contains an item of the form A->_.av[[b]], where a is a
+ terminal, then reading an a does a [Shift_act], to a state containing
+ an item of the form A->_.v[[b]]. **)
+Definition terminal_shift :=
+ forall (s1:state) prod fut lookahead,
+ state_has_future s1 prod fut lookahead ->
+ match fut with
+ | T t::q =>
+ match action_table s1 with
+ | Lookahead_act awp =>
+ match awp t with
+ | Shift_act s2 _ =>
+ state_has_future s2 prod q lookahead
+ | _ => False
+ end
+ | _ => False
+ end
+ | _ => True
+ end.
+
+(** If a state contains an item of the form A->_.[[a]], then either we do a
+    [Default_reduce_act] of the corresponding production, or a is a
+    terminal (i.e. there is a lookahead terminal), and reading a does a
+ [Reduce_act] of the corresponding production. **)
+Definition end_reduce :=
+ forall (s:state) prod fut lookahead,
+ state_has_future s prod fut lookahead ->
+ match fut with
+ | [] =>
+ match action_table s with
+ | Default_reduce_act p => p = prod
+ | Lookahead_act awt =>
+ match awt lookahead with
+ | Reduce_act p => p = prod
+ | _ => False
+ end
+ end
+ | _ => True
+ end.
+
+Definition is_end_reduce items_map :=
+ forallb_items items_map (fun s prod pos lset =>
+ match future_of_prod prod pos with
+ | [] =>
+ match action_table s with
+ | Default_reduce_act p => compare_eqb p prod
+ | Lookahead_act awt =>
+ TerminalSet.fold (fun lookahead acc =>
+ match awt lookahead with
+ | Reduce_act p => (acc && compare_eqb p prod)%bool
+ | _ => false
+ end) lset true
+ end
+ | _ => true
+ end).
+
+(** If a state contains an item of the form A->_.Bv[[b]], where B is a
+ non terminal, then the goto table says we have to go to a state containing
+ an item of the form A->_.v[[b]]. **)
+Definition non_terminal_goto :=
+ forall (s1:state) prod fut lookahead,
+ state_has_future s1 prod fut lookahead ->
+ match fut with
+ | NT nt::q =>
+ match goto_table s1 nt with
+ | Some (exist _ s2 _) =>
+ state_has_future s2 prod q lookahead
+ | None => False
+ end
+ | _ => True
+ end.
+
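+(** The goto table of an initial state has no entry for its own start
+    non-terminal. **)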
+Definition start_goto :=
+ forall (init:initstate),
+ match goto_table init (start_nt init) with
+ | None => True
+ | Some _ => False
+ end.
+
+(** Closure property of item sets: if a state contains an item of the form
+ A->_.Bv[[b]], then for each production B->u and each terminal a of
+ first(vb), the state contains an item of the form B->_.u[[a]] **)
+Definition non_terminal_closed :=
+ forall s1 prod fut lookahead,
+ state_has_future s1 prod fut lookahead ->
+ match fut with
+ | NT nt::q =>
+ forall p, prod_lhs p = nt ->
+ (if nullable_word q then
+ state_has_future s1 p (future_of_prod p 0) lookahead
+ else True) /\
+ (forall lookahead2,
+ TerminalSet.In lookahead2 (first_word_set q) ->
+ state_has_future s1 p (future_of_prod p 0) lookahead2)
+ | _ => True
+ end.
+
+(** The automaton is complete **)
+Definition complete :=
+ nullable_stable /\ first_stable /\ start_future /\ terminal_shift
+ /\ end_reduce /\ non_terminal_goto /\ start_goto /\ non_terminal_closed.
+
+Derive is_complete_0
+SuchThat (forall im, IsItemsMap im -> IsValidator complete (is_complete_0 im))
+As complete_0_is_validator.
+Proof. intros im. subst is_complete_0. instantiate (1:=fun im => _). apply _. Qed.
+
+Definition is_complete (_:unit) := is_complete_0 (items_map ()).
+Lemma complete_is_validator : IsValidator complete (is_complete ()).
+Proof. by apply complete_0_is_validator. Qed.
+
+End Make.
diff --git a/MenhirLib/Validator_safe.v b/MenhirLib/Validator_safe.v
new file mode 100644
index 00000000..2d2ea4b3
--- /dev/null
+++ b/MenhirLib/Validator_safe.v
@@ -0,0 +1,234 @@
+(****************************************************************************)
+(* *)
+(* Menhir *)
+(* *)
+(* Jacques-Henri Jourdan, CNRS, LRI, Université Paris Sud *)
+(* *)
+(* Copyright Inria. All rights reserved. This file is distributed under *)
+(* the terms of the GNU Lesser General Public License as published by the *)
+(* Free Software Foundation, either version 3 of the License, or (at your *)
+(* option) any later version, as described in the file LICENSE. *)
+(* *)
+(****************************************************************************)
+
+From Coq Require Import List Syntax Derive.
+From Coq.ssr Require Import ssreflect.
+Require Automaton.
+Require Import Alphabet Validator_classes.
+
+Module Make(Import A:Automaton.T).
+
+(** The singleton predicate for states **)
+Definition singleton_state_pred (state:state) :=
+ (fun state' => match compare state state' with Eq => true |_ => false end).
+
+(** [past_state_of_non_init_state], extended for all states. **)
+Definition past_state_of_state (state:state) :=
+ match state with
+ | Init _ => []
+ | Ninit nis => past_state_of_non_init_state nis
+ end.
+
+(** Concatenations of last and past **)
+Definition head_symbs_of_state (state:state) :=
+ match state with
+ | Init _ => []
+ | Ninit s =>
+ last_symb_of_non_init_state s::past_symb_of_non_init_state s
+ end.
+Definition head_states_of_state (state:state) :=
+ singleton_state_pred state::past_state_of_state state.
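+(* Roughly, [head_symbs_of_state s] lists the symbols known to be on top of
+   the stack when the automaton is in state [s], and [head_states_of_state s]
+   gives the corresponding predicates constraining the states stored on the
+   stack, starting with [s] itself. *)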
+
+(** * Validation for correctness **)
+
+(** Prefix predicate between two lists of symbols. **)
+Inductive prefix: list symbol -> list symbol -> Prop :=
+| prefix_nil: forall l, prefix [] l
+| prefix_cons: forall l1 l2 x, prefix l1 l2 -> prefix (x::l1) (x::l2).
+
+(** [prefix] is transitive **)
+Lemma prefix_trans:
+ forall (l1 l2 l3:list symbol), prefix l1 l2 -> prefix l2 l3 -> prefix l1 l3.
+Proof.
+ intros l1 l2 l3 H1 H2. revert l3 H2.
+ induction H1; [now constructor|]. inversion 1. subst. constructor. eauto.
+Qed.
+
+Fixpoint is_prefix (l1 l2:list symbol) :=
+ match l1, l2 with
+ | [], _ => true
+ | t1::q1, t2::q2 => (compare_eqb t1 t2 && is_prefix q1 q2)%bool
+ | _::_, [] => false
+ end.
+
+Instance prefix_is_validator l1 l2 : IsValidator (prefix l1 l2) (is_prefix l1 l2).
+Proof.
+ revert l2. induction l1 as [|x1 l1 IH]=>l2 Hpref.
+ - constructor.
+ - destruct l2 as [|x2 l2]=>//.
+ move: Hpref=> /andb_prop [/compare_eqb_iff -> /IH ?]. by constructor.
+Qed.
+
+(** If we shift, then the known top symbols of the destination state form
+    a prefix of the known top symbols of the source state, with the new
+    symbol added. **)
+Definition shift_head_symbs :=
+ forall s,
+ match action_table s with
+ | Lookahead_act awp => forall t,
+ match awp t with
+ | Shift_act s2 _ =>
+ prefix (past_symb_of_non_init_state s2) (head_symbs_of_state s)
+ | _ => True
+ end
+ | _ => True
+ end.
+
+(** When a goto happens, the known top symbols of the destination state form
+    a prefix of the known top symbols of the source state, with the new
+    symbol added. **)
+Definition goto_head_symbs :=
+ forall s nt,
+ match goto_table s nt with
+ | Some (exist _ s2 _) =>
+ prefix (past_symb_of_non_init_state s2) (head_symbs_of_state s)
+ | None => True
+ end.
+
+(** We have to state the same kind of checks for the assumptions about the
+    state stack. However, these assumptions are predicates. So we define
+    a notion of "prefix" over lists of predicates, which means, basically,
+    that one assumption entails another. **)
+Inductive prefix_pred: list (state->bool) -> list (state->bool) -> Prop :=
+ | prefix_pred_nil: forall l, prefix_pred [] l
+ | prefix_pred_cons: forall l1 l2 f1 f2,
+ (forall x, implb (f2 x) (f1 x) = true) ->
+ prefix_pred l1 l2 -> prefix_pred (f1::l1) (f2::l2).
+
+(** [prefix_pred] is transitive **)
+Lemma prefix_pred_trans:
+ forall (l1 l2 l3:list (state->bool)),
+ prefix_pred l1 l2 -> prefix_pred l2 l3 -> prefix_pred l1 l3.
+Proof.
+ intros l1 l2 l3 H1 H2. revert l3 H2.
+ induction H1 as [|l1 l2 f1 f2 Hf2f1]; [now constructor|].
+ intros l3. inversion 1 as [|??? f3 Hf3f2]. subst. constructor; [|now eauto].
+ intros x. specialize (Hf3f2 x). specialize (Hf2f1 x).
+ repeat destruct (_ x); auto.
+Qed.
+
+Fixpoint is_prefix_pred (l1 l2:list (state->bool)) :=
+ match l1, l2 with
+ | [], _ => true
+ | f1::q1, f2::q2 =>
+ (forallb (fun x => implb (f2 x) (f1 x)) all_list
+ && is_prefix_pred q1 q2)%bool
+ | _::_, [] => false
+ end.
+
+Instance prefix_pred_is_validator l1 l2 :
+ IsValidator (prefix_pred l1 l2) (is_prefix_pred l1 l2).
+Proof.
+ revert l2. induction l1 as [|x1 l1 IH]=>l2 Hpref.
+ - constructor.
+ - destruct l2 as [|x2 l2]=>//.
+ move: Hpref=> /andb_prop [/forallb_forall ? /IH ?].
+ constructor; auto using all_list_forall.
+Qed.
+
+(** The assumptions about the state stack are preserved when we shift. **)
+Definition shift_past_state :=
+ forall s,
+ match action_table s with
+ | Lookahead_act awp => forall t,
+ match awp t with
+ | Shift_act s2 _ =>
+ prefix_pred (past_state_of_non_init_state s2)
+ (head_states_of_state s)
+ | _ => True
+ end
+ | _ => True
+ end.
+
+(** The assumptions about the state stack are preserved when we do a goto. **)
+Definition goto_past_state :=
+ forall s nt,
+ match goto_table s nt with
+ | Some (exist _ s2 _) =>
+ prefix_pred (past_state_of_non_init_state s2)
+ (head_states_of_state s)
+ | None => True
+ end.
+
+(** What states are possible after having popped these symbols from the
+    stack, given the annotation of the current state? **)
+Inductive state_valid_after_pop (s:state):
+ list symbol -> list (state -> bool) -> Prop :=
+ | state_valid_after_pop_nil1:
+ forall p pl, p s = true -> state_valid_after_pop s [] (p::pl)
+ | state_valid_after_pop_nil2:
+ forall sl, state_valid_after_pop s sl []
+ | state_valid_after_pop_cons:
+ forall st sq p pl, state_valid_after_pop s sq pl ->
+ state_valid_after_pop s (st::sq) (p::pl).
+
+Fixpoint is_state_valid_after_pop (state:state) (to_pop:list symbol) annot :=
+ match annot, to_pop with
+ | [], _ => true
+ | p::_, [] => p state
+ | p::pl, s::sl => is_state_valid_after_pop state sl pl
+ end.
+
+Instance impl_is_state_valid_after_pop_is_validator state sl pl P b :
+ IsValidator P b ->
+ IsValidator (state_valid_after_pop state sl pl -> P)
+ (if is_state_valid_after_pop state sl pl then b else true).
+Proof.
+ destruct (is_state_valid_after_pop state0 sl pl) eqn:EQ.
+ - intros ??. auto using is_validator.
+ - intros _ _ Hsvap. exfalso. induction Hsvap=>//; [simpl in EQ; congruence|].
+ by destruct sl.
+Qed.
+
+(** A state is valid for reducing a production when:
+ - The assumptions on the state are such that we will find the right hand
+ side of the production on the stack.
+ - We will be able to do a goto after having popped the right hand side.
+**)
+Definition valid_for_reduce (state:state) prod :=
+ prefix (prod_rhs_rev prod) (head_symbs_of_state state) /\
+ forall state_new,
+ state_valid_after_pop state_new
+ (prod_rhs_rev prod) (head_states_of_state state) ->
+ match goto_table state_new (prod_lhs prod) with
+ | None =>
+ match state_new with
+ | Init i => prod_lhs prod = start_nt i
+ | Ninit _ => False
+ end
+ | _ => True
+ end.
+
+(** All the states that do a reduce are valid for reduction. **)
+Definition reduce_ok :=
+ forall s,
+ match action_table s with
+ | Lookahead_act awp =>
+ forall t, match awp t with
+ | Reduce_act p => valid_for_reduce s p
+ | _ => True
+ end
+ | Default_reduce_act p => valid_for_reduce s p
+ end.
+
+(** The automaton is safe **)
+Definition safe :=
+ shift_head_symbs /\ goto_head_symbs /\ shift_past_state /\
+ goto_past_state /\ reduce_ok.
+
+Derive is_safe
+SuchThat (IsValidator safe (is_safe ()))
+As safe_is_validator.
+Proof. subst is_safe. instantiate (1:=fun _ => _). apply _. Qed.
+
+End Make.
diff --git a/configure b/configure
index e6cdcf04..22581b02 100755
--- a/configure
+++ b/configure
@@ -553,15 +553,12 @@ else
ocaml_opt_comp=false
fi
-MENHIR_REQUIRED=20161201
-MENHIR_NEW_API=20180530
-MENHIR_MAX=20181113
-menhir_flags=''
+MENHIR_REQUIRED=20190626
echo "Testing Menhir... " | tr -d '\n'
menhir_ver=`menhir --version 2>/dev/null | sed -n -e 's/^.*version \([0-9]*\).*$/\1/p'`
case "$menhir_ver" in
20[0-9][0-9][0-9][0-9][0-9][0-9])
- if test "$menhir_ver" -ge $MENHIR_REQUIRED -a "$menhir_ver" -le $MENHIR_MAX; then
+ if test "$menhir_ver" -ge $MENHIR_REQUIRED; then
echo "version $menhir_ver -- good!"
menhir_include_dir=`menhir --suggest-menhirLib`
if test -z "$menhir_include_dir"; then
@@ -570,12 +567,9 @@ case "$menhir_ver" in
echo "Consider using the OPAM package for Menhir."
missingtools=true
fi
- if test "$menhir_ver" -ge $MENHIR_NEW_API; then
- menhir_flags="--coq-lib-path compcert.cparser.MenhirLib"
- fi
else
echo "version $menhir_ver -- UNSUPPORTED"
- echo "Error: CompCert requires a version of Menhir between $MENHIR_REQUIRED and $MENHIR_MAX, included."
+ echo "Error: CompCert requires a version greater or equal to $MENHIR_REQUIRED."
missingtools=true
fi;;
*)
@@ -639,7 +633,8 @@ echo "-R lib compcert.lib \
-R driver compcert.driver \
-R flocq compcert.flocq \
-R exportclight compcert.exportclight \
--R cparser compcert.cparser" > _CoqProject
+-R cparser compcert.cparser \
+-R MenhirLib compcert.MenhirLib" > _CoqProject
case $arch in
x86)
echo "-R x86_${bitsize} compcert.x86_${bitsize}" >> _CoqProject
@@ -661,7 +656,6 @@ SHAREDIR=$sharedir
COQDEVDIR=$coqdevdir
OCAML_OPT_COMP=$ocaml_opt_comp
MENHIR_INCLUDES=-I "$menhir_include_dir"
-MENHIR_FLAGS=$menhir_flags
COMPFLAGS=-bin-annot
EOF
diff --git a/cparser/Cabs.v b/cparser/Cabs.v
index 5865ab69..5f12e8a1 100644
--- a/cparser/Cabs.v
+++ b/cparser/Cabs.v
@@ -20,7 +20,7 @@ Parameter string : Type.
(* OCaml's int64 type, used to represent individual characters in literals. *)
Parameter char_code : Type.
(* Context information. *)
-Parameter cabsloc : Type.
+Parameter loc : Type.
Record floatInfo := {
isHex_FI:bool;
@@ -51,7 +51,7 @@ Inductive typeSpecifier := (* Merge all specifiers into one type *)
* They also have a list of __attribute__s that appeared between the
* keyword and the type name (definitions only) *)
| Tstruct_union : structOrUnion -> option string -> option (list field_group) -> list attribute -> typeSpecifier
- | Tenum : option string -> option (list (string * option expression * cabsloc)) -> list attribute -> typeSpecifier
+ | Tenum : option string -> option (list (string * option expression * loc)) -> list attribute -> typeSpecifier
with storage :=
AUTO | STATIC | EXTERN | REGISTER | TYPEDEF
@@ -87,18 +87,18 @@ with decl_type :=
| PROTO_OLD : decl_type -> list string -> decl_type
with parameter :=
- | PARAM : list spec_elem -> option string -> decl_type -> list attribute -> cabsloc -> parameter
+ | PARAM : list spec_elem -> option string -> decl_type -> list attribute -> loc -> parameter
(* The optional expression is the bitfield *)
with field_group :=
- | Field_group : list spec_elem -> list (option name * option expression) -> cabsloc -> field_group
+ | Field_group : list spec_elem -> list (option name * option expression) -> loc -> field_group
(* The decl_type is in the order in which they are printed. Only the name of
* the declared identifier is pulled out. *)
(* e.g: in "int *x", "*x" is the declarator; "x" will be pulled out as *)
(* the string, and decl_type will be PTR([], JUSTBASE) *)
with name :=
- | Name : string -> decl_type -> list attribute -> cabsloc -> name
+ | Name : string -> decl_type -> list attribute -> loc -> name
(* A variable declarator ("name") with an initializer *)
with init_name :=
@@ -161,9 +161,9 @@ with initwhat :=
| ATINDEX_INIT : expression -> initwhat
with attribute :=
- | GCC_ATTR : list gcc_attribute -> cabsloc -> attribute
- | PACKED_ATTR : list expression -> cabsloc -> attribute
- | ALIGNAS_ATTR : list expression -> cabsloc -> attribute
+ | GCC_ATTR : list gcc_attribute -> loc -> attribute
+ | PACKED_ATTR : list expression -> loc -> attribute
+ | ALIGNAS_ATTR : list expression -> loc -> attribute
with gcc_attribute :=
| GCC_ATTR_EMPTY
@@ -194,31 +194,31 @@ Definition asm_flag := (bool * list char_code)%type.
** Declaration definition (at toplevel)
*)
Inductive definition :=
- | FUNDEF : list spec_elem -> name -> list definition -> statement -> cabsloc -> definition
- | DECDEF : init_name_group -> cabsloc -> definition (* global variable(s), or function prototype *)
- | PRAGMA : string -> cabsloc -> definition
+ | FUNDEF : list spec_elem -> name -> list definition -> statement -> loc -> definition
+ | DECDEF : init_name_group -> loc -> definition (* global variable(s), or function prototype *)
+ | PRAGMA : string -> loc -> definition
(*
** statements
*)
with statement :=
- | NOP : cabsloc -> statement
- | COMPUTATION : expression -> cabsloc -> statement
- | BLOCK : list statement -> cabsloc -> statement
- | If : expression -> statement -> option statement -> cabsloc -> statement
- | WHILE : expression -> statement -> cabsloc -> statement
- | DOWHILE : expression -> statement -> cabsloc -> statement
- | FOR : option for_clause -> option expression -> option expression -> statement -> cabsloc -> statement
- | BREAK : cabsloc -> statement
- | CONTINUE : cabsloc -> statement
- | RETURN : option expression -> cabsloc -> statement
- | SWITCH : expression -> statement -> cabsloc -> statement
- | CASE : expression -> statement -> cabsloc -> statement
- | DEFAULT : statement -> cabsloc -> statement
- | LABEL : string -> statement -> cabsloc -> statement
- | GOTO : string -> cabsloc -> statement
- | ASM : list cvspec -> bool -> list char_code -> list asm_operand -> list asm_operand -> list asm_flag -> cabsloc -> statement
+ | NOP : loc -> statement
+ | COMPUTATION : expression -> loc -> statement
+ | BLOCK : list statement -> loc -> statement
+ | If : expression -> statement -> option statement -> loc -> statement
+ | WHILE : expression -> statement -> loc -> statement
+ | DOWHILE : expression -> statement -> loc -> statement
+ | FOR : option for_clause -> option expression -> option expression -> statement -> loc -> statement
+ | BREAK : loc -> statement
+ | CONTINUE : loc -> statement
+ | RETURN : option expression -> loc -> statement
+ | SWITCH : expression -> statement -> loc -> statement
+ | CASE : expression -> statement -> loc -> statement
+ | DEFAULT : statement -> loc -> statement
+ | LABEL : string -> statement -> loc -> statement
+ | GOTO : string -> loc -> statement
+ | ASM : list cvspec -> bool -> list char_code -> list asm_operand -> list asm_operand -> list asm_flag -> loc -> statement
| DEFINITION : definition -> statement (*definition or declaration of a variable or type*)
with for_clause :=
diff --git a/cparser/Cabshelper.ml b/cparser/Cabshelper.ml
index 958f242c..22f3b3c7 100644
--- a/cparser/Cabshelper.ml
+++ b/cparser/Cabshelper.ml
@@ -16,11 +16,6 @@
open Cabs
-let cabslu = {lineno = -10;
- filename = "cabs loc unknown";
- byteno = -10;
- ident = 0}
-
(*********** HELPER FUNCTIONS **********)
let rec isStatic = function
@@ -44,13 +39,13 @@ let rec isTypedef = function
| _ :: rest -> isTypedef rest
-let get_definitionloc (d : definition) : cabsloc =
+let get_definitionloc (d : definition) : loc =
match d with
| FUNDEF(_, _, _, _, l) -> l
| DECDEF(_, l) -> l
| PRAGMA(_, l) -> l
-let get_statementloc (s : statement) : cabsloc =
+let get_statementloc (s : statement) : loc =
begin
match s with
| NOP(loc) -> loc
@@ -72,8 +67,8 @@ begin
| ASM(_,_,_,_,_,_,loc) -> loc
end
-let string_of_cabsloc l =
+let string_of_loc l =
Printf.sprintf "%s:%d" l.filename l.lineno
-let format_cabsloc pp l =
+let format_loc pp l =
Format.fprintf pp "%s:%d" l.filename l.lineno
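
For orientation, a minimal usage sketch (not part of the patch) of the two helpers touched above; it assumes a statement value obtained from the parser:

  (* Illustrative only: report where a statement comes from, using the
     renamed helpers above. *)
  let report_statement_loc (s : Cabs.statement) : unit =
    let l = Cabshelper.get_statementloc s in
    prerr_endline (Cabshelper.string_of_loc l)
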
diff --git a/cparser/Elab.ml b/cparser/Elab.ml
index 4d27598f..b2934c67 100644
--- a/cparser/Elab.ml
+++ b/cparser/Elab.ml
@@ -258,7 +258,7 @@ let enter_or_refine_function loc env id sto ty =
(* Forward declarations *)
-let elab_expr_f : (cabsloc -> Env.t -> Cabs.expression -> C.exp * Env.t) ref
+let elab_expr_f : (Cabs.loc -> Env.t -> Cabs.expression -> C.exp * Env.t) ref
= ref (fun _ _ _ -> assert false)
let elab_funbody_f : (C.typ -> bool -> bool -> Env.t -> statement -> C.stmt) ref
@@ -2708,7 +2708,7 @@ let elab_fundef genv spec name defs body loc =
(* Definitions *)
let elab_decdef (for_loop: bool) (local: bool) (nonstatic_inline: bool)
(env: Env.t) ((spec, namelist): Cabs.init_name_group)
- (loc: Cabs.cabsloc) : decl list * Env.t =
+ (loc: Cabs.loc) : decl list * Env.t =
let (sto, inl, noret, tydef, bty, env') =
elab_specifier ~only:(namelist=[]) loc env spec in
(* Sanity checks on storage class *)
diff --git a/cparser/Elab.mli b/cparser/Elab.mli
index f701e8c5..59c5efc1 100644
--- a/cparser/Elab.mli
+++ b/cparser/Elab.mli
@@ -18,8 +18,8 @@ val elab_file : Cabs.definition list -> C.program
definitions as produced by the parser into a program in C abstract
syntax. *)
-val elab_int_constant : Cabs.cabsloc -> string -> int64 * C.ikind
+val elab_int_constant : Cabs.loc -> string -> int64 * C.ikind
val elab_float_constant : Cabs.floatInfo -> C.float_cst * C.fkind
-val elab_char_constant : Cabs.cabsloc -> bool -> int64 list -> int64
+val elab_char_constant : Cabs.loc -> bool -> int64 list -> int64
(* These auxiliary functions are exported so that they can be reused
in other projects that deal with C-style source languages. *)
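
Since the .mli above documents these helpers as reusable, here is a small usage sketch (not part of the patch) against the signature shown; the function name and the literal are made up, and it assumes some location value is at hand:

  (* Illustrative only: calling the exported elaboration helper, assuming a
     [loc : Cabs.loc] is available for error reporting. *)
  let demo_int_constant (loc : Cabs.loc) : unit =
    let (v, kind) = Elab.elab_int_constant loc "42u" in
    ignore kind;
    Printf.printf "value = %Ld\n" v
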
diff --git a/cparser/Lexer.mll b/cparser/Lexer.mll
index 7cf22eef..346477b5 100644
--- a/cparser/Lexer.mll
+++ b/cparser/Lexer.mll
@@ -20,7 +20,7 @@ open Pre_parser_aux
module SSet = Set.Make(String)
-let lexicon : (string, Cabs.cabsloc -> token) Hashtbl.t = Hashtbl.create 17
+let lexicon : (string, Cabs.loc -> token) Hashtbl.t = Hashtbl.create 17
let ignored_keywords : SSet.t ref = ref SSet.empty
let reserved_keyword loc id =
@@ -434,10 +434,7 @@ and singleline_comment = parse
| _ { singleline_comment lexbuf }
{
- open Streams
- open Specif
- open Parser
- open !Aut.GramDefs
+ open Parser.MenhirLibParser.Inter
(* This is the main entry point to the lexer. *)
@@ -463,8 +460,8 @@ and singleline_comment = parse
curr_id := None;
let loc = currentLoc lexbuf in
let token =
- if SSet.mem id !types_context then TYPEDEF_NAME (id, ref TypedefId, loc)
- else VAR_NAME (id, ref VarId, loc)
+ if SSet.mem id !types_context then Pre_parser.TYPEDEF_NAME (id, ref TypedefId, loc)
+ else Pre_parser.VAR_NAME (id, ref VarId, loc)
in
Queue.push token tokens;
token
@@ -497,133 +494,129 @@ and singleline_comment = parse
(* [tokens_stream filename text] runs the pre_parser and produces a stream
of (appropriately classified) tokens. *)
- let tokens_stream filename text : token coq_Stream =
+ let tokens_stream filename text : buffer =
let tokens = Queue.create () in
let buffer = ref ErrorReports.Zero in
invoke_pre_parser filename text (lexer tokens buffer) buffer;
- let rec compute_token_stream () =
- let loop t v =
- Cons (Coq_existT (t, Obj.magic v), Lazy.from_fun compute_token_stream)
- in
+ let rec compute_buffer () =
+ let loop t = Buf_cons (t, Lazy.from_fun compute_buffer) in
match Queue.pop tokens with
- | ADD_ASSIGN loc -> loop ADD_ASSIGN't loc
- | AND loc -> loop AND't loc
- | ANDAND loc -> loop ANDAND't loc
- | AND_ASSIGN loc -> loop AND_ASSIGN't loc
- | AUTO loc -> loop AUTO't loc
- | BANG loc -> loop BANG't loc
- | BAR loc -> loop BAR't loc
- | BARBAR loc -> loop BARBAR't loc
- | UNDERSCORE_BOOL loc -> loop UNDERSCORE_BOOL't loc
- | BREAK loc -> loop BREAK't loc
- | BUILTIN_VA_ARG loc -> loop BUILTIN_VA_ARG't loc
- | BUILTIN_OFFSETOF loc -> loop BUILTIN_OFFSETOF't loc
- | CASE loc -> loop CASE't loc
- | CHAR loc -> loop CHAR't loc
- | COLON loc -> loop COLON't loc
- | COMMA loc -> loop COMMA't loc
- | CONST loc -> loop CONST't loc
- | CONSTANT (cst, loc) -> loop CONSTANT't (cst, loc)
- | CONTINUE loc -> loop CONTINUE't loc
- | DEC loc -> loop DEC't loc
- | DEFAULT loc -> loop DEFAULT't loc
- | DIV_ASSIGN loc -> loop DIV_ASSIGN't loc
- | DO loc -> loop DO't loc
- | DOT loc -> loop DOT't loc
- | DOUBLE loc -> loop DOUBLE't loc
- | ELLIPSIS loc -> loop ELLIPSIS't loc
- | ELSE loc -> loop ELSE't loc
- | ENUM loc -> loop ENUM't loc
- | EOF -> loop EOF't ()
- | EQ loc -> loop EQ't loc
- | EQEQ loc -> loop EQEQ't loc
- | EXTERN loc -> loop EXTERN't loc
- | FLOAT loc -> loop FLOAT't loc
- | FOR loc -> loop FOR't loc
- | GEQ loc -> loop GEQ't loc
- | GOTO loc -> loop GOTO't loc
- | GT loc -> loop GT't loc
- | HAT loc -> loop HAT't loc
- | IF loc -> loop IF't loc
- | INC loc -> loop INC't loc
- | INLINE loc -> loop INLINE't loc
- | INT loc -> loop INT't loc
- | LBRACE loc -> loop LBRACE't loc
- | LBRACK loc -> loop LBRACK't loc
- | LEFT loc -> loop LEFT't loc
- | LEFT_ASSIGN loc -> loop LEFT_ASSIGN't loc
- | LEQ loc -> loop LEQ't loc
- | LONG loc -> loop LONG't loc
- | LPAREN loc -> loop LPAREN't loc
- | LT loc -> loop LT't loc
- | MINUS loc -> loop MINUS't loc
- | MOD_ASSIGN loc -> loop MOD_ASSIGN't loc
- | MUL_ASSIGN loc -> loop MUL_ASSIGN't loc
- | NEQ loc -> loop NEQ't loc
- | NORETURN loc -> loop NORETURN't loc
- | OR_ASSIGN loc -> loop OR_ASSIGN't loc
- | PACKED loc -> loop PACKED't loc
- | PERCENT loc -> loop PERCENT't loc
- | PLUS loc -> loop PLUS't loc
- | PTR loc -> loop PTR't loc
- | QUESTION loc -> loop QUESTION't loc
- | RBRACE loc -> loop RBRACE't loc
- | RBRACK loc -> loop RBRACK't loc
- | REGISTER loc -> loop REGISTER't loc
- | RESTRICT loc -> loop RESTRICT't loc
- | RETURN loc -> loop RETURN't loc
- | RIGHT loc -> loop RIGHT't loc
- | RIGHT_ASSIGN loc -> loop RIGHT_ASSIGN't loc
- | RPAREN loc -> loop RPAREN't loc
- | SEMICOLON loc -> loop SEMICOLON't loc
- | SHORT loc -> loop SHORT't loc
- | SIGNED loc -> loop SIGNED't loc
- | SIZEOF loc -> loop SIZEOF't loc
- | SLASH loc -> loop SLASH't loc
- | STAR loc -> loop STAR't loc
- | STATIC loc -> loop STATIC't loc
- | STRING_LITERAL (wide, str, loc) ->
+ | Pre_parser.ADD_ASSIGN loc -> loop (Parser.ADD_ASSIGN loc)
+ | Pre_parser.AND loc -> loop (Parser.AND loc)
+ | Pre_parser.ANDAND loc -> loop (Parser.ANDAND loc)
+ | Pre_parser.AND_ASSIGN loc -> loop (Parser.AND_ASSIGN loc)
+ | Pre_parser.AUTO loc -> loop (Parser.AUTO loc)
+ | Pre_parser.BANG loc -> loop (Parser.BANG loc)
+ | Pre_parser.BAR loc -> loop (Parser.BAR loc)
+ | Pre_parser.BARBAR loc -> loop (Parser.BARBAR loc)
+ | Pre_parser.UNDERSCORE_BOOL loc -> loop (Parser.UNDERSCORE_BOOL loc)
+ | Pre_parser.BREAK loc -> loop (Parser.BREAK loc)
+ | Pre_parser.BUILTIN_VA_ARG loc -> loop (Parser.BUILTIN_VA_ARG loc)
+ | Pre_parser.BUILTIN_OFFSETOF loc -> loop (Parser.BUILTIN_OFFSETOF loc)
+ | Pre_parser.CASE loc -> loop (Parser.CASE loc)
+ | Pre_parser.CHAR loc -> loop (Parser.CHAR loc)
+ | Pre_parser.COLON loc -> loop (Parser.COLON loc)
+ | Pre_parser.COMMA loc -> loop (Parser.COMMA loc)
+ | Pre_parser.CONST loc -> loop (Parser.CONST loc)
+ | Pre_parser.CONSTANT (cst, loc) -> loop (Parser.CONSTANT (cst, loc))
+ | Pre_parser.CONTINUE loc -> loop (Parser.CONTINUE loc)
+ | Pre_parser.DEC loc -> loop (Parser.DEC loc)
+ | Pre_parser.DEFAULT loc -> loop (Parser.DEFAULT loc)
+ | Pre_parser.DIV_ASSIGN loc -> loop (Parser.DIV_ASSIGN loc)
+ | Pre_parser.DO loc -> loop (Parser.DO loc)
+ | Pre_parser.DOT loc -> loop (Parser.DOT loc)
+ | Pre_parser.DOUBLE loc -> loop (Parser.DOUBLE loc)
+ | Pre_parser.ELLIPSIS loc -> loop (Parser.ELLIPSIS loc)
+ | Pre_parser.ELSE loc -> loop (Parser.ELSE loc)
+ | Pre_parser.ENUM loc -> loop (Parser.ENUM loc)
+ | Pre_parser.EOF -> loop (Parser.EOF ())
+ | Pre_parser.EQ loc -> loop (Parser.EQ loc)
+ | Pre_parser.EQEQ loc -> loop (Parser.EQEQ loc)
+ | Pre_parser.EXTERN loc -> loop (Parser.EXTERN loc)
+ | Pre_parser.FLOAT loc -> loop (Parser.FLOAT loc)
+ | Pre_parser.FOR loc -> loop (Parser.FOR loc)
+ | Pre_parser.GEQ loc -> loop (Parser.GEQ loc)
+ | Pre_parser.GOTO loc -> loop (Parser.GOTO loc)
+ | Pre_parser.GT loc -> loop (Parser.GT loc)
+ | Pre_parser.HAT loc -> loop (Parser.HAT loc)
+ | Pre_parser.IF loc -> loop (Parser.IF_ loc)
+ | Pre_parser.INC loc -> loop (Parser.INC loc)
+ | Pre_parser.INLINE loc -> loop (Parser.INLINE loc)
+ | Pre_parser.INT loc -> loop (Parser.INT loc)
+ | Pre_parser.LBRACE loc -> loop (Parser.LBRACE loc)
+ | Pre_parser.LBRACK loc -> loop (Parser.LBRACK loc)
+ | Pre_parser.LEFT loc -> loop (Parser.LEFT loc)
+ | Pre_parser.LEFT_ASSIGN loc -> loop (Parser.LEFT_ASSIGN loc)
+ | Pre_parser.LEQ loc -> loop (Parser.LEQ loc)
+ | Pre_parser.LONG loc -> loop (Parser.LONG loc)
+ | Pre_parser.LPAREN loc -> loop (Parser.LPAREN loc)
+ | Pre_parser.LT loc -> loop (Parser.LT loc)
+ | Pre_parser.MINUS loc -> loop (Parser.MINUS loc)
+ | Pre_parser.MOD_ASSIGN loc -> loop (Parser.MOD_ASSIGN loc)
+ | Pre_parser.MUL_ASSIGN loc -> loop (Parser.MUL_ASSIGN loc)
+ | Pre_parser.NEQ loc -> loop (Parser.NEQ loc)
+ | Pre_parser.NORETURN loc -> loop (Parser.NORETURN loc)
+ | Pre_parser.OR_ASSIGN loc -> loop (Parser.OR_ASSIGN loc)
+ | Pre_parser.PACKED loc -> loop (Parser.PACKED loc)
+ | Pre_parser.PERCENT loc -> loop (Parser.PERCENT loc)
+ | Pre_parser.PLUS loc -> loop (Parser.PLUS loc)
+ | Pre_parser.PTR loc -> loop (Parser.PTR loc)
+ | Pre_parser.QUESTION loc -> loop (Parser.QUESTION loc)
+ | Pre_parser.RBRACE loc -> loop (Parser.RBRACE loc)
+ | Pre_parser.RBRACK loc -> loop (Parser.RBRACK loc)
+ | Pre_parser.REGISTER loc -> loop (Parser.REGISTER loc)
+ | Pre_parser.RESTRICT loc -> loop (Parser.RESTRICT loc)
+ | Pre_parser.RETURN loc -> loop (Parser.RETURN loc)
+ | Pre_parser.RIGHT loc -> loop (Parser.RIGHT loc)
+ | Pre_parser.RIGHT_ASSIGN loc -> loop (Parser.RIGHT_ASSIGN loc)
+ | Pre_parser.RPAREN loc -> loop (Parser.RPAREN loc)
+ | Pre_parser.SEMICOLON loc -> loop (Parser.SEMICOLON loc)
+ | Pre_parser.SHORT loc -> loop (Parser.SHORT loc)
+ | Pre_parser.SIGNED loc -> loop (Parser.SIGNED loc)
+ | Pre_parser.SIZEOF loc -> loop (Parser.SIZEOF loc)
+ | Pre_parser.SLASH loc -> loop (Parser.SLASH loc)
+ | Pre_parser.STAR loc -> loop (Parser.STAR loc)
+ | Pre_parser.STATIC loc -> loop (Parser.STATIC loc)
+ | Pre_parser.STRING_LITERAL (wide, str, loc) ->
(* Merge consecutive string literals *)
let rec doConcat wide str =
- try
- match Queue.peek tokens with
- | STRING_LITERAL (wide', str', loc) ->
- ignore (Queue.pop tokens);
- let (wide'', str'') = doConcat wide' str' in
- if str'' <> []
- then (wide || wide'', str @ str'')
- else (wide, str)
- | _ ->
- (wide, str)
- with Queue.Empty -> (wide, str) in
- let (wide', str') = doConcat wide str in
- loop STRING_LITERAL't ((wide', str'), loc)
- | STRUCT loc -> loop STRUCT't loc
- | SUB_ASSIGN loc -> loop SUB_ASSIGN't loc
- | SWITCH loc -> loop SWITCH't loc
- | TILDE loc -> loop TILDE't loc
- | TYPEDEF loc -> loop TYPEDEF't loc
- | TYPEDEF_NAME (id, typ, loc)
- | VAR_NAME (id, typ, loc) ->
- let terminal = match !typ with
- | VarId -> VAR_NAME't
- | TypedefId -> TYPEDEF_NAME't
- | OtherId -> OTHER_NAME't
+ match Queue.peek tokens with
+ | Pre_parser.STRING_LITERAL (wide', str', loc) ->
+ ignore (Queue.pop tokens);
+ let (wide'', str'') = doConcat wide' str' in
+ if str'' <> []
+ then (wide || wide'', str @ str'')
+ else (wide, str)
+ | _ -> (wide, str)
+ | exception Queue.Empty -> (wide, str)
in
- loop terminal (id, loc)
- | UNION loc -> loop UNION't loc
- | UNSIGNED loc -> loop UNSIGNED't loc
- | VOID loc -> loop VOID't loc
- | VOLATILE loc -> loop VOLATILE't loc
- | WHILE loc -> loop WHILE't loc
- | XOR_ASSIGN loc -> loop XOR_ASSIGN't loc
- | ALIGNAS loc -> loop ALIGNAS't loc
- | ALIGNOF loc -> loop ALIGNOF't loc
- | ATTRIBUTE loc -> loop ATTRIBUTE't loc
- | ASM loc -> loop ASM't loc
- | PRAGMA (s, loc) -> loop PRAGMA't (s, loc)
- | PRE_NAME _ -> assert false
+ let (wide', str') = doConcat wide str in
+ loop (Parser.STRING_LITERAL ((wide', str'), loc))
+ | Pre_parser.STRUCT loc -> loop (Parser.STRUCT loc)
+ | Pre_parser.SUB_ASSIGN loc -> loop (Parser.SUB_ASSIGN loc)
+ | Pre_parser.SWITCH loc -> loop (Parser.SWITCH loc)
+ | Pre_parser.TILDE loc -> loop (Parser.TILDE loc)
+ | Pre_parser.TYPEDEF loc -> loop (Parser.TYPEDEF loc)
+ | Pre_parser.TYPEDEF_NAME (id, typ, loc)
+ | Pre_parser.VAR_NAME (id, typ, loc) ->
+ begin match !typ with
+ | VarId -> loop (Parser.VAR_NAME (id, loc))
+ | TypedefId -> loop (Parser.TYPEDEF_NAME (id, loc))
+ | OtherId -> loop (Parser.OTHER_NAME (id, loc))
+ end
+ | Pre_parser.UNION loc -> loop (Parser.UNION loc)
+ | Pre_parser.UNSIGNED loc -> loop (Parser.UNSIGNED loc)
+ | Pre_parser.VOID loc -> loop (Parser.VOID loc)
+ | Pre_parser.VOLATILE loc -> loop (Parser.VOLATILE loc)
+ | Pre_parser.WHILE loc -> loop (Parser.WHILE loc)
+ | Pre_parser.XOR_ASSIGN loc -> loop (Parser.XOR_ASSIGN loc)
+ | Pre_parser.ALIGNAS loc -> loop (Parser.ALIGNAS loc)
+ | Pre_parser.ALIGNOF loc -> loop (Parser.ALIGNOF loc)
+ | Pre_parser.ATTRIBUTE loc -> loop (Parser.ATTRIBUTE loc)
+ | Pre_parser.ASM loc -> loop (Parser.ASM loc)
+ | Pre_parser.PRAGMA (s, loc) -> loop (Parser.PRAGMA (s, loc))
+ | Pre_parser.PRE_NAME _ -> assert false
in
- Lazy.from_fun compute_token_stream
+ Lazy.from_fun compute_buffer
}
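
The rewritten tokens_stream above now builds a lazily evaluated buffer of Buf_cons cells instead of a coq_Stream of dependent pairs. A generic sketch of that pattern (illustrative only; the real buffer type is the one extracted into Parser.MenhirLibParser.Inter):

  (* Illustrative only: a generic lazy token buffer in the style used above. *)
  type 'tok buffer = Buf_cons of 'tok * 'tok buffer Lazy.t

  let rec buffer_of_queue (q : 'tok Queue.t) : 'tok buffer Lazy.t =
    (* Assumes the queue ends with an EOF-like token at which the consumer
       stops, as the pre-parser run above guarantees. *)
    lazy (Buf_cons (Queue.pop q, buffer_of_queue q))
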
diff --git a/cparser/MenhirLib/Alphabet.v b/cparser/MenhirLib/Alphabet.v
deleted file mode 100644
index a13f69b0..00000000
--- a/cparser/MenhirLib/Alphabet.v
+++ /dev/null
@@ -1,320 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import Int31.
-Require Import Cyclic31.
-Require Import Omega.
-Require Import List.
-Require Import Syntax.
-Require Import Relations.
-Require Import RelationClasses.
-
-Local Obligation Tactic := intros.
-
-(** A comparable type is equipped with a [compare] function that defines an order
- relation. **)
-Class Comparable (A:Type) := {
- compare : A -> A -> comparison;
- compare_antisym : forall x y, CompOpp (compare x y) = compare y x;
- compare_trans : forall x y z c,
- (compare x y) = c -> (compare y z) = c -> (compare x z) = c
-}.
-
-Theorem compare_refl {A:Type} (C: Comparable A) :
- forall x, compare x x = Eq.
-Proof.
-intros.
-pose proof (compare_antisym x x).
-destruct (compare x x); intuition; try discriminate.
-Qed.
-
-(** The corresponding order is a strict order. **)
-Definition comparableLt {A:Type} (C: Comparable A) : relation A :=
- fun x y => compare x y = Lt.
-
-Instance ComparableLtStrictOrder {A:Type} (C: Comparable A) :
- StrictOrder (comparableLt C).
-Proof.
-apply Build_StrictOrder.
-unfold Irreflexive, Reflexive, complement, comparableLt.
-intros.
-pose proof H.
-rewrite <- compare_antisym in H.
-rewrite H0 in H.
-discriminate H.
-unfold Transitive, comparableLt.
-intros x y z.
-apply compare_trans.
-Qed.
-
-(** nat is comparable. **)
-Program Instance natComparable : Comparable nat :=
- { compare := Nat.compare }.
-Next Obligation.
-symmetry.
-destruct (Nat.compare x y) as [] eqn:?.
-rewrite Nat.compare_eq_iff in Heqc.
-destruct Heqc.
-rewrite Nat.compare_eq_iff.
-trivial.
-rewrite <- nat_compare_lt in *.
-rewrite <- nat_compare_gt in *.
-trivial.
-rewrite <- nat_compare_lt in *.
-rewrite <- nat_compare_gt in *.
-trivial.
-Qed.
-Next Obligation.
-destruct c.
-rewrite Nat.compare_eq_iff in *; destruct H; assumption.
-rewrite <- nat_compare_lt in *.
-apply (Nat.lt_trans _ _ _ H H0).
-rewrite <- nat_compare_gt in *.
-apply (gt_trans _ _ _ H H0).
-Qed.
-
-(** A pair of comparable is comparable. **)
-Program Instance PairComparable {A:Type} (CA:Comparable A) {B:Type} (CB:Comparable B) :
- Comparable (A*B) :=
- { compare := fun x y =>
- let (xa, xb) := x in let (ya, yb) := y in
- match compare xa ya return comparison with
- | Eq => compare xb yb
- | x => x
- end }.
-Next Obligation.
-destruct x, y.
-rewrite <- (compare_antisym a a0).
-rewrite <- (compare_antisym b b0).
-destruct (compare a a0); intuition.
-Qed.
-Next Obligation.
-destruct x, y, z.
-destruct (compare a a0) as [] eqn:?, (compare a0 a1) as [] eqn:?;
-try (rewrite <- H0 in H; discriminate);
-try (destruct (compare a a1) as [] eqn:?;
- try (rewrite <- compare_antisym in Heqc0;
- rewrite CompOpp_iff in Heqc0;
- rewrite (compare_trans _ _ _ _ Heqc0 Heqc2) in Heqc1;
- discriminate);
- try (rewrite <- compare_antisym in Heqc1;
- rewrite CompOpp_iff in Heqc1;
- rewrite (compare_trans _ _ _ _ Heqc2 Heqc1) in Heqc0;
- discriminate);
- assumption);
-rewrite (compare_trans _ _ _ _ Heqc0 Heqc1);
-try assumption.
-apply (compare_trans _ _ _ _ H H0).
-Qed.
-
-(** Special case of comparable, where equality is usual equality. **)
-Class ComparableUsualEq {A:Type} (C: Comparable A) :=
- compare_eq : forall x y, compare x y = Eq -> x = y.
-
-(** Boolean equality for a [Comparable]. **)
-Definition compare_eqb {A:Type} {C:Comparable A} (x y:A) :=
- match compare x y with
- | Eq => true
- | _ => false
- end.
-
-Theorem compare_eqb_iff {A:Type} {C:Comparable A} {U:ComparableUsualEq C} :
- forall x y, compare_eqb x y = true <-> x = y.
-Proof.
-unfold compare_eqb.
-intuition.
-apply compare_eq.
-destruct (compare x y); intuition; discriminate.
-destruct H.
-rewrite compare_refl; intuition.
-Qed.
-
-(** [Comparable] provides a decidable equality. **)
-Definition compare_eqdec {A:Type} {C:Comparable A} {U:ComparableUsualEq C} (x y:A):
- {x = y} + {x <> y}.
-Proof.
-destruct (compare x y) as [] eqn:?; [left; apply compare_eq; intuition | ..];
- right; intro; destruct H; rewrite compare_refl in Heqc; discriminate.
-Defined.
-
-Instance NComparableUsualEq : ComparableUsualEq natComparable := Nat.compare_eq.
-
-(** A pair of ComparableUsualEq is ComparableUsualEq **)
-Instance PairComparableUsualEq
- {A:Type} {CA:Comparable A} (UA:ComparableUsualEq CA)
- {B:Type} {CB:Comparable B} (UB:ComparableUsualEq CB) :
- ComparableUsualEq (PairComparable CA CB).
-Proof.
-intros x y; destruct x, y; simpl.
-pose proof (compare_eq a a0); pose proof (compare_eq b b0).
-destruct (compare a a0); try discriminate.
-intuition.
-destruct H2, H0.
-reflexivity.
-Qed.
-
-(** A [Finite] type is a type equipped with the list of all its elements. **)
-Class Finite (A:Type) := {
- all_list : list A;
- all_list_forall : forall x:A, In x all_list
-}.
-
-(** An alphabet is both [ComparableUsualEq] and [Finite]. **)
-Class Alphabet (A:Type) := {
- AlphabetComparable :> Comparable A;
- AlphabetComparableUsualEq :> ComparableUsualEq AlphabetComparable;
- AlphabetFinite :> Finite A
-}.
-
-(** The [Numbered] class provides a convenient way to build [Alphabet] instances
- with good computational complexity. It is essentially an injection from the
- type into [Int31]. **)
-Class Numbered (A:Type) := {
- inj : A -> int31;
- surj : int31 -> A;
- surj_inj_compat : forall x, surj (inj x) = x;
- inj_bound : int31;
- inj_bound_spec : forall x, (phi (inj x) < phi inj_bound)%Z
-}.
-
-Program Instance NumberedAlphabet {A:Type} (N:Numbered A) : Alphabet A :=
- { AlphabetComparable :=
- {| compare := fun x y => compare31 (inj x) (inj y) |};
- AlphabetFinite :=
- {| all_list := fst (iter_int31 inj_bound _
- (fun p => (cons (surj (snd p)) (fst p), incr (snd p))) ([], 0%int31)) |} }.
-Next Obligation. apply Zcompare_antisym. Qed.
-Next Obligation.
-destruct c. unfold compare31 in *.
-rewrite Z.compare_eq_iff in *. congruence.
-eapply Zcompare_Lt_trans. apply H. apply H0.
-eapply Zcompare_Gt_trans. apply H. apply H0.
-Qed.
-Next Obligation.
-intros x y H. unfold compare, compare31 in H.
-rewrite Z.compare_eq_iff in *.
-rewrite <- surj_inj_compat, <- phi_inv_phi with (inj y), <- H.
-rewrite phi_inv_phi, surj_inj_compat; reflexivity.
-Qed.
-Next Obligation.
-rewrite iter_int31_iter_nat.
-pose proof (inj_bound_spec x).
-match goal with |- In x (fst ?p) => destruct p as [] eqn:? end.
-replace inj_bound with i in H.
-revert l i Heqp x H.
-induction (Z.abs_nat (phi inj_bound)); intros.
-inversion Heqp; clear Heqp; subst.
-rewrite spec_0 in H. pose proof (phi_bounded (inj x)). omega.
-simpl in Heqp.
-destruct nat_rect; specialize (IHn _ _ (eq_refl _) x); simpl in *.
-inversion Heqp. subst. clear Heqp.
-rewrite phi_incr in H.
-pose proof (phi_bounded i0).
-pose proof (phi_bounded (inj x)).
-destruct (Z_lt_le_dec (Z.succ (phi i0)) (2 ^ Z.of_nat size)%Z).
-rewrite Zmod_small in H by omega.
-apply Zlt_succ_le, Zle_lt_or_eq in H.
-destruct H; simpl; auto. left.
-rewrite <- surj_inj_compat, <- phi_inv_phi with (inj x), H, phi_inv_phi; reflexivity.
-replace (Z.succ (phi i0)) with (2 ^ Z.of_nat size)%Z in H by omega.
-rewrite Z_mod_same_full in H.
-exfalso; omega.
-rewrite <- phi_inv_phi with i, <- phi_inv_phi with inj_bound; f_equal.
-pose proof (phi_bounded inj_bound); pose proof (phi_bounded i).
-rewrite <- Z.abs_eq with (phi i), <- Z.abs_eq with (phi inj_bound) by omega.
-clear H H0 H1.
-do 2 rewrite <- Zabs2Nat.id_abs.
-f_equal.
-revert l i Heqp.
-assert (Z.abs_nat (phi inj_bound) < Z.abs_nat (2^31)).
-apply Zabs_nat_lt, phi_bounded.
-induction (Z.abs_nat (phi inj_bound)); intros.
-inversion Heqp; reflexivity.
-inversion Heqp; clear H1 H2 Heqp.
-match goal with |- _ (_ (_ (snd ?p))) = _ => destruct p end.
-pose proof (phi_bounded i0).
-erewrite <- IHn, <- Zabs2Nat.inj_succ in H |- *; eauto; try omega.
-rewrite phi_incr, Zmod_small; intuition; try omega.
-apply inj_lt in H.
-pose proof Z.le_le_succ_r.
-do 2 rewrite Zabs2Nat.id_abs, Z.abs_eq in H; now eauto.
-Qed.
-
-(** Previous class instances for [option A] **)
-Program Instance OptionComparable {A:Type} (C:Comparable A) : Comparable (option A) :=
- { compare := fun x y =>
- match x, y return comparison with
- | None, None => Eq
- | None, Some _ => Lt
- | Some _, None => Gt
- | Some x, Some y => compare x y
- end }.
-Next Obligation.
-destruct x, y; intuition.
-apply compare_antisym.
-Qed.
-Next Obligation.
-destruct x, y, z; try now intuition;
-try (rewrite <- H in H0; discriminate).
-apply (compare_trans _ _ _ _ H H0).
-Qed.
-
-Instance OptionComparableUsualEq {A:Type} {C:Comparable A} (U:ComparableUsualEq C) :
- ComparableUsualEq (OptionComparable C).
-Proof.
-intros x y.
-destruct x, y; intuition; try discriminate.
-rewrite (compare_eq a a0); intuition.
-Qed.
-
-Program Instance OptionFinite {A:Type} (E:Finite A) : Finite (option A) :=
- { all_list := None :: map Some all_list }.
-Next Obligation.
-destruct x; intuition.
-right.
-apply in_map.
-apply all_list_forall.
-Defined.
-
-(** Definitions of [FSet]/[FMap] from [Comparable] **)
-Require Import OrderedTypeAlt.
-Require FSetAVL.
-Require FMapAVL.
-Import OrderedType.
-
-Module Type ComparableM.
- Parameter t : Type.
- Declare Instance tComparable : Comparable t.
-End ComparableM.
-
-Module OrderedTypeAlt_from_ComparableM (C:ComparableM) <: OrderedTypeAlt.
- Definition t := C.t.
- Definition compare : t -> t -> comparison := compare.
-
- Infix "?=" := compare (at level 70, no associativity).
-
- Lemma compare_sym x y : (y?=x) = CompOpp (x?=y).
- Proof. exact (Logic.eq_sym (compare_antisym x y)). Qed.
- Lemma compare_trans c x y z :
- (x?=y) = c -> (y?=z) = c -> (x?=z) = c.
- Proof.
- apply compare_trans.
- Qed.
-End OrderedTypeAlt_from_ComparableM.
-
-Module OrderedType_from_ComparableM (C:ComparableM) <: OrderedType.
- Module Alt := OrderedTypeAlt_from_ComparableM C.
- Include (OrderedType_from_Alt Alt).
-End OrderedType_from_ComparableM.
diff --git a/cparser/MenhirLib/Grammar.v b/cparser/MenhirLib/Grammar.v
deleted file mode 100644
index 8e427cd9..00000000
--- a/cparser/MenhirLib/Grammar.v
+++ /dev/null
@@ -1,166 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import List.
-Require Import Syntax.
-Require Import Alphabet.
-Require Import Orders.
-Require Tuples.
-
-(** The terminal and non-terminal alphabets of the grammar. **)
-Module Type Alphs.
- Parameters terminal nonterminal : Type.
- Declare Instance TerminalAlph: Alphabet terminal.
- Declare Instance NonTerminalAlph: Alphabet nonterminal.
-End Alphs.
-
-(** Definition of the alphabet of symbols, given the alphabet of terminals
- and the alphabet of non-terminals. **)
-Module Symbol(Import A:Alphs).
-
- Inductive symbol :=
- | T: terminal -> symbol
- | NT: nonterminal -> symbol.
-
- Program Instance SymbolAlph : Alphabet symbol :=
- { AlphabetComparable := {| compare := fun x y =>
- match x, y return comparison with
- | T _, NT _ => Gt
- | NT _, T _ => Lt
- | T x, T y => compare x y
- | NT x, NT y => compare x y
- end |};
- AlphabetFinite := {| all_list :=
- map T all_list++map NT all_list |} }.
- Next Obligation.
- destruct x; destruct y; intuition; apply compare_antisym.
- Qed.
- Next Obligation.
- destruct x; destruct y; destruct z; intuition; try discriminate.
- apply (compare_trans _ t0); intuition.
- apply (compare_trans _ n0); intuition.
- Qed.
- Next Obligation.
- intros x y.
- destruct x; destruct y; try discriminate; intros.
- rewrite (compare_eq t t0); intuition.
- rewrite (compare_eq n n0); intuition.
- Qed.
- Next Obligation.
- rewrite in_app_iff.
- destruct x; [left | right]; apply in_map; apply all_list_forall.
- Qed.
-
-End Symbol.
-
-Module Type T.
- Export Tuples.
-
- Include Alphs <+ Symbol.
-
- (** [symbol_semantic_type] maps a symbol to the type of its semantic
- values. **)
- Parameter symbol_semantic_type: symbol -> Type.
-
- (** The type of productions identifiers **)
- Parameter production : Type.
- Declare Instance ProductionAlph : Alphabet production.
-
- (** Accessors for productions: left hand side, right hand side,
- and semantic action. The semantic actions are given in the form
- of curried functions that take their arguments in reverse order. **)
- Parameter prod_lhs: production -> nonterminal.
- Parameter prod_rhs_rev: production -> list symbol.
- Parameter prod_action:
- forall p:production,
- arrows_left
- (map symbol_semantic_type (rev (prod_rhs_rev p)))
- (symbol_semantic_type (NT (prod_lhs p))).
-
-End T.
-
-Module Defs(Import G:T).
-
- (** A token is a terminal and a semantic value for this terminal. **)
- Definition token := {t:terminal & symbol_semantic_type (T t)}.
-
- (** A grammar creates a relation between words of tokens and semantic values.
- This relation is parametrized by the head symbol. It defines the
- "semantics" of the grammar. This relation is defined by a notion of
- parse tree. **)
- Inductive parse_tree:
- forall (head_symbol:symbol) (word:list token)
- (semantic_value:symbol_semantic_type head_symbol), Type :=
-
- (** A single token yields a parse tree whose semantic value is the token's
- own semantic value, with the corresponding terminal as head symbol. **)
- | Terminal_pt:
- forall (t:terminal) (sem:symbol_semantic_type (T t)),
- parse_tree (T t)
- [existT (fun t => symbol_semantic_type (T t)) t sem] sem
-
- (** Given a production, if a word has a list of semantic values for the
- right hand side as head symbols, then this word has the semantic value
- given by the semantic action of the production for the left hand side
- as head symbol.**)
- | Non_terminal_pt:
- forall {p:production} {word:list token}
- {semantic_values:tuple (map symbol_semantic_type (rev (prod_rhs_rev p)))},
- parse_tree_list (rev (prod_rhs_rev p)) word semantic_values ->
- parse_tree (NT (prod_lhs p)) word (uncurry (prod_action p) semantic_values)
-
- (** Basically the same relation as before, but for a list of head symbols (i.e.,
- we are building a forest of syntax trees). It is mutually recursive with the
- previous relation. **)
- with parse_tree_list:
- forall (head_symbols:list symbol) (word:list token)
- (semantic_values:tuple (map symbol_semantic_type head_symbols)),
- Type :=
-
- (** The empty word has [()] as semantic for [[]] as head symbols list **)
- | Nil_ptl: parse_tree_list [] [] ()
-
- (** The cons of the semantic value for one head symbol and for a list of head
- symbols **)
- | Cons_ptl:
- (** The semantic for the head **)
- forall {head_symbolt:symbol} {wordt:list token}
- {semantic_valuet:symbol_semantic_type head_symbolt},
- parse_tree head_symbolt wordt semantic_valuet ->
-
- (** and the semantic for the tail **)
- forall {head_symbolsq:list symbol} {wordq:list token}
- {semantic_valuesq:tuple (map symbol_semantic_type head_symbolsq)},
- parse_tree_list head_symbolsq wordq semantic_valuesq ->
-
- (** give the semantic of the cons **)
- parse_tree_list
- (head_symbolt::head_symbolsq)
- (wordt++wordq)
- (semantic_valuet, semantic_valuesq).
-
-
- Fixpoint pt_size {head_symbol word sem} (tree:parse_tree head_symbol word sem) :=
- match tree with
- | Terminal_pt _ _ => 1
- | Non_terminal_pt l => S (ptl_size l)
- end
- with ptl_size {head_symbols word sems} (tree:parse_tree_list head_symbols word sems) :=
- match tree with
- | Nil_ptl => 0
- | Cons_ptl t q =>
- pt_size t + ptl_size q
- end.
-End Defs.
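
As the comments in the removed Grammar.v state, semantic actions are curried and receive their right-hand-side values in reverse order. A hypothetical sketch of what such an action can look like on the OCaml side (the production, names, and types here are invented for illustration):

  (* Illustrative only: a curried semantic action for a hypothetical production
     expr -> expr PLUS expr; the right-hand-side values arrive right-to-left. *)
  let plus_action : int -> unit -> int -> int =
    fun e_right _plus e_left -> e_left + e_right
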
diff --git a/cparser/MenhirLib/Interpreter.v b/cparser/MenhirLib/Interpreter.v
deleted file mode 100644
index 4ac02693..00000000
--- a/cparser/MenhirLib/Interpreter.v
+++ /dev/null
@@ -1,228 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import Streams.
-Require Import List.
-Require Import Syntax.
-Require Automaton.
-Require Import Alphabet.
-
-Module Make(Import A:Automaton.T).
-
-(** The error monad **)
-Inductive result (A:Type) :=
- | Err: result A
- | OK: A -> result A.
-
-Arguments Err [A].
-Arguments OK [A].
-
-Definition bind {A B: Type} (f: result A) (g: A -> result B): result B :=
- match f with
- | OK x => g x
- | Err => Err
- end.
-
-Definition bind2 {A B C: Type} (f: result (A * B)) (g: A -> B -> result C):
- result C :=
- match f with
- | OK (x, y) => g x y
- | Err => Err
- end.
-
-Notation "'do' X <- A ; B" := (bind A (fun X => B))
- (at level 200, X ident, A at level 100, B at level 200).
-
-Notation "'do' ( X , Y ) <- A ; B" := (bind2 A (fun X Y => B))
- (at level 200, X ident, Y ident, A at level 100, B at level 200).
-
-(** Some operations on streams **)
-
-(** Concatenation of a list and a stream **)
-Fixpoint app_str {A:Type} (l:list A) (s:Stream A) :=
- match l with
- | nil => s
- | cons t q => Cons t (app_str q s)
- end.
-
-Infix "++" := app_str (right associativity, at level 60).
-
-Lemma app_str_app_assoc {A:Type} (l1 l2:list A) (s:Stream A) :
- l1 ++ (l2 ++ s) = (l1 ++ l2) ++ s.
-Proof.
-induction l1.
-reflexivity.
-simpl.
-rewrite IHl1.
-reflexivity.
-Qed.
-
-(** The type of a non-initial state: the type of semantic values associated
- with the last symbol of this state. *)
-Definition noninitstate_type state :=
- symbol_semantic_type (last_symb_of_non_init_state state).
-
-(** The stack of the automaton: it is either nil, or it contains a non-initial
- state, a semantic value for the symbol associated with this state,
- and a nested stack. **)
-Definition stack := list (sigT noninitstate_type). (* eg. list {state & state_type state} *)
-
-Section Init.
-
-Variable init : initstate.
-
-(** The top state of a stack **)
-Definition state_of_stack (stack:stack): state :=
- match stack with
- | [] => init
- | existT _ s _::_ => s
- end.
-
-(** [pop] pops some symbols from the stack. It returns the popped semantic
- values, accumulated by applying [action] to them, and discards the popped
- states. **)
-Fixpoint pop (symbols_to_pop:list symbol) (stack_cur:stack):
- forall {A:Type} (action:arrows_right A (map symbol_semantic_type symbols_to_pop)),
- result (stack * A) :=
- match symbols_to_pop return forall {A:Type} (action:arrows_right A (map _ symbols_to_pop)), result (stack * A) with
- | [] => fun A action => OK (stack_cur, action)
- | t::q => fun A action =>
- match stack_cur with
- | existT _ state_cur sem::stack_rec =>
- match compare_eqdec (last_symb_of_non_init_state state_cur) t with
- | left e =>
- let sem_conv := eq_rect _ symbol_semantic_type sem _ e in
- pop q stack_rec (action sem_conv)
- | right _ => Err
- end
- | [] => Err
- end
- end.
-
-(** [step_result] represents the result of one step of the automaton: it can
- fail, accept or progress. [Fail_sr] means that the input is incorrect.
- [Accept_sr] means that this is the last step of the automaton, and it
- returns the semantic value of the input word. [Progress_sr] means that
- some progress has been made, but new steps are needed in order to accept
- a word.
-
- For [Accept_sr] and [Progress_sr], the result contains the new input buffer.
-
- [Fail_sr] means that the input word is rejected by the automaton. It is
- different from [Err] (from the error monad), which means that the automaton is
- bogus and has performed a forbidden action. **)
-Inductive step_result :=
- | Fail_sr: step_result
- | Accept_sr: symbol_semantic_type (NT (start_nt init)) -> Stream token -> step_result
- | Progress_sr: stack -> Stream token -> step_result.
-
-Program Definition prod_action':
- forall p,
- arrows_right (symbol_semantic_type (NT (prod_lhs p)))
- (map symbol_semantic_type (prod_rhs_rev p)):=
- fun p =>
- eq_rect _ (fun x => x) (prod_action p) _ _.
-Next Obligation.
-unfold arrows_left, arrows_right; simpl.
-rewrite <- fold_left_rev_right, <- map_rev, rev_involutive.
-reflexivity.
-Qed.
-
-(** [reduce_step] performs a reduce action:
- - pops some elements from the stack
- - executes the action of the production
- - follows the goto for the produced non-terminal symbol **)
-Definition reduce_step stack_cur production buffer: result step_result :=
- do (stack_new, sem) <-
- pop (prod_rhs_rev production) stack_cur (prod_action' production);
- match goto_table (state_of_stack stack_new) (prod_lhs production) with
- | Some (exist _ state_new e) =>
- let sem := eq_rect _ _ sem _ e in
- OK (Progress_sr (existT noninitstate_type state_new sem::stack_new) buffer)
- | None =>
- match stack_new with
- | [] =>
- match compare_eqdec (prod_lhs production) (start_nt init) with
- | left e =>
- let sem := eq_rect _ (fun nt => symbol_semantic_type (NT nt)) sem _ e in
- OK (Accept_sr sem buffer)
- | right _ => Err
- end
- | _::_ => Err
- end
- end.
-
-(** One step of parsing. **)
-Definition step stack_cur buffer: result step_result :=
- match action_table (state_of_stack stack_cur) with
- | Default_reduce_act production =>
- reduce_step stack_cur production buffer
- | Lookahead_act awt =>
- match Streams.hd buffer with
- | existT _ term sem =>
- match awt term with
- | Shift_act state_new e =>
- let sem_conv := eq_rect _ symbol_semantic_type sem _ e in
- OK (Progress_sr (existT noninitstate_type state_new sem_conv::stack_cur)
- (Streams.tl buffer))
- | Reduce_act production =>
- reduce_step stack_cur production buffer
- | Fail_action =>
- OK Fail_sr
- end
- end
- end.
-
-(** Parsing uses a [nat] parameter [n_steps], so that we do not have to prove
- termination, which is difficult. The result of a parse is therefore either
- a failure (the automaton has rejected the input word), a timeout
- (the automaton has used up the given [n_steps]), or a parsed semantic
- value together with the rest of the input buffer.
-**)
-Inductive parse_result :=
- | Fail_pr: parse_result
- | Timeout_pr: parse_result
- | Parsed_pr: symbol_semantic_type (NT (start_nt init)) -> Stream token -> parse_result.
-
-Fixpoint parse_fix stack_cur buffer n_steps: result parse_result:=
- match n_steps with
- | O => OK Timeout_pr
- | S it =>
- do r <- step stack_cur buffer;
- match r with
- | Fail_sr => OK Fail_pr
- | Accept_sr t buffer_new => OK (Parsed_pr t buffer_new)
- | Progress_sr s buffer_new => parse_fix s buffer_new it
- end
- end.
-
-Definition parse buffer n_steps: result parse_result :=
- parse_fix [] buffer n_steps.
-
-End Init.
-
-Arguments Fail_sr [init].
-Arguments Accept_sr [init] _ _.
-Arguments Progress_sr [init] _ _.
-
-Arguments Fail_pr [init].
-Arguments Timeout_pr [init].
-Arguments Parsed_pr [init] _ _.
-
-End Make.
-
-Module Type T(A:Automaton.T).
- Include (Make A).
-End T.
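
The removed Interpreter.v documents a fuel-based driver: each call to [step] either fails, accepts, or makes progress, and [parse_fix] keeps stepping until the [n_steps] budget runs out. A simplified OCaml sketch of that loop (illustrative only; the types are stand-ins, and the real extracted code also threads the error monad):

  (* Illustrative only: the fuel-based parsing loop, without the error monad. *)
  type ('stack, 'buf, 'sem) step_result =
    | Fail_sr
    | Accept_sr of 'sem * 'buf
    | Progress_sr of 'stack * 'buf

  type ('buf, 'sem) parse_result =
    | Fail_pr
    | Timeout_pr
    | Parsed_pr of 'sem * 'buf

  let rec parse_fix step stack buf n_steps =
    if n_steps = 0 then Timeout_pr
    else match step stack buf with
      | Fail_sr -> Fail_pr
      | Accept_sr (sem, buf') -> Parsed_pr (sem, buf')
      | Progress_sr (stack', buf') -> parse_fix step stack' buf' (n_steps - 1)
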
diff --git a/cparser/MenhirLib/Interpreter_complete.v b/cparser/MenhirLib/Interpreter_complete.v
deleted file mode 100644
index 2e64b8da..00000000
--- a/cparser/MenhirLib/Interpreter_complete.v
+++ /dev/null
@@ -1,686 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import Streams.
-Require Import ProofIrrelevance.
-Require Import Equality.
-Require Import List.
-Require Import Syntax.
-Require Import Alphabet.
-Require Import Arith.
-Require Grammar.
-Require Automaton.
-Require Interpreter.
-Require Validator_complete.
-
-Module Make(Import A:Automaton.T) (Import Inter:Interpreter.T A).
-Module Import Valid := Validator_complete.Make A.
-
-(** * Completeness Proof **)
-
-Section Completeness_Proof.
-
-Hypothesis complete: complete.
-
-Proposition nullable_stable: nullable_stable.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition first_stable: first_stable.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition start_future: start_future.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition terminal_shift: terminal_shift.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition end_reduce: end_reduce.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition start_goto: start_goto.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition non_terminal_goto: non_terminal_goto.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-Proposition non_terminal_closed: non_terminal_closed.
-Proof. pose proof complete; unfold Valid.complete in H; intuition. Qed.
-
-(** If the nullable predicate has been validated, then it is correct. **)
-Lemma nullable_correct:
- forall head sem word, word = [] ->
- parse_tree head word sem -> nullable_symb head = true
-with nullable_correct_list:
- forall heads sems word, word = [] ->
- parse_tree_list heads word sems -> nullable_word heads = true.
-Proof with eauto.
-intros.
-destruct X.
-congruence.
-apply nullable_stable...
-intros.
-destruct X; simpl...
-apply andb_true_intro.
-apply app_eq_nil in H; destruct H; split...
-Qed.
-
-(** If the first predicate has been validated, then it is correct. **)
-Lemma first_correct:
- forall head sem word t q, word = t::q ->
- parse_tree head word sem ->
- TerminalSet.In (projT1 t) (first_symb_set head)
-with first_correct_list:
- forall heads sems word t q, word = t::q ->
- parse_tree_list heads word sems ->
- TerminalSet.In (projT1 t) (first_word_set heads).
-Proof with eauto.
-intros.
-destruct X.
-inversion H; subst.
-apply TerminalSet.singleton_2, compare_refl...
-apply first_stable...
-intros.
-destruct X.
-congruence.
-simpl.
-case_eq wordt; intros.
-erewrite nullable_correct...
-apply TerminalSet.union_3.
-subst...
-rewrite H0 in *; inversion H; destruct H2.
-destruct (nullable_symb head_symbolt)...
-apply TerminalSet.union_2...
-Qed.
-
-Variable init: initstate.
-Variable full_word: list token.
-Variable buffer_end: Stream token.
-Variable full_sem: symbol_semantic_type (NT (start_nt init)).
-
-Inductive pt_zipper:
- forall (hole_symb:symbol) (hole_word:list token)
- (hole_sem:symbol_semantic_type hole_symb), Type :=
-| Top_ptz:
- pt_zipper (NT (start_nt init)) (full_word) (full_sem)
-| Cons_ptl_ptz:
- forall {head_symbolt:symbol}
- {wordt:list token}
- {semantic_valuet:symbol_semantic_type head_symbolt},
-
- forall {head_symbolsq:list symbol}
- {wordq:list token}
- {semantic_valuesq:tuple (map symbol_semantic_type head_symbolsq)},
- parse_tree_list head_symbolsq wordq semantic_valuesq ->
-
- ptl_zipper (head_symbolt::head_symbolsq) (wordt++wordq)
- (semantic_valuet,semantic_valuesq) ->
-
- pt_zipper head_symbolt wordt semantic_valuet
-with ptl_zipper:
- forall (hole_symbs:list symbol) (hole_word:list token)
- (hole_sems:tuple (map symbol_semantic_type hole_symbs)), Type :=
-| Non_terminal_pt_ptlz:
- forall {p:production} {word:list token}
- {semantic_values:tuple (map symbol_semantic_type (rev (prod_rhs_rev p)))},
- pt_zipper (NT (prod_lhs p)) word (uncurry (prod_action p) semantic_values) ->
- ptl_zipper (rev (prod_rhs_rev p)) word semantic_values
-
-| Cons_ptl_ptlz:
- forall {head_symbolt:symbol}
- {wordt:list token}
- {semantic_valuet:symbol_semantic_type head_symbolt},
- parse_tree head_symbolt wordt semantic_valuet ->
-
- forall {head_symbolsq:list symbol}
- {wordq:list token}
- {semantic_valuesq:tuple (map symbol_semantic_type head_symbolsq)},
-
- ptl_zipper (head_symbolt::head_symbolsq) (wordt++wordq)
- (semantic_valuet,semantic_valuesq) ->
-
- ptl_zipper head_symbolsq wordq semantic_valuesq.
-
-Fixpoint ptlz_cost {hole_symbs hole_word hole_sems}
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems) :=
- match ptlz with
- | Non_terminal_pt_ptlz ptz =>
- ptz_cost ptz
- | Cons_ptl_ptlz pt ptlz' =>
- ptlz_cost ptlz'
- end
-with ptz_cost {hole_symb hole_word hole_sem}
- (ptz:pt_zipper hole_symb hole_word hole_sem) :=
- match ptz with
- | Top_ptz => 0
- | Cons_ptl_ptz ptl ptlz' =>
- 1 + ptl_size ptl + ptlz_cost ptlz'
- end.
-
-Inductive pt_dot: Type :=
-| Reduce_ptd: ptl_zipper [] [] () -> pt_dot
-| Shift_ptd:
- forall (term:terminal) (sem: symbol_semantic_type (T term))
- {symbolsq wordq semsq},
- parse_tree_list symbolsq wordq semsq ->
- ptl_zipper (T term::symbolsq) (existT (fun t => symbol_semantic_type (T t)) term sem::wordq) (sem, semsq) ->
- pt_dot.
-
-Definition ptd_cost (ptd:pt_dot) :=
- match ptd with
- | Reduce_ptd ptlz => ptlz_cost ptlz
- | Shift_ptd _ _ ptl ptlz => 1 + ptl_size ptl + ptlz_cost ptlz
- end.
-
-Fixpoint ptlz_buffer {hole_symbs hole_word hole_sems}
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems): Stream token :=
- match ptlz with
- | Non_terminal_pt_ptlz ptz =>
- ptz_buffer ptz
- | Cons_ptl_ptlz _ ptlz' =>
- ptlz_buffer ptlz'
- end
-with ptz_buffer {hole_symb hole_word hole_sem}
- (ptz:pt_zipper hole_symb hole_word hole_sem): Stream token :=
- match ptz with
- | Top_ptz => buffer_end
- | @Cons_ptl_ptz _ _ _ _ wordq _ ptl ptlz' =>
- wordq++ptlz_buffer ptlz'
- end.
-
-Definition ptd_buffer (ptd:pt_dot) :=
- match ptd with
- | Reduce_ptd ptlz => ptlz_buffer ptlz
- | @Shift_ptd term sem _ wordq _ _ ptlz =>
- Cons (existT (fun t => symbol_semantic_type (T t)) term sem)
- (wordq ++ ptlz_buffer ptlz)
- end.
-
-Fixpoint ptlz_prod {hole_symbs hole_word hole_sems}
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems): production :=
- match ptlz with
- | @Non_terminal_pt_ptlz prod _ _ _ => prod
- | Cons_ptl_ptlz _ ptlz' =>
- ptlz_prod ptlz'
- end.
-
-Fixpoint ptlz_past {hole_symbs hole_word hole_sems}
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems): list symbol :=
- match ptlz with
- | Non_terminal_pt_ptlz _ => []
- | @Cons_ptl_ptlz s _ _ _ _ _ _ ptlz' => s::ptlz_past ptlz'
- end.
-
-Lemma ptlz_past_ptlz_prod:
- forall hole_symbs hole_word hole_sems
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems),
- rev_append hole_symbs (ptlz_past ptlz) = prod_rhs_rev (ptlz_prod ptlz).
-Proof.
-fix ptlz_past_ptlz_prod 4.
-destruct ptlz; simpl.
-rewrite <- rev_alt, rev_involutive; reflexivity.
-apply (ptlz_past_ptlz_prod _ _ _ ptlz).
-Qed.
-
-Definition ptlz_state_compat {hole_symbs hole_word hole_sems}
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems)
- (state:state): Prop :=
- state_has_future state (ptlz_prod ptlz) hole_symbs
- (projT1 (Streams.hd (ptlz_buffer ptlz))).
-
-Fixpoint ptlz_stack_compat {hole_symbs hole_word hole_sems}
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems)
- (stack:stack): Prop :=
- ptlz_state_compat ptlz (state_of_stack init stack) /\
- match ptlz with
- | Non_terminal_pt_ptlz ptz =>
- ptz_stack_compat ptz stack
- | @Cons_ptl_ptlz _ _ sem _ _ _ _ ptlz' =>
- match stack with
- | [] => False
- | existT _ _ sem'::stackq =>
- ptlz_stack_compat ptlz' stackq /\
- sem ~= sem'
- end
- end
-with ptz_stack_compat {hole_symb hole_word hole_sem}
- (ptz:pt_zipper hole_symb hole_word hole_sem)
- (stack:stack): Prop :=
- match ptz with
- | Top_ptz => stack = []
- | Cons_ptl_ptz _ ptlz' =>
- ptlz_stack_compat ptlz' stack
- end.
-
-Lemma ptlz_stack_compat_ptlz_state_compat:
- forall hole_symbs hole_word hole_sems
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems)
- (stack:stack),
- ptlz_stack_compat ptlz stack -> ptlz_state_compat ptlz (state_of_stack init stack).
-Proof.
-destruct ptlz; simpl; intuition.
-Qed.
-
-Definition ptd_stack_compat (ptd:pt_dot) (stack:stack): Prop :=
- match ptd with
- | Reduce_ptd ptlz => ptlz_stack_compat ptlz stack
- | Shift_ptd _ _ _ ptlz => ptlz_stack_compat ptlz stack
- end.
-
-Fixpoint build_pt_dot {hole_symbs hole_word hole_sems}
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems)
- :pt_dot :=
- match ptl in parse_tree_list hole_symbs hole_word hole_sems
- return ptl_zipper hole_symbs hole_word hole_sems -> _
- with
- | Nil_ptl => fun ptlz =>
- Reduce_ptd ptlz
- | Cons_ptl pt ptl' =>
- match pt in parse_tree hole_symb hole_word hole_sem
- return ptl_zipper (hole_symb::_) (hole_word++_) (hole_sem,_) -> _
- with
- | Terminal_pt term sem => fun ptlz =>
- Shift_ptd term sem ptl' ptlz
- | Non_terminal_pt ptl'' => fun ptlz =>
- build_pt_dot ptl''
- (Non_terminal_pt_ptlz (Cons_ptl_ptz ptl' ptlz))
- end
- end ptlz.
-
-Lemma build_pt_dot_cost:
- forall hole_symbs hole_word hole_sems
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems),
- ptd_cost (build_pt_dot ptl ptlz) = ptl_size ptl + ptlz_cost ptlz.
-Proof.
-fix build_pt_dot_cost 4.
-destruct ptl; intros.
-reflexivity.
-destruct p.
-reflexivity.
-simpl; rewrite build_pt_dot_cost.
-simpl; rewrite <- plus_n_Sm, Nat.add_assoc; reflexivity.
-Qed.
-
-Lemma build_pt_dot_buffer:
- forall hole_symbs hole_word hole_sems
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems),
- ptd_buffer (build_pt_dot ptl ptlz) = hole_word ++ ptlz_buffer ptlz.
-Proof.
-fix build_pt_dot_buffer 4.
-destruct ptl; intros.
-reflexivity.
-destruct p.
-reflexivity.
-simpl; rewrite build_pt_dot_buffer.
-apply app_str_app_assoc.
-Qed.
-
-Lemma ptd_stack_compat_build_pt_dot:
- forall hole_symbs hole_word hole_sems
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems)
- (stack:stack),
- ptlz_stack_compat ptlz stack ->
- ptd_stack_compat (build_pt_dot ptl ptlz) stack.
-Proof.
-fix ptd_stack_compat_build_pt_dot 4.
-destruct ptl; intros.
-eauto.
-destruct p.
-eauto.
-simpl.
-apply ptd_stack_compat_build_pt_dot.
-split.
-apply ptlz_stack_compat_ptlz_state_compat, non_terminal_closed in H.
-apply H; clear H; eauto.
-destruct wordq.
-right; split.
-eauto.
-eapply nullable_correct_list; eauto.
-left.
-eapply first_correct_list; eauto.
-eauto.
-Qed.
-
-Program Fixpoint pop_ptlz {hole_symbs hole_word hole_sems}
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems):
- { word:_ & { sem:_ &
- (pt_zipper (NT (prod_lhs (ptlz_prod ptlz))) word sem *
- parse_tree (NT (prod_lhs (ptlz_prod ptlz))) word sem)%type } } :=
- match ptlz in ptl_zipper hole_symbs hole_word hole_sems
- return parse_tree_list hole_symbs hole_word hole_sems ->
- { word:_ & { sem:_ &
- (pt_zipper (NT (prod_lhs (ptlz_prod ptlz))) word sem *
- parse_tree (NT (prod_lhs (ptlz_prod ptlz))) word sem)%type } }
- with
- | @Non_terminal_pt_ptlz prod word sem ptz => fun ptl =>
- let sem := uncurry (prod_action prod) sem in
- existT _ word (existT _ sem (ptz, Non_terminal_pt ptl))
- | Cons_ptl_ptlz pt ptlz' => fun ptl =>
- pop_ptlz (Cons_ptl pt ptl) ptlz'
- end ptl.
-
-Lemma pop_ptlz_cost:
- forall hole_symbs hole_word hole_sems
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems),
- let 'existT _ word (existT _ sem (ptz, pt)) := pop_ptlz ptl ptlz in
- ptlz_cost ptlz = ptz_cost ptz.
-Proof.
-fix pop_ptlz_cost 5.
-destruct ptlz.
-reflexivity.
-simpl; apply pop_ptlz_cost.
-Qed.
-
-Lemma pop_ptlz_buffer:
- forall hole_symbs hole_word hole_sems
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems),
- let 'existT _ word (existT _ sem (ptz, pt)) := pop_ptlz ptl ptlz in
- ptlz_buffer ptlz = ptz_buffer ptz.
-Proof.
-fix pop_ptlz_buffer 5.
-destruct ptlz.
-reflexivity.
-simpl; apply pop_ptlz_buffer.
-Qed.
-
-Lemma pop_ptlz_pop_stack_compat_converter:
- forall A hole_symbs hole_word hole_sems (ptlz:ptl_zipper hole_symbs hole_word hole_sems),
- arrows_left (map symbol_semantic_type (rev (prod_rhs_rev (ptlz_prod ptlz)))) A =
- arrows_left (map symbol_semantic_type hole_symbs)
- (arrows_right A (map symbol_semantic_type (ptlz_past ptlz))).
-Proof.
-intros.
-rewrite <- ptlz_past_ptlz_prod.
-unfold arrows_right, arrows_left.
-rewrite rev_append_rev, map_rev, map_app, map_rev, <- fold_left_rev_right, rev_involutive, fold_right_app.
-rewrite fold_left_rev_right.
-reflexivity.
-Qed.
-
-Lemma pop_ptlz_pop_stack_compat:
- forall hole_symbs hole_word hole_sems
- (ptl:parse_tree_list hole_symbs hole_word hole_sems)
- (ptlz:ptl_zipper hole_symbs hole_word hole_sems)
- (stack:stack),
-
- ptlz_stack_compat ptlz stack ->
-
- let action' :=
- eq_rect _ (fun x=>x) (prod_action (ptlz_prod ptlz)) _
- (pop_ptlz_pop_stack_compat_converter _ _ _ _ _)
- in
- let 'existT _ word (existT _ sem (ptz, pt)) := pop_ptlz ptl ptlz in
- match pop (ptlz_past ptlz) stack (uncurry action' hole_sems) with
- | OK (stack', sem') =>
- ptz_stack_compat ptz stack' /\ sem = sem'
- | Err => True
- end.
-Proof.
-Opaque AlphabetComparable AlphabetComparableUsualEq.
-fix pop_ptlz_pop_stack_compat 5.
-destruct ptlz. intros; simpl.
-split.
-apply H.
-eapply (f_equal (fun X => uncurry X semantic_values)).
-apply JMeq_eq, JMeq_sym, JMeq_eqrect with (P:=fun x => x).
-simpl; intros; destruct stack0.
-destruct (proj2 H).
-simpl in H; destruct H.
-destruct s as (state, sem').
-destruct H0.
-specialize (pop_ptlz_pop_stack_compat _ _ _ (Cons_ptl p ptl) ptlz _ H0).
-destruct (pop_ptlz (Cons_ptl p ptl) ptlz) as [word [sem []]].
-destruct (compare_eqdec (last_symb_of_non_init_state state) head_symbolt); intuition.
-eapply JMeq_sym, JMeq_trans, JMeq_sym, JMeq_eq in H1; [|apply JMeq_eqrect with (e:=e)].
-rewrite <- H1.
-simpl in pop_ptlz_pop_stack_compat.
-erewrite proof_irrelevance with (p1:=pop_ptlz_pop_stack_compat_converter _ _ _ _ _).
-apply pop_ptlz_pop_stack_compat.
-Transparent AlphabetComparable AlphabetComparableUsualEq.
-Qed.
-
-Definition next_ptd (ptd:pt_dot): option pt_dot :=
- match ptd with
- | Shift_ptd term sem ptl ptlz =>
- Some (build_pt_dot ptl (Cons_ptl_ptlz (Terminal_pt term sem) ptlz))
- | Reduce_ptd ptlz =>
- let 'existT _ _ (existT _ _ (ptz, pt)) := pop_ptlz Nil_ptl ptlz in
- match ptz in pt_zipper sym _ _ return parse_tree sym _ _ -> _ with
- | Top_ptz => fun pt => None
- | Cons_ptl_ptz ptl ptlz' =>
- fun pt => Some (build_pt_dot ptl (Cons_ptl_ptlz pt ptlz'))
- end pt
- end.
-
-Lemma next_ptd_cost:
- forall ptd,
- match next_ptd ptd with
- | None => ptd_cost ptd = 0
- | Some ptd' => ptd_cost ptd = S (ptd_cost ptd')
- end.
-Proof.
-destruct ptd. unfold next_ptd.
-pose proof (pop_ptlz_cost _ _ _ Nil_ptl p).
-destruct (pop_ptlz Nil_ptl p) as [word[sem[[]]]].
-assumption.
-rewrite build_pt_dot_cost.
-assumption.
-simpl; rewrite build_pt_dot_cost; reflexivity.
-Qed.
-
-Lemma reduce_step_next_ptd:
- forall (ptlz:ptl_zipper [] [] ()) (stack:stack),
- ptlz_stack_compat ptlz stack ->
- match reduce_step init stack (ptlz_prod ptlz) (ptlz_buffer ptlz) with
- | OK Fail_sr =>
- False
- | OK (Accept_sr sem buffer) =>
- sem = full_sem /\ buffer = buffer_end /\ next_ptd (Reduce_ptd ptlz) = None
- | OK (Progress_sr stack buffer) =>
- match next_ptd (Reduce_ptd ptlz) with
- | None => False
- | Some ptd =>
- ptd_stack_compat ptd stack /\ buffer = ptd_buffer ptd
- end
- | Err =>
- True
- end.
-Proof.
-intros.
-unfold reduce_step, next_ptd.
-apply pop_ptlz_pop_stack_compat with (ptl:=Nil_ptl) in H.
-pose proof (pop_ptlz_buffer _ _ _ Nil_ptl ptlz).
-destruct (pop_ptlz Nil_ptl ptlz) as [word [sem [ptz pt]]].
-rewrite H0; clear H0.
-revert H.
-match goal with
- |- match ?p1 with Err => _ | OK _ => _ end -> match bind2 ?p2 _ with Err => _ | OK _ => _ end =>
- replace p1 with p2; [destruct p2 as [|[]]; intros|]
-end.
-assumption.
-simpl.
-destruct H; subst.
-generalize dependent s0.
-generalize (prod_lhs (ptlz_prod ptlz)); clear ptlz stack0.
-dependent destruction ptz; intros.
-simpl in H; subst; simpl.
-pose proof start_goto; unfold Valid.start_goto in H; rewrite H.
-destruct (compare_eqdec (start_nt init) (start_nt init)); intuition.
-apply JMeq_eq, JMeq_eqrect with (P:=fun nt => symbol_semantic_type (NT nt)).
-pose proof (ptlz_stack_compat_ptlz_state_compat _ _ _ _ _ H).
-apply non_terminal_goto in H0.
-destruct (goto_table (state_of_stack init s) n) as [[]|]; intuition.
-apply ptd_stack_compat_build_pt_dot; simpl; intuition.
-symmetry; apply JMeq_eqrect with (P:=symbol_semantic_type).
-symmetry; apply build_pt_dot_buffer.
-destruct s; intuition.
-simpl in H; apply ptlz_stack_compat_ptlz_state_compat in H.
-destruct (H0 _ _ _ H eq_refl).
-generalize (pop_ptlz_pop_stack_compat_converter (symbol_semantic_type (NT (prod_lhs (ptlz_prod ptlz)))) _ _ _ ptlz).
-pose proof (ptlz_past_ptlz_prod _ _ _ ptlz); simpl in H.
-rewrite H; clear H.
-intro; f_equal; simpl.
-apply JMeq_eq.
-etransitivity.
-apply JMeq_eqrect with (P:=fun x => x).
-symmetry.
-apply JMeq_eqrect with (P:=fun x => x).
-Qed.
-
-Lemma step_next_ptd:
- forall (ptd:pt_dot) (stack:stack),
- ptd_stack_compat ptd stack ->
- match step init stack (ptd_buffer ptd) with
- | OK Fail_sr =>
- False
- | OK (Accept_sr sem buffer) =>
- sem = full_sem /\ buffer = buffer_end /\ next_ptd ptd = None
- | OK (Progress_sr stack buffer) =>
- match next_ptd ptd with
- | None => False
- | Some ptd =>
- ptd_stack_compat ptd stack /\ buffer = ptd_buffer ptd
- end
- | Err =>
- True
- end.
-Proof.
-intros.
-destruct ptd.
-pose proof (ptlz_stack_compat_ptlz_state_compat _ _ _ _ _ H).
-apply end_reduce in H0.
-unfold step.
-destruct (action_table (state_of_stack init stack0)).
-rewrite H0 by reflexivity.
-apply reduce_step_next_ptd; assumption.
-simpl; destruct (Streams.hd (ptlz_buffer p)); simpl in H0.
-destruct (l x); intuition; rewrite H1.
-apply reduce_step_next_ptd; assumption.
-pose proof (ptlz_stack_compat_ptlz_state_compat _ _ _ _ _ H).
-apply terminal_shift in H0.
-unfold step.
-destruct (action_table (state_of_stack init stack0)); intuition.
-simpl; destruct (Streams.hd (ptlz_buffer p0)) as [] eqn:?.
-destruct (l term); intuition.
-apply ptd_stack_compat_build_pt_dot; simpl; intuition.
-unfold ptlz_state_compat; simpl; destruct Heqt; assumption.
-symmetry; apply JMeq_eqrect with (P:=symbol_semantic_type).
-rewrite build_pt_dot_buffer; reflexivity.
-Qed.
-
-Lemma parse_fix_complete:
- forall (ptd:pt_dot) (stack:stack) (n_steps:nat),
- ptd_stack_compat ptd stack ->
- match parse_fix init stack (ptd_buffer ptd) n_steps with
- | OK (Parsed_pr sem_res buffer_end_res) =>
- sem_res = full_sem /\ buffer_end_res = buffer_end /\
- S (ptd_cost ptd) <= n_steps
- | OK Fail_pr => False
- | OK Timeout_pr => n_steps < S (ptd_cost ptd)
- | Err => True
- end.
-Proof.
-fix parse_fix_complete 3.
-destruct n_steps; intros; simpl.
-apply Nat.lt_0_succ.
-apply step_next_ptd in H.
-pose proof (next_ptd_cost ptd).
-destruct (step init stack0 (ptd_buffer ptd)) as [|[]]; simpl; intuition.
-rewrite H3 in H0; rewrite H0.
-apply le_n_S, Nat.le_0_l.
-destruct (next_ptd ptd); intuition; subst.
-eapply parse_fix_complete with (n_steps:=n_steps) in H1.
-rewrite H0.
-destruct (parse_fix init s (ptd_buffer p) n_steps) as [|[]]; try assumption.
-apply lt_n_S; assumption.
-destruct H1 as [H1 []]; split; [|split]; try assumption.
-apply le_n_S; assumption.
-Qed.
-
-Variable full_pt: parse_tree (NT (start_nt init)) full_word full_sem.
-
-Definition init_ptd :=
- match full_pt in parse_tree head full_word full_sem return
- pt_zipper head full_word full_sem ->
- match head return Type with | T _ => unit | NT _ => pt_dot end
- with
- | Terminal_pt _ _ => fun _ => ()
- | Non_terminal_pt ptl =>
- fun top => build_pt_dot ptl (Non_terminal_pt_ptlz top)
- end Top_ptz.
-
-Lemma init_ptd_compat:
- ptd_stack_compat init_ptd [].
-Proof.
-unfold init_ptd.
-assert (ptz_stack_compat Top_ptz []) by reflexivity.
-pose proof (start_future init); revert H0.
-generalize dependent Top_ptz.
-generalize dependent full_word.
-generalize full_sem.
-generalize (start_nt init).
-dependent destruction full_pt0.
-intros.
-apply ptd_stack_compat_build_pt_dot; simpl; intuition.
-apply H0; reflexivity.
-Qed.
-
-Lemma init_ptd_cost:
- S (ptd_cost init_ptd) = pt_size full_pt.
-Proof.
-unfold init_ptd.
-assert (ptz_cost Top_ptz = 0) by reflexivity.
-generalize dependent Top_ptz.
-generalize dependent full_word.
-generalize full_sem.
-generalize (start_nt init).
-dependent destruction full_pt0.
-intros.
-rewrite build_pt_dot_cost; simpl.
-rewrite H, Nat.add_0_r; reflexivity.
-Qed.
-
-Lemma init_ptd_buffer:
- ptd_buffer init_ptd = full_word ++ buffer_end.
-Proof.
-unfold init_ptd.
-assert (ptz_buffer Top_ptz = buffer_end) by reflexivity.
-generalize dependent Top_ptz.
-generalize dependent full_word.
-generalize full_sem.
-generalize (start_nt init).
-dependent destruction full_pt0.
-intros.
-rewrite build_pt_dot_buffer; simpl.
-rewrite H; reflexivity.
-Qed.
-
-Theorem parse_complete n_steps:
- match parse init (full_word ++ buffer_end) n_steps with
- | OK (Parsed_pr sem_res buffer_end_res) =>
- sem_res = full_sem /\ buffer_end_res = buffer_end /\
- pt_size full_pt <= n_steps
- | OK Fail_pr => False
- | OK Timeout_pr => n_steps < pt_size full_pt
- | Err => True
- end.
-Proof.
-pose proof (parse_fix_complete init_ptd [] n_steps init_ptd_compat).
-rewrite init_ptd_buffer, init_ptd_cost in H.
-apply H.
-Qed.
-
-End Completeness_Proof.
-
-End Make.
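
The completeness proof above rests on a counting argument: [next_ptd] strictly decreases [ptd_cost], so [pt_size full_pt] units of fuel suffice for [parse_fix]. The same argument in isolation, as a minimal Coq sketch over [nat] (the names [run_fuel], [step] and [cost] below are illustrative, not part of MenhirLib):

Require Import Lia.

(* A step function returns [Some y] to continue with [y], or [None] when done. *)
Fixpoint run_fuel (step : nat -> option nat) (fuel x : nat) : option nat :=
  match fuel with
  | 0 => None                            (* out of fuel: timeout *)
  | S fuel' =>
      match step x with
      | None => Some x                   (* no further step: finished *)
      | Some y => run_fuel step fuel' y  (* one unit of fuel consumed *)
      end
  end.

(* If every step strictly decreases a cost measure, then any fuel amount
   strictly greater than the initial cost is enough to finish. *)
Lemma run_fuel_enough (step : nat -> option nat) (cost : nat -> nat) :
  (forall x y, step x = Some y -> cost y < cost x) ->
  forall fuel x, cost x < fuel -> exists r, run_fuel step fuel x = Some r.
Proof.
  intros Hdec fuel. induction fuel as [|fuel IH]; intros x Hlt.
  - exfalso. lia.
  - simpl. destruct (step x) as [y|] eqn:Hs.
    + apply IH. specialize (Hdec _ _ Hs). lia.
    + eexists; reflexivity.
Qed.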
diff --git a/cparser/MenhirLib/Interpreter_correct.v b/cparser/MenhirLib/Interpreter_correct.v
deleted file mode 100644
index 1263d4e3..00000000
--- a/cparser/MenhirLib/Interpreter_correct.v
+++ /dev/null
@@ -1,228 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import Streams.
-Require Import List.
-Require Import Syntax.
-Require Import Equality.
-Require Import Alphabet.
-Require Grammar.
-Require Automaton.
-Require Interpreter.
-
-Module Make(Import A:Automaton.T) (Import Inter:Interpreter.T A).
-
-(** * Correctness of the interpreter **)
-
-(** We prove that, in any case, if the interpreter accepts and returns a
-    semantic value, then it is a semantic value of the input. **)
-
-Section Init.
-
-Variable init:initstate.
-
-(** [word_has_stack_semantics] relates a word with a stack, stating that the
-    word is a concatenation of words that have the semantic values stored in
-    the stack. **)
-Inductive word_has_stack_semantics:
- forall (word:list token) (stack:stack), Prop :=
- | Nil_stack_whss: word_has_stack_semantics [] []
- | Cons_stack_whss:
- forall (wordq:list token) (stackq:stack),
- word_has_stack_semantics wordq stackq ->
-
- forall (wordt:list token) (s:noninitstate)
- (semantic_valuet:_),
- inhabited (parse_tree (last_symb_of_non_init_state s) wordt semantic_valuet) ->
-
- word_has_stack_semantics
- (wordq++wordt) (existT noninitstate_type s semantic_valuet::stackq).
-
-Lemma pop_invariant_converter:
- forall A symbols_to_pop symbols_popped,
- arrows_left (map symbol_semantic_type (rev_append symbols_to_pop symbols_popped)) A =
- arrows_left (map symbol_semantic_type symbols_popped)
- (arrows_right A (map symbol_semantic_type symbols_to_pop)).
-Proof.
-intros.
-unfold arrows_right, arrows_left.
-rewrite rev_append_rev, map_app, map_rev, fold_left_app.
-apply f_equal.
-rewrite <- fold_left_rev_right, rev_involutive.
-reflexivity.
-Qed.
-
-(** [pop] preserves the invariant **)
-Lemma pop_invariant:
- forall (symbols_to_pop symbols_popped:list symbol)
- (stack_cur:stack)
- (A:Type)
- (action:arrows_left (map symbol_semantic_type (rev_append symbols_to_pop symbols_popped)) A),
- forall word_stack word_popped,
- forall sem_popped,
- word_has_stack_semantics word_stack stack_cur ->
- inhabited (parse_tree_list symbols_popped word_popped sem_popped) ->
- let action' := eq_rect _ (fun x=>x) action _ (pop_invariant_converter _ _ _) in
- match pop symbols_to_pop stack_cur (uncurry action' sem_popped) with
- | OK (stack_new, sem) =>
- exists word1res word2res sem_full,
- (word_stack = word1res ++ word2res)%list /\
- word_has_stack_semantics word1res stack_new /\
- sem = uncurry action sem_full /\
- inhabited (
- parse_tree_list (rev_append symbols_to_pop symbols_popped) (word2res++word_popped) sem_full)
- | Err => True
- end.
-Proof.
-induction symbols_to_pop; intros; unfold pop; fold pop.
-exists word_stack, ([]:list token), sem_popped; intuition.
-f_equal.
-apply JMeq_eq, JMeq_eqrect with (P:=(fun x => x)).
-destruct stack_cur as [|[]]; eauto.
-destruct (compare_eqdec (last_symb_of_non_init_state x) a); eauto.
-destruct e; simpl.
-dependent destruction H.
-destruct H0, H1. apply (Cons_ptl X), inhabits in X0.
-specialize (IHsymbols_to_pop _ _ _ action0 _ _ _ H X0).
-match goal with
- IHsymbols_to_pop:match ?p1 with Err => _ | OK _ => _ end |- match ?p2 with Err => _ | OK _ => _ end =>
- replace p2 with p1; [destruct p1 as [|[]]|]; intuition
-end.
-destruct IHsymbols_to_pop as [word1res [word2res [sem_full []]]]; intuition; subst.
-exists word1res.
-eexists.
-exists sem_full.
-intuition.
-rewrite <- app_assoc; assumption.
-simpl; f_equal; f_equal.
-apply JMeq_eq.
-etransitivity.
-apply JMeq_eqrect with (P:=(fun x => x)).
-symmetry.
-apply JMeq_eqrect with (P:=(fun x => x)).
-Qed.
-
-(** [reduce_step] preserves the invariant **)
-Lemma reduce_step_invariant (stack:stack) (prod:production):
- forall word buffer, word_has_stack_semantics word stack ->
- match reduce_step init stack prod buffer with
- | OK (Accept_sr sem buffer_new) =>
- buffer = buffer_new /\
- inhabited (parse_tree (NT (start_nt init)) word sem)
- | OK (Progress_sr stack_new buffer_new) =>
- buffer = buffer_new /\
- word_has_stack_semantics word stack_new
- | Err | OK Fail_sr => True
- end.
-Proof with eauto.
-intros.
-unfold reduce_step.
-pose proof (pop_invariant (prod_rhs_rev prod) [] stack (symbol_semantic_type (NT (prod_lhs prod)))).
-revert H0.
-generalize (pop_invariant_converter (symbol_semantic_type (NT (prod_lhs prod))) (prod_rhs_rev prod) []).
-rewrite <- rev_alt.
-intros.
-specialize (H0 (prod_action prod) _ [] () H (inhabits Nil_ptl)).
-match goal with H0:let action' := ?a in _ |- _ => replace a with (prod_action' prod) in H0 end.
-simpl in H0.
-destruct (pop (prod_rhs_rev prod) stack (prod_action' prod)) as [|[]]; intuition.
-destruct H0 as [word1res [word2res [sem_full]]]; intuition.
-destruct H4; apply Non_terminal_pt, inhabits in X.
-match goal with X:inhabited (parse_tree _ _ ?u) |- _ => replace u with s0 in X end.
-clear sem_full H2.
-simpl; destruct (goto_table (state_of_stack init s) (prod_lhs prod)) as [[]|]; intuition; subst.
-rewrite app_nil_r in X; revert s0 X; rewrite e0; intro; simpl.
-constructor...
-destruct s; intuition.
-destruct (compare_eqdec (prod_lhs prod) (start_nt init)); intuition.
-rewrite app_nil_r in X.
-rewrite <- e0.
-inversion H0.
-destruct X; constructor...
-apply JMeq_eq.
-etransitivity.
-apply JMeq_eqrect with (P:=(fun x => x)).
-symmetry.
-apply JMeq_eqrect with (P:=(fun x => x)).
-Qed.
-
-(** [step] preserves the invariant **)
-Lemma step_invariant (stack:stack) (buffer:Stream token):
- forall buffer_tmp,
- (exists word_old,
- buffer = word_old ++ buffer_tmp /\
- word_has_stack_semantics word_old stack) ->
- match step init stack buffer_tmp with
- | OK (Accept_sr sem buffer_new) =>
- exists word_new,
- buffer = word_new ++ buffer_new /\
- inhabited (parse_tree (NT (start_nt init)) word_new sem)
- | OK (Progress_sr stack_new buffer_new) =>
- exists word_new,
- buffer = word_new ++ buffer_new /\
- word_has_stack_semantics word_new stack_new
- | Err | OK Fail_sr => True
- end.
-Proof with eauto.
-intros.
-destruct H, H.
-unfold step.
-destruct (action_table (state_of_stack init stack)).
-pose proof (reduce_step_invariant stack p x buffer_tmp).
-destruct (reduce_step init stack p buffer_tmp) as [|[]]; intuition; subst...
-destruct buffer_tmp.
-unfold Streams.hd.
-destruct t.
-destruct (l x0); intuition.
-exists (x ++ [existT (fun t => symbol_semantic_type (T t)) x0 s])%list.
-split.
-now rewrite <- app_str_app_assoc; intuition.
-apply Cons_stack_whss; intuition.
-destruct e; simpl.
-now exact (inhabits (Terminal_pt _ _)).
-match goal with |- (match reduce_step init stack p ?buff with Err => _ | OK _ => _ end) =>
- pose proof (reduce_step_invariant stack p x buff);
- destruct (reduce_step init stack p buff) as [|[]]; intuition; subst
-end...
-Qed.
-
-(** The interpreter is correct: if it returns a semantic value, then the input
- word has this semantic value.
-**)
-Theorem parse_correct buffer n_steps:
- match parse init buffer n_steps with
- | OK (Parsed_pr sem buffer_new) =>
- exists word_new,
- buffer = word_new ++ buffer_new /\
- inhabited (parse_tree (NT (start_nt init)) word_new sem)
- | _ => True
- end.
-Proof.
-unfold parse.
-assert (exists w, buffer = w ++ buffer /\ word_has_stack_semantics w []).
-exists ([]:list token); intuition.
-now apply Nil_stack_whss.
-revert H.
-generalize ([]:stack), buffer at 2 3.
-induction n_steps; simpl; intuition.
-pose proof (step_invariant _ _ _ H).
-destruct (step init s buffer0); simpl; intuition.
-destruct s0; intuition.
-apply IHn_steps; intuition.
-Qed.
-
-End Init.
-
-End Make.
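
Note that [parse_correct] only claims [inhabited (parse_tree ...)]: the existence of a parse tree is recorded in [Prop], without exhibiting a particular tree. As a small, library-independent reminder of how such facts are manipulated (plain Coq, not MenhirLib code):

(* [inhabited A] lives in Prop, so it can be destructed while proving another
   Prop; here we map a function over the hidden witness. *)
Lemma inhabited_map (A B : Type) (f : A -> B) : inhabited A -> inhabited B.
Proof. intros [a]. exact (inhabits (f a)). Qed.

(* Witnesses are introduced with [inhabits]. *)
Check (inhabits 3 : inhabited nat).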
diff --git a/cparser/MenhirLib/Interpreter_safe.v b/cparser/MenhirLib/Interpreter_safe.v
deleted file mode 100644
index a1aa35b8..00000000
--- a/cparser/MenhirLib/Interpreter_safe.v
+++ /dev/null
@@ -1,275 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import Streams.
-Require Import Equality.
-Require Import List.
-Require Import Syntax.
-Require Import Alphabet.
-Require Grammar.
-Require Automaton.
-Require Validator_safe.
-Require Interpreter.
-
-Module Make(Import A:Automaton.T) (Import Inter:Interpreter.T A).
-Module Import Valid := Validator_safe.Make A.
-
-(** * A correct automaton does not crash **)
-
-Section Safety_proof.
-
-Hypothesis safe: safe.
-
-Proposition shift_head_symbs: shift_head_symbs.
-Proof. pose proof safe; unfold Valid.safe in H; intuition. Qed.
-Proposition goto_head_symbs: goto_head_symbs.
-Proof. pose proof safe; unfold Valid.safe in H; intuition. Qed.
-Proposition shift_past_state: shift_past_state.
-Proof. pose proof safe; unfold Valid.safe in H; intuition. Qed.
-Proposition goto_past_state: goto_past_state.
-Proof. pose proof safe; unfold Valid.safe in H; intuition. Qed.
-Proposition reduce_ok: reduce_ok.
-Proof. pose proof safe; unfold Valid.safe in H; intuition. Qed.
-
-(** We prove that a correct automaton won't crash: the interpreter will
-    not return [Err]. **)
-
-Variable init : initstate.
-
-(** The stack of states of an automaton stack **)
-Definition state_stack_of_stack (stack:stack) :=
- (List.map
- (fun cell:sigT noninitstate_type => singleton_state_pred (projT1 cell))
- stack ++ [singleton_state_pred init])%list.
-
-(** The stack of symbols of an automaton stack **)
-Definition symb_stack_of_stack (stack:stack) :=
- List.map
- (fun cell:sigT noninitstate_type => last_symb_of_non_init_state (projT1 cell))
- stack.
-
-(** The stack invariant: it basically states that the assumptions on the
- states are true. **)
-Inductive stack_invariant: stack -> Prop :=
- | stack_invariant_constr:
- forall stack,
- prefix (head_symbs_of_state (state_of_stack init stack))
- (symb_stack_of_stack stack) ->
- prefix_pred (head_states_of_state (state_of_stack init stack))
- (state_stack_of_stack stack) ->
- stack_invariant_next stack ->
- stack_invariant stack
-with stack_invariant_next: stack -> Prop :=
- | stack_invariant_next_nil:
- stack_invariant_next []
- | stack_invariant_next_cons:
- forall state_cur st stack_rec,
- stack_invariant stack_rec ->
- stack_invariant_next (existT _ state_cur st::stack_rec).
-
-(** [pop] preserves the stack invariant and does not crash
- under the assumption that we can pop at this place.
- Moreover, after a pop, the top state of the stack is allowed. **)
-Lemma pop_stack_invariant_conserved
- (symbols_to_pop:list symbol) (stack_cur:stack) A action:
- stack_invariant stack_cur ->
- prefix symbols_to_pop (head_symbs_of_state (state_of_stack init stack_cur)) ->
- match pop symbols_to_pop stack_cur (A:=A) action with
- | OK (stack_new, _) =>
- stack_invariant stack_new /\
- state_valid_after_pop
- (state_of_stack init stack_new) symbols_to_pop
- (head_states_of_state (state_of_stack init stack_cur))
- | Err => False
- end.
-Proof with eauto.
- intros.
- pose proof H.
- destruct H.
- revert H H0 H1 H2 H3.
- generalize (head_symbs_of_state (state_of_stack init stack0)).
- generalize (head_states_of_state (state_of_stack init stack0)).
- revert stack0 A action.
- induction symbols_to_pop; intros.
- - split...
- destruct l; constructor.
- inversion H2; subst.
- specialize (H7 (state_of_stack init stack0)).
- destruct (f2 (state_of_stack init stack0)) as [] eqn:? ...
- destruct stack0 as [|[]]; simpl in *.
- + inversion H6; subst.
- unfold singleton_state_pred in Heqb0.
- now rewrite compare_refl in Heqb0; discriminate.
- + inversion H6; subst.
- unfold singleton_state_pred in Heqb0.
- now rewrite compare_refl in Heqb0; discriminate.
- - destruct stack0 as [|[]]; unfold pop.
- + inversion H0; subst.
- now inversion H.
- + fold pop.
- destruct (compare_eqdec (last_symb_of_non_init_state x) a).
- * inversion H0; subst. clear H0.
- inversion H; subst. clear H.
- dependent destruction H3; simpl.
- assert (prefix_pred (List.tl l) (state_stack_of_stack stack0)).
- unfold tl; destruct l; [constructor | inversion H2]...
- pose proof H. destruct H3.
- specialize (IHsymbols_to_pop stack0 A (action0 n) _ _ H4 H7 H H0 H6).
- revert IHsymbols_to_pop.
- fold (noninitstate_type x); generalize (pop symbols_to_pop stack0 (action0 n)).
- destruct r as [|[]]; intuition...
- destruct l; constructor...
- * apply n0.
- inversion H0; subst.
- inversion H; subst...
-Qed.
-
-(** [prefix] is transitive **)
-Lemma prefix_ass:
- forall (l1 l2 l3:list symbol), prefix l1 l2 -> prefix l2 l3 -> prefix l1 l3.
-Proof.
-induction l1; intros.
-constructor.
-inversion H; subst.
-inversion H0; subst.
-constructor; eauto.
-Qed.
-
-(** [prefix_pred] is transitive **)
-Lemma prefix_pred_ass:
- forall (l1 l2 l3:list (state->bool)),
- prefix_pred l1 l2 -> prefix_pred l2 l3 -> prefix_pred l1 l3.
-Proof.
-induction l1; intros.
-constructor.
-inversion H; subst.
-inversion H0; subst.
-constructor; eauto.
-intro.
-specialize (H3 x).
-specialize (H4 x).
-destruct (f0 x); simpl in *; intuition.
-rewrite H4 in H3; intuition.
-Qed.
-
-(** If we have the right to reduce at this state, then the stack invariant
-    is preserved by [reduce_step] and [reduce_step] does not crash. **)
-Lemma reduce_step_stack_invariant_conserved stack prod buff:
- stack_invariant stack ->
- valid_for_reduce (state_of_stack init stack) prod ->
- match reduce_step init stack prod buff with
- | OK (Fail_sr | Accept_sr _ _) => True
- | OK (Progress_sr stack_new _) => stack_invariant stack_new
- | Err => False
- end.
-Proof with eauto.
-unfold valid_for_reduce.
-intuition.
-unfold reduce_step.
-pose proof (pop_stack_invariant_conserved (prod_rhs_rev prod) stack _ (prod_action' prod)).
-destruct (pop (prod_rhs_rev prod) stack (prod_action' prod)) as [|[]].
-apply H0...
-destruct H0...
-pose proof (goto_head_symbs (state_of_stack init s) (prod_lhs prod)).
-pose proof (goto_past_state (state_of_stack init s) (prod_lhs prod)).
-unfold bind2.
-destruct H0.
-specialize (H2 _ H3)...
-destruct (goto_table (state_of_stack init stack0) (prod_lhs prod)) as [[]|].
-constructor.
-simpl.
-constructor.
-eapply prefix_ass...
-unfold state_stack_of_stack; simpl; constructor.
-intro; destruct (singleton_state_pred x x0); reflexivity.
-eapply prefix_pred_ass...
-constructor...
-constructor...
-destruct stack0 as [|[]]...
-destruct (compare_eqdec (prod_lhs prod) (start_nt init))...
-apply n, H2, eq_refl.
-apply H2, eq_refl.
-Qed.
-
-(** If the automaton is safe, then the stack invariant is preserved by
-    [step] and [step] does not crash. **)
-Lemma step_stack_invariant_conserved (stack:stack) buffer:
- stack_invariant stack ->
- match step init stack buffer with
- | OK (Fail_sr | Accept_sr _ _) => True
- | OK (Progress_sr stack_new _) => stack_invariant stack_new
- | Err => False
- end.
-Proof with eauto.
-intro.
-unfold step.
-pose proof (shift_head_symbs (state_of_stack init stack)).
-pose proof (shift_past_state (state_of_stack init stack)).
-pose proof (reduce_ok (state_of_stack init stack)).
-destruct (action_table (state_of_stack init stack)).
-apply reduce_step_stack_invariant_conserved...
-destruct buffer as [[]]; simpl.
-specialize (H0 x); specialize (H1 x); specialize (H2 x).
-destruct (l x)...
-destruct H.
-constructor.
-unfold state_of_stack.
-constructor.
-eapply prefix_ass...
-unfold state_stack_of_stack; simpl; constructor.
-intro; destruct (singleton_state_pred s0 x0)...
-eapply prefix_pred_ass...
-constructor...
-constructor...
-apply reduce_step_stack_invariant_conserved...
-Qed.
-
-(** If the automaton is safe, then it does not crash **)
-Theorem parse_no_err buffer n_steps:
- parse init buffer n_steps <> Err.
-Proof with eauto.
-unfold parse.
-assert (stack_invariant []).
-constructor.
-constructor.
-unfold state_stack_of_stack; simpl; constructor.
-intro; destruct (singleton_state_pred init x)...
-constructor.
-constructor.
-revert H.
-generalize buffer ([]:stack).
-induction n_steps; simpl.
-now discriminate.
-intros.
-pose proof (step_stack_invariant_conserved s buffer0 H).
-destruct (step init s buffer0) as [|[]]; simpl...
-discriminate.
-discriminate.
-Qed.
-
-(** A version of [parse] that uses safety in order to return a
-    [parse_result] rather than a [result parse_result]: we have proven that
-    parsing does not return [Err]. **)
-Definition parse_with_safe (buffer:Stream token) (n_steps:nat):
- parse_result init.
-Proof with eauto.
-pose proof (parse_no_err buffer n_steps).
-destruct (parse init buffer n_steps)...
-congruence.
-Defined.
-
-End Safety_proof.
-
-End Make.
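
[parse_with_safe] above is an instance of a standard pattern: once [parse_no_err] shows the [Err] case is impossible, the result can be unwrapped into a plain [parse_result]. A minimal standalone sketch of that pattern (the [res] type and [extract] below are illustrative, not the library's definitions):

Inductive res (A : Type) : Type := Error | Value (a : A).
Arguments Error {A}.
Arguments Value {A} a.

(* Unwrap a result, given a proof that the error case cannot happen. *)
Definition extract {A : Type} (r : res A) : r <> Error -> A :=
  match r return r <> Error -> A with
  | Error => fun H => False_rect A (H eq_refl)
  | Value a => fun _ => a
  end.

Lemma value_3_not_error : Value 3 <> Error.
Proof. discriminate. Qed.

Example extract_example : extract (Value 3) value_3_not_error = 3.
Proof. reflexivity. Qed.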
diff --git a/cparser/MenhirLib/Main.v b/cparser/MenhirLib/Main.v
deleted file mode 100644
index 1a17e988..00000000
--- a/cparser/MenhirLib/Main.v
+++ /dev/null
@@ -1,92 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Grammar.
-Require Automaton.
-Require Interpreter_safe.
-Require Interpreter_correct.
-Require Interpreter_complete.
-Require Import Syntax.
-
-Module Make(Export Aut:Automaton.T).
-Export Aut.Gram.
-Export Aut.GramDefs.
-
-Module Import Inter := Interpreter.Make Aut.
-Module Safe := Interpreter_safe.Make Aut Inter.
-Module Correct := Interpreter_correct.Make Aut Inter.
-Module Complete := Interpreter_complete.Make Aut Inter.
-
-Definition complete_validator:unit->bool := Complete.Valid.is_complete.
-Definition safe_validator:unit->bool := Safe.Valid.is_safe.
-Definition parse (safe:safe_validator ()=true) init n_steps buffer : parse_result init:=
- Safe.parse_with_safe (Safe.Valid.is_safe_correct safe) init buffer n_steps.
-
-(** Correctness theorem. **)
-Theorem parse_correct
- (safe:safe_validator ()= true) init n_steps buffer:
- match parse safe init n_steps buffer with
- | Parsed_pr sem buffer_new =>
- exists word,
- buffer = word ++ buffer_new /\ inhabited (parse_tree (NT (start_nt init)) word sem)
- | _ => True
- end.
-Proof.
-unfold parse, Safe.parse_with_safe.
-pose proof (Correct.parse_correct init buffer n_steps).
-generalize (Safe.parse_no_err (Safe.Valid.is_safe_correct safe) init buffer n_steps).
-destruct (Inter.parse init buffer n_steps); intros.
-now destruct (n (eq_refl _)).
-now destruct p; trivial.
-Qed.
-
-(** Completeness theorem. **)
-Theorem parse_complete
- (safe:safe_validator () = true) init n_steps word buffer_end sem:
- complete_validator () = true ->
- forall tree:parse_tree (NT (start_nt init)) word sem,
- match parse safe init n_steps (word ++ buffer_end) with
- | Fail_pr => False
- | Parsed_pr sem_res buffer_end_res =>
- sem_res = sem /\ buffer_end_res = buffer_end /\ pt_size tree <= n_steps
- | Timeout_pr => n_steps < pt_size tree
- end.
-Proof.
-intros.
-unfold parse, Safe.parse_with_safe.
-pose proof (Complete.parse_complete (Complete.Valid.is_complete_correct H) init _ buffer_end _ tree n_steps).
-generalize (Safe.parse_no_err (Safe.Valid.is_safe_correct safe) init (word ++ buffer_end) n_steps).
-destruct (Inter.parse init (word ++ buffer_end) n_steps); intros.
-now destruct (n eq_refl).
-now exact H0.
-Qed.
-
-(** Unambiguity theorem. **)
-Theorem unambiguity:
- safe_validator () = true -> complete_validator () = true -> inhabited token ->
- forall init word,
- forall sem1 (tree1:parse_tree (NT (start_nt init)) word sem1),
- forall sem2 (tree2:parse_tree (NT (start_nt init)) word sem2),
- sem1 = sem2.
-Proof.
-intros.
-destruct H1.
-pose proof (parse_complete H init (pt_size tree1) word (Streams.const X) sem1) H0 tree1.
-pose proof (parse_complete H init (pt_size tree1) word (Streams.const X) sem2) H0 tree2.
-destruct (parse H init (pt_size tree1) (word ++ Streams.const X)); intuition.
-rewrite <- H3, H1; reflexivity.
-Qed.
-
-End Make.
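
[parse] above consumes the validator's verdict as an equality proof ([safe_validator () = true]), so the boolean check runs once, by computation, and the proof is threaded to the interpreter. A generic sketch of this "check once, pass the equation" idiom (the names [check], [run_checked] and [run_validated] are made up for illustration):

(* A stand-in boolean validator. *)
Definition check : unit -> bool := fun _ => true.

(* A function that demands the validation result as a proof. *)
Definition run_checked (H : check tt = true) (x : nat) : nat := x + 1.

(* The wrapper performs the check; in the [true] branch, the usual convoy
   pattern provides the equation. *)
Definition run_validated (x : nat) : option nat :=
  match check tt as b return check tt = b -> option nat with
  | true => fun H => Some (run_checked H x)
  | false => fun _ => None
  end eq_refl.

Example run_validated_example : run_validated 41 = Some 42.
Proof. reflexivity. Qed.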
diff --git a/cparser/MenhirLib/Tuples.v b/cparser/MenhirLib/Tuples.v
deleted file mode 100644
index 3fd2ec03..00000000
--- a/cparser/MenhirLib/Tuples.v
+++ /dev/null
@@ -1,49 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Import List.
-Require Import Coq.Program.Syntax.
-Require Import Equality.
-
-(** A curried function with multiple parameters **)
-Definition arrows_left: list Type -> Type -> Type :=
- fold_left (fun A B => B -> A).
-
-(** A curried function with multiple parameters **)
-Definition arrows_right: Type -> list Type -> Type :=
- fold_right (fun A B => A -> B).
-
-(** A tuple is a heterogeneous list. For convenience, we use pairs. **)
-Fixpoint tuple (types : list Type) : Type :=
- match types with
- | nil => unit
- | t::q => prod t (tuple q)
- end.
-
-Fixpoint uncurry {args:list Type} {res:Type}:
- arrows_left args res -> tuple args -> res :=
- match args return forall res, arrows_left args res -> tuple args -> res with
- | [] => fun _ f _ => f
- | t::q => fun res f p => let (d, t) := p in
- (@uncurry q _ f t) d
- end res.
-
-Lemma JMeq_eqrect:
- forall (U:Type) (a b:U) (P:U -> Type) (x:P a) (e:a=b),
- eq_rect a P x b e ~= x.
-Proof.
-destruct e.
-reflexivity.
-Qed.
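
To make the encoding above concrete: for a parameter list such as [nat; bool], [arrows_left] builds the curried type bool -> nat -> nat (folding from the left), while [tuple] builds the nested pairs nat * (bool * unit) that [uncurry] consumes. A self-contained check (primed names, so as not to be confused with the definitions above):

Require Import List.
Import ListNotations.

Definition arrows_left' : list Type -> Type -> Type :=
  fold_left (fun A B => B -> A).

Fixpoint tuple' (types : list Type) : Type :=
  match types with
  | nil => unit
  | t :: q => prod t (tuple' q)
  end.

Definition params : list Type := nat :: bool :: nil.

(* Folding from the left reverses the order: the last element of [params]
   becomes the outermost argument. *)
Example arrows_left_example : arrows_left' params nat = (bool -> nat -> nat).
Proof. reflexivity. Qed.

(* The tuple mirrors the parameter list with nested pairs, ending in unit. *)
Example tuple_example : tuple' params = (nat * (bool * unit))%type.
Proof. reflexivity. Qed.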
diff --git a/cparser/MenhirLib/Validator_complete.v b/cparser/MenhirLib/Validator_complete.v
deleted file mode 100644
index a9823278..00000000
--- a/cparser/MenhirLib/Validator_complete.v
+++ /dev/null
@@ -1,542 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Automaton.
-Require Import Alphabet.
-Require Import List.
-Require Import Syntax.
-
-Module Make(Import A:Automaton.T).
-
-(** We instantiate some set and map modules. **)
-Module TerminalComparableM <: ComparableM.
- Definition t := terminal.
- Instance tComparable : Comparable t := _.
-End TerminalComparableM.
-Module TerminalOrderedType := OrderedType_from_ComparableM TerminalComparableM.
-Module StateProdPosComparableM <: ComparableM.
- Definition t := (state*production*nat)%type.
- Instance tComparable : Comparable t := _.
-End StateProdPosComparableM.
-Module StateProdPosOrderedType :=
- OrderedType_from_ComparableM StateProdPosComparableM.
-
-Module TerminalSet := FSetAVL.Make TerminalOrderedType.
-Module StateProdPosMap := FMapAVL.Make StateProdPosOrderedType.
-
-(** Nullable predicate for symbols and lists of symbols. **)
-Definition nullable_symb (symbol:symbol) :=
- match symbol with
- | NT nt => nullable_nterm nt
- | _ => false
- end.
-
-Definition nullable_word (word:list symbol) :=
- forallb nullable_symb word.
-
-(** First sets for nonterminals, symbols and lists of symbols, given as FSets. **)
-Definition first_nterm_set (nterm:nonterminal) :=
- fold_left (fun acc t => TerminalSet.add t acc)
- (first_nterm nterm) TerminalSet.empty.
-
-Definition first_symb_set (symbol:symbol) :=
- match symbol with
- | NT nt => first_nterm_set nt
- | T t => TerminalSet.singleton t
- end.
-
-Fixpoint first_word_set (word:list symbol) :=
- match word with
- | [] => TerminalSet.empty
- | t::q =>
- if nullable_symb t then
- TerminalSet.union (first_symb_set t) (first_word_set q)
- else
- first_symb_set t
- end.
-
-(** Small helper for finding the part of an item that is after the dot. **)
-Definition future_of_prod prod dot_pos : list symbol :=
- (fix loop n lst :=
- match n with
- | O => lst
- | S x => match loop x lst with [] => [] | _::q => q end
- end)
- dot_pos (rev' (prod_rhs_rev prod)).
-
-(** We build a fast map to store all the items of all the states. **)
-Definition items_map (_:unit): StateProdPosMap.t TerminalSet.t :=
- fold_left (fun acc state =>
- fold_left (fun acc item =>
- let key := (state, prod_item item, dot_pos_item item) in
- let data := fold_left (fun acc t => TerminalSet.add t acc)
- (lookaheads_item item) TerminalSet.empty
- in
- let old :=
- match StateProdPosMap.find key acc with
- | Some x => x | None => TerminalSet.empty
- end
- in
- StateProdPosMap.add key (TerminalSet.union data old) acc
- ) (items_of_state state) acc
- ) all_list (StateProdPosMap.empty TerminalSet.t).
-
-(** Accessor. **)
-Definition find_items_map items_map state prod dot_pos : TerminalSet.t :=
- match StateProdPosMap.find (state, prod, dot_pos) items_map with
- | None => TerminalSet.empty
- | Some x => x
- end.
-
-Definition state_has_future state prod (fut:list symbol) (lookahead:terminal) :=
- exists dot_pos:nat,
- fut = future_of_prod prod dot_pos /\
- TerminalSet.In lookahead (find_items_map (items_map ()) state prod dot_pos).
-
-(** Iterator over items. **)
-Definition forallb_items items_map (P:state -> production -> nat -> TerminalSet.t -> bool): bool:=
- StateProdPosMap.fold (fun key set acc =>
- match key with (st, p, pos) => (acc && P st p pos set)%bool end
- ) items_map true.
-
-Lemma forallb_items_spec :
- forall p, forallb_items (items_map ()) p = true ->
- forall st prod fut lookahead, state_has_future st prod fut lookahead ->
- forall P:state -> production -> list symbol -> terminal -> Prop,
- (forall st prod pos set lookahead,
- TerminalSet.In lookahead set -> p st prod pos set = true ->
- P st prod (future_of_prod prod pos) lookahead) ->
- P st prod fut lookahead.
-Proof.
-intros.
-unfold forallb_items in H.
-rewrite StateProdPosMap.fold_1 in H.
-destruct H0; destruct H0.
-specialize (H1 st prod x _ _ H2).
-destruct H0.
-apply H1.
-unfold find_items_map in *.
-pose proof (@StateProdPosMap.find_2 _ (items_map ()) (st, prod, x)).
-destruct (StateProdPosMap.find (st, prod, x) (items_map ())); [ |destruct (TerminalSet.empty_1 H2)].
-specialize (H0 _ (eq_refl _)).
-pose proof (StateProdPosMap.elements_1 H0).
-revert H.
-generalize true at 1.
-induction H3.
-destruct H.
-destruct y.
-simpl in H3; destruct H3.
-pose proof (compare_eq (st, prod, x) k H).
-destruct H3.
-simpl.
-generalize (p st prod x t).
-induction l; simpl; intros.
-rewrite Bool.andb_true_iff in H3.
-intuition.
-destruct a; destruct k; destruct p0.
-simpl in H3.
-replace (b0 && b && p s p0 n t0)%bool with (b0 && p s p0 n t0 && b)%bool in H3.
-apply (IHl _ _ H3).
-destruct b, b0, (p s p0 n t0); reflexivity.
-intro.
-apply IHInA.
-Qed.
-
-(** * Validation for completeness **)
-
-(** The nullable predicate is a fixpoint: it is correct. **)
-Definition nullable_stable:=
- forall p:production,
- nullable_word (rev (prod_rhs_rev p)) = true ->
- nullable_nterm (prod_lhs p) = true.
-
-Definition is_nullable_stable (_:unit) :=
- forallb (fun p:production =>
- implb (nullable_word (rev' (prod_rhs_rev p))) (nullable_nterm (prod_lhs p)))
- all_list.
-
-Property is_nullable_stable_correct :
- is_nullable_stable () = true -> nullable_stable.
-Proof.
-unfold is_nullable_stable, nullable_stable.
-intros.
-rewrite forallb_forall in H.
-specialize (H p (all_list_forall p)).
-unfold rev' in H; rewrite <- rev_alt in H.
-rewrite H0 in H; intuition.
-Qed.
-
-(** The first predicate is a fixpoint: it is correct. **)
-Definition first_stable:=
- forall (p:production),
- TerminalSet.Subset (first_word_set (rev (prod_rhs_rev p)))
- (first_nterm_set (prod_lhs p)).
-
-Definition is_first_stable (_:unit) :=
- forallb (fun p:production =>
- TerminalSet.subset (first_word_set (rev' (prod_rhs_rev p)))
- (first_nterm_set (prod_lhs p)))
- all_list.
-
-Property is_first_stable_correct :
- is_first_stable () = true -> first_stable.
-Proof.
-unfold is_first_stable, first_stable.
-intros.
-rewrite forallb_forall in H.
-specialize (H p (all_list_forall p)).
-unfold rev' in H; rewrite <- rev_alt in H.
-apply TerminalSet.subset_2; intuition.
-Qed.
-
-(** The initial state has all the S=>.u items, where S is the start non-terminal **)
-Definition start_future :=
- forall (init:initstate) (t:terminal) (p:production),
- prod_lhs p = start_nt init ->
- state_has_future init p (rev (prod_rhs_rev p)) t.
-
-Definition is_start_future items_map :=
- forallb (fun init =>
- forallb (fun prod =>
- if compare_eqb (prod_lhs prod) (start_nt init) then
- let lookaheads := find_items_map items_map init prod 0 in
- forallb (fun t => TerminalSet.mem t lookaheads) all_list
- else
- true) all_list) all_list.
-
-Property is_start_future_correct :
- is_start_future (items_map ()) = true -> start_future.
-Proof.
-unfold is_start_future, start_future.
-intros.
-rewrite forallb_forall in H.
-specialize (H init (all_list_forall _)).
-rewrite forallb_forall in H.
-specialize (H p (all_list_forall _)).
-rewrite <- compare_eqb_iff in H0.
-rewrite H0 in H.
-rewrite forallb_forall in H.
-specialize (H t (all_list_forall _)).
-exists 0.
-split.
-apply rev_alt.
-apply TerminalSet.mem_2; eauto.
-Qed.
-
-(** If a state contains an item of the form A->_.av[[b]], where a is a
- terminal, then reading an a does a [Shift_act], to a state containing
- an item of the form A->_.v[[b]]. **)
-Definition terminal_shift :=
- forall (s1:state) prod fut lookahead,
- state_has_future s1 prod fut lookahead ->
- match fut with
- | T t::q =>
- match action_table s1 with
- | Lookahead_act awp =>
- match awp t with
- | Shift_act s2 _ =>
- state_has_future s2 prod q lookahead
- | _ => False
- end
- | _ => False
- end
- | _ => True
- end.
-
-Definition is_terminal_shift items_map :=
- forallb_items items_map (fun s1 prod pos lset =>
- match future_of_prod prod pos with
- | T t::_ =>
- match action_table s1 with
- | Lookahead_act awp =>
- match awp t with
- | Shift_act s2 _ =>
- TerminalSet.subset lset (find_items_map items_map s2 prod (S pos))
- | _ => false
- end
- | _ => false
- end
- | _ => true
- end).
-
-Property is_terminal_shift_correct :
- is_terminal_shift (items_map ()) = true -> terminal_shift.
-Proof.
-unfold is_terminal_shift, terminal_shift.
-intros.
-apply (forallb_items_spec _ H _ _ _ _ H0 (fun _ _ fut look => _)).
-intros.
-destruct (future_of_prod prod0 pos) as [|[]] eqn:?; intuition.
-destruct (action_table st); intuition.
-destruct (l0 t); intuition.
-exists (S pos).
-split.
-unfold future_of_prod in *.
-rewrite Heql; reflexivity.
-apply (TerminalSet.subset_2 H2); intuition.
-Qed.
-
-(** If a state contains an item of the form A->_.[[a]], then either we do a
-    [Default_reduce_act] of the corresponding production, or a is a
-    terminal (i.e. there is a lookahead terminal), and reading a does a
-    [Reduce_act] of the corresponding production. **)
-Definition end_reduce :=
- forall (s:state) prod fut lookahead,
- state_has_future s prod fut lookahead ->
- fut = [] ->
- match action_table s with
- | Default_reduce_act p => p = prod
- | Lookahead_act awt =>
- match awt lookahead with
- | Reduce_act p => p = prod
- | _ => False
- end
- end.
-
-Definition is_end_reduce items_map :=
- forallb_items items_map (fun s prod pos lset =>
- match future_of_prod prod pos with
- | [] =>
- match action_table s with
- | Default_reduce_act p => compare_eqb p prod
- | Lookahead_act awt =>
- TerminalSet.fold (fun lookahead acc =>
- match awt lookahead with
- | Reduce_act p => (acc && compare_eqb p prod)%bool
- | _ => false
- end) lset true
- end
- | _ => true
- end).
-
-Property is_end_reduce_correct :
- is_end_reduce (items_map ()) = true -> end_reduce.
-Proof.
-unfold is_end_reduce, end_reduce.
-intros.
-revert H1.
-apply (forallb_items_spec _ H _ _ _ _ H0 (fun st prod fut look => _ ->
- match action_table st with
- | Default_reduce_act p => p = prod
- | _ => _
- end)).
-intros.
-rewrite H3 in H2.
-destruct (action_table st); intuition.
-apply compare_eqb_iff; intuition.
-rewrite TerminalSet.fold_1 in H2.
-revert H2.
-generalize true at 1.
-pose proof (TerminalSet.elements_1 H1).
-induction H2.
-pose proof (compare_eq _ _ H2).
-destruct H4.
-simpl.
-assert (fold_left
- (fun (a : bool) (e : TerminalSet.elt) =>
- match l e with
- | Shift_act _ _ => false
- | Reduce_act p => (a && compare_eqb p prod0)%bool
- | Fail_act => false
- end) l0 false = true -> False).
-induction l0; intuition.
-apply IHl0.
-simpl in H4.
-destruct (l a); intuition.
-destruct (l lookahead0); intuition.
-apply compare_eqb_iff.
-destruct (compare_eqb p prod0); intuition.
-destruct b; intuition.
-simpl; intros.
-eapply IHInA; eauto.
-Qed.
-
-(** If a state contains an item of the form A->_.Bv[[b]], where B is a
- non terminal, then the goto table says we have to go to a state containing
- an item of the form A->_.v[[b]]. **)
-Definition non_terminal_goto :=
- forall (s1:state) prod fut lookahead,
- state_has_future s1 prod fut lookahead ->
- match fut with
- | NT nt::q =>
- match goto_table s1 nt with
- | Some (exist _ s2 _) =>
- state_has_future s2 prod q lookahead
- | None =>
- forall prod fut lookahead,
- state_has_future s1 prod fut lookahead ->
- match fut with
- | NT nt'::_ => nt <> nt'
- | _ => True
- end
- end
- | _ => True
- end.
-
-Definition is_non_terminal_goto items_map :=
- forallb_items items_map (fun s1 prod pos lset =>
- match future_of_prod prod pos with
- | NT nt::_ =>
- match goto_table s1 nt with
- | Some (exist _ s2 _) =>
- TerminalSet.subset lset (find_items_map items_map s2 prod (S pos))
- | None => forallb_items items_map (fun s1' prod' pos' _ =>
- (implb (compare_eqb s1 s1')
- match future_of_prod prod' pos' with
- | NT nt' :: _ => negb (compare_eqb nt nt')
- | _ => true
- end)%bool)
- end
- | _ => true
- end).
-
-Property is_non_terminal_goto_correct :
- is_non_terminal_goto (items_map ()) = true -> non_terminal_goto.
-Proof.
-unfold is_non_terminal_goto, non_terminal_goto.
-intros.
-apply (forallb_items_spec _ H _ _ _ _ H0 (fun st prod fut look =>
- match fut with
- | NT nt :: q =>
- match goto_table st nt with
- | Some _ => _
- | None =>
- forall p f l, state_has_future st p f l -> (_:Prop)
- end
- | _ => _
- end)).
-intros.
-destruct (future_of_prod prod0 pos) as [|[]] eqn:?; intuition.
-destruct (goto_table st n) as [[]|].
-exists (S pos).
-split.
-unfold future_of_prod in *.
-rewrite Heql; reflexivity.
-apply (TerminalSet.subset_2 H2); intuition.
-intros.
-remember st in H2; revert Heqs.
-apply (forallb_items_spec _ H2 _ _ _ _ H3 (fun st' prod fut look => s = st' -> match fut return Prop with [] => _ | _ => _ end)); intros.
-rewrite <- compare_eqb_iff in H6; rewrite H6 in H5.
-destruct (future_of_prod prod1 pos0) as [|[]]; intuition.
-rewrite <- compare_eqb_iff in H7; rewrite H7 in H5.
-discriminate.
-Qed.
-
-Definition start_goto :=
- forall (init:initstate), goto_table init (start_nt init) = None.
-
-Definition is_start_goto (_:unit) :=
- forallb (fun (init:initstate) =>
- match goto_table init (start_nt init) with
- | Some _ => false
- | None => true
- end) all_list.
-
-Definition is_start_goto_correct:
- is_start_goto () = true -> start_goto.
-Proof.
-unfold is_start_goto, start_goto.
-rewrite forallb_forall.
-intros.
-specialize (H init (all_list_forall _)).
-destruct (goto_table init (start_nt init)); congruence.
-Qed.
-
-(** Closure property of item sets : if a state contains an item of the form
- A->_.Bv[[b]], then for each production B->u and each terminal a of
- first(vb), the state contains an item of the form B->_.u[[a]] **)
-Definition non_terminal_closed :=
- forall (s1:state) prod fut lookahead,
- state_has_future s1 prod fut lookahead ->
- match fut with
- | NT nt::q =>
- forall (p:production) (lookahead2:terminal),
- prod_lhs p = nt ->
- TerminalSet.In lookahead2 (first_word_set q) \/
- lookahead2 = lookahead /\ nullable_word q = true ->
- state_has_future s1 p (rev (prod_rhs_rev p)) lookahead2
- | _ => True
- end.
-
-Definition is_non_terminal_closed items_map :=
- forallb_items items_map (fun s1 prod pos lset =>
- match future_of_prod prod pos with
- | NT nt::q =>
- forallb (fun p =>
- if compare_eqb (prod_lhs p) nt then
- let lookaheads := find_items_map items_map s1 p 0 in
- (implb (nullable_word q) (TerminalSet.subset lset lookaheads)) &&
- TerminalSet.subset (first_word_set q) lookaheads
- else true
- )%bool all_list
- | _ => true
- end).
-
-Property is_non_terminal_closed_correct:
- is_non_terminal_closed (items_map ()) = true -> non_terminal_closed.
-Proof.
-unfold is_non_terminal_closed, non_terminal_closed.
-intros.
-apply (forallb_items_spec _ H _ _ _ _ H0 (fun st prod fut look =>
- match fut with
- | NT nt :: q => forall p l, _ -> _ -> state_has_future st _ _ _
- | _ => _
- end)).
-intros.
-destruct (future_of_prod prod0 pos); intuition.
-destruct s; eauto; intros.
-rewrite forallb_forall in H2.
-specialize (H2 p (all_list_forall p)).
-rewrite <- compare_eqb_iff in H3.
-rewrite H3 in H2.
-rewrite Bool.andb_true_iff in H2.
-destruct H2.
-exists 0.
-split.
-apply rev_alt.
-destruct H4 as [|[]]; subst.
-apply (TerminalSet.subset_2 H5); intuition.
-rewrite H6 in H2.
-apply (TerminalSet.subset_2 H2); intuition.
-Qed.
-
-(** The automaton is complete **)
-Definition complete :=
- nullable_stable /\ first_stable /\ start_future /\ terminal_shift
- /\ end_reduce /\ non_terminal_goto /\ start_goto /\ non_terminal_closed.
-
-Definition is_complete (_:unit) :=
- let items_map := items_map () in
- (is_nullable_stable () && is_first_stable () && is_start_future items_map &&
- is_terminal_shift items_map && is_end_reduce items_map && is_start_goto () &&
- is_non_terminal_goto items_map && is_non_terminal_closed items_map)%bool.
-
-Property is_complete_correct:
- is_complete () = true -> complete.
-Proof.
-unfold is_complete, complete.
-repeat rewrite Bool.andb_true_iff.
-intuition.
-apply is_nullable_stable_correct; assumption.
-apply is_first_stable_correct; assumption.
-apply is_start_future_correct; assumption.
-apply is_terminal_shift_correct; assumption.
-apply is_end_reduce_correct; assumption.
-apply is_non_terminal_goto_correct; assumption.
-apply is_start_goto_correct; assumption.
-apply is_non_terminal_closed_correct; assumption.
-Qed.
-
-End Make.
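
The nullable and first computations above follow the textbook equations: a word is nullable when each of its symbols is, and FIRST of a word accumulates the FIRST sets of its symbols for as long as the prefix stays nullable. A toy instance over a two-terminal fragment (the types and tables below are invented for illustration; the real validator works over the generated automaton's symbols and TerminalSet):

Require Import List Bool.
Import ListNotations.

Inductive tsym := PLUS | ID.
Inductive ntsym := Expr | OptSign.
Inductive sym := T (t : tsym) | NT (n : ntsym).

(* Fixed tables standing in for nullable_nterm / first_nterm. *)
Definition nullable_nt (n : ntsym) : bool :=
  match n with OptSign => true | Expr => false end.
Definition first_nt (n : ntsym) : list tsym :=
  match n with OptSign => [PLUS] | Expr => [ID] end.

Definition nullable_sym (s : sym) : bool :=
  match s with NT n => nullable_nt n | T _ => false end.
Definition first_sym (s : sym) : list tsym :=
  match s with NT n => first_nt n | T t => [t] end.

(* A word is nullable when all of its symbols are. *)
Definition nullable_wrd (w : list sym) : bool := forallb nullable_sym w.

(* FIRST of a word: keep accumulating while the prefix is nullable. *)
Fixpoint first_wrd (w : list sym) : list tsym :=
  match w with
  | [] => []
  | s :: q => if nullable_sym s
              then first_sym s ++ first_wrd q
              else first_sym s
  end.

(* FIRST(OptSign Expr) = {PLUS, ID}, since OptSign is nullable but Expr is not. *)
Example first_wrd_example : first_wrd [NT OptSign; NT Expr] = [PLUS; ID].
Proof. reflexivity. Qed.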
diff --git a/cparser/MenhirLib/Validator_safe.v b/cparser/MenhirLib/Validator_safe.v
deleted file mode 100644
index 183d661b..00000000
--- a/cparser/MenhirLib/Validator_safe.v
+++ /dev/null
@@ -1,414 +0,0 @@
-(* *********************************************************************)
-(* *)
-(* The Compcert verified compiler *)
-(* *)
-(* Jacques-Henri Jourdan, INRIA Paris-Rocquencourt *)
-(* *)
-(* Copyright Institut National de Recherche en Informatique et en *)
-(* Automatique. All rights reserved. This file is distributed *)
-(* under the terms of the GNU General Public License as published by *)
-(* the Free Software Foundation, either version 2 of the License, or *)
-(* (at your option) any later version. This file is also distributed *)
-(* under the terms of the INRIA Non-Commercial License Agreement. *)
-(* *)
-(* *********************************************************************)
-
-Require Automaton.
-Require Import Alphabet.
-Require Import List.
-Require Import Syntax.
-
-Module Make(Import A:Automaton.T).
-
-(** The singleton predicate for states **)
-Definition singleton_state_pred (state:state) :=
- (fun state' => match compare state state' with Eq => true |_ => false end).
-
-(** [past_state_of_non_init_state], extended for all states. **)
-Definition past_state_of_state (state:state) :=
- match state with
- | Init _ => []
- | Ninit nis => past_state_of_non_init_state nis
- end.
-
-(** Concatenations of last and past **)
-Definition head_symbs_of_state (state:state) :=
- match state with
- | Init _ => []
- | Ninit s =>
- last_symb_of_non_init_state s::past_symb_of_non_init_state s
- end.
-Definition head_states_of_state (state:state) :=
- singleton_state_pred state::past_state_of_state state.
-
-(** * Validation for correctness **)
-
-(** Prefix predicate between two lists of symbols. **)
-Inductive prefix: list symbol -> list symbol -> Prop :=
- | prefix_nil: forall l, prefix [] l
- | prefix_cons: forall l1 l2 x, prefix l1 l2 -> prefix (x::l1) (x::l2).
-
-Fixpoint is_prefix (l1 l2:list symbol):=
- match l1, l2 with
- | [], _ => true
- | t1::q1, t2::q2 => (compare_eqb t1 t2 && is_prefix q1 q2)%bool
- | _::_, [] => false
- end.
-
-Property is_prefix_correct (l1 l2:list symbol):
- is_prefix l1 l2 = true -> prefix l1 l2.
-Proof.
-revert l2.
-induction l1; intros.
-apply prefix_nil.
-unfold is_prefix in H.
-destruct l2; intuition; try discriminate.
-rewrite Bool.andb_true_iff in H.
-destruct H.
-rewrite compare_eqb_iff in H.
-destruct H.
-apply prefix_cons.
-apply IHl1; intuition.
-Qed.
-
-(** If we shift, then the known top symbols of the destination state form
- a prefix of the known top symbols of the source state, with the new
- symbol added. **)
-Definition shift_head_symbs :=
- forall s,
- match action_table s with
- | Lookahead_act awp =>
- forall t, match awp t with
- | Shift_act s2 _ =>
- prefix (past_symb_of_non_init_state s2) (head_symbs_of_state s)
- | _ => True
- end
- | _ => True
- end.
-
-Definition is_shift_head_symbs (_:unit) :=
- forallb (fun s:state =>
- match action_table s with
- | Lookahead_act awp =>
- forallb (fun t =>
- match awp t with
- | Shift_act s2 _ =>
- is_prefix (past_symb_of_non_init_state s2) (head_symbs_of_state s)
- | _ => true
- end)
- all_list
- | _ => true
- end)
- all_list.
-
-Property is_shift_head_symbs_correct:
- is_shift_head_symbs () = true -> shift_head_symbs.
-Proof.
-unfold is_shift_head_symbs, shift_head_symbs.
-intros.
-rewrite forallb_forall in H.
-specialize (H s (all_list_forall s)).
-destruct (action_table s); intuition.
-rewrite forallb_forall in H.
-specialize (H t (all_list_forall t)).
-destruct (l t); intuition.
-apply is_prefix_correct; intuition.
-Qed.
-
-(** When a goto happens, the known top symbols of the destination state
-    form a prefix of the known top symbols of the source state, with the new
- symbol added. **)
-Definition goto_head_symbs :=
- forall s nt,
- match goto_table s nt with
- | Some (exist _ s2 _) =>
- prefix (past_symb_of_non_init_state s2) (head_symbs_of_state s)
- | None => True
- end.
-
-Definition is_goto_head_symbs (_:unit) :=
- forallb (fun s:state =>
- forallb (fun nt =>
- match goto_table s nt with
- | Some (exist _ s2 _) =>
- is_prefix (past_symb_of_non_init_state s2) (head_symbs_of_state s)
- | None => true
- end)
- all_list)
- all_list.
-
-Property is_goto_head_symbs_correct:
- is_goto_head_symbs () = true -> goto_head_symbs.
-Proof.
-unfold is_goto_head_symbs, goto_head_symbs.
-intros.
-rewrite forallb_forall in H.
-specialize (H s (all_list_forall s)).
-rewrite forallb_forall in H.
-specialize (H nt (all_list_forall nt)).
-destruct (goto_table s nt); intuition.
-destruct s0.
-apply is_prefix_correct; intuition.
-Qed.
-
-(** We have to state the same kind of checks for the assumptions about the
-    state stack. However, these assumptions are predicates. So we define
-    a notion of "prefix" over lists of predicates, which basically means that
-    one assumption entails the other. **)
-Inductive prefix_pred: list (state->bool) -> list (state->bool) -> Prop :=
- | prefix_pred_nil: forall l, prefix_pred [] l
- | prefix_pred_cons: forall l1 l2 f1 f2,
- (forall x, implb (f2 x) (f1 x) = true) ->
- prefix_pred l1 l2 -> prefix_pred (f1::l1) (f2::l2).
-
-Fixpoint is_prefix_pred (l1 l2:list (state->bool)) :=
- match l1, l2 with
- | [], _ => true
- | f1::q1, f2::q2 =>
- (forallb (fun x => implb (f2 x) (f1 x)) all_list
- && is_prefix_pred q1 q2)%bool
- | _::_, [] => false
- end.
-
-Property is_prefix_pred_correct (l1 l2:list (state->bool)) :
- is_prefix_pred l1 l2 = true -> prefix_pred l1 l2.
-Proof.
-revert l2.
-induction l1.
-intros.
-apply prefix_pred_nil.
-intros.
-destruct l2; intuition; try discriminate.
-unfold is_prefix_pred in H.
-rewrite Bool.andb_true_iff in H.
-rewrite forallb_forall in H.
-apply prefix_pred_cons; intuition.
-apply H0.
-apply all_list_forall.
-Qed.
-
-(** The assumptions about the state stack are preserved when we shift **)
-Definition shift_past_state :=
- forall s,
- match action_table s with
- | Lookahead_act awp =>
- forall t, match awp t with
- | Shift_act s2 _ =>
- prefix_pred (past_state_of_non_init_state s2)
- (head_states_of_state s)
- | _ => True
- end
- | _ => True
- end.
-
-Definition is_shift_past_state (_:unit) :=
- forallb (fun s:state =>
- match action_table s with
- | Lookahead_act awp =>
- forallb (fun t =>
- match awp t with
- | Shift_act s2 _ =>
- is_prefix_pred
- (past_state_of_non_init_state s2) (head_states_of_state s)
- | _ => true
- end)
- all_list
- | _ => true
- end)
- all_list.
-
-Property is_shift_past_state_correct:
- is_shift_past_state () = true -> shift_past_state.
-Proof.
-unfold is_shift_past_state, shift_past_state.
-intros.
-rewrite forallb_forall in H.
-specialize (H s (all_list_forall s)).
-destruct (action_table s); intuition.
-rewrite forallb_forall in H.
-specialize (H t (all_list_forall t)).
-destruct (l t); intuition.
-apply is_prefix_pred_correct; intuition.
-Qed.
-
-(** The assumptions about the state stack are preserved when we do a goto **)
-Definition goto_past_state :=
- forall s nt,
- match goto_table s nt with
- | Some (exist _ s2 _) =>
- prefix_pred (past_state_of_non_init_state s2)
- (head_states_of_state s)
- | None => True
- end.
-
-Definition is_goto_past_state (_:unit) :=
- forallb (fun s:state =>
- forallb (fun nt =>
- match goto_table s nt with
- | Some (exist _ s2 _) =>
- is_prefix_pred
- (past_state_of_non_init_state s2) (head_states_of_state s)
- | None => true
- end)
- all_list)
- all_list.
-
-Property is_goto_past_state_correct :
- is_goto_past_state () = true -> goto_past_state.
-Proof.
-unfold is_goto_past_state, goto_past_state.
-intros.
-rewrite forallb_forall in H.
-specialize (H s (all_list_forall s)).
-rewrite forallb_forall in H.
-specialize (H nt (all_list_forall nt)).
-destruct (goto_table s nt); intuition.
-destruct s0.
-apply is_prefix_pred_correct; intuition.
-Qed.
-
-(** What states are possible after having popped these symbols from the
-    stack, given the annotation of the current state? **)
-Inductive state_valid_after_pop (s:state):
- list symbol -> list (state -> bool) -> Prop :=
- | state_valid_after_pop_nil1:
- forall p pl, p s = true -> state_valid_after_pop s [] (p::pl)
- | state_valid_after_pop_nil2:
- forall sl, state_valid_after_pop s sl []
- | state_valid_after_pop_cons:
- forall st sq p pl, state_valid_after_pop s sq pl ->
- state_valid_after_pop s (st::sq) (p::pl).
-
-Fixpoint is_state_valid_after_pop
- (state:state) (to_pop:list symbol) annot :=
- match annot, to_pop with
- | [], _ => true
- | p::_, [] => p state
- | p::pl, s::sl => is_state_valid_after_pop state sl pl
- end.
-
-Property is_state_valid_after_pop_complete state sl pl :
- state_valid_after_pop state sl pl -> is_state_valid_after_pop state sl pl = true.
-Proof.
-intro.
-induction H; intuition.
-destruct sl; intuition.
-Qed.
-
-(** A state is valid for reducing a production when:
- - The assumptions on the state are such that we will find the right hand
- side of the production on the stack.
- - We will be able to do a goto after having popped the right hand side.
-**)
-Definition valid_for_reduce (state:state) prod :=
- prefix (prod_rhs_rev prod) (head_symbs_of_state state) /\
- forall state_new,
- state_valid_after_pop state_new
- (prod_rhs_rev prod) (head_states_of_state state) ->
- goto_table state_new (prod_lhs prod) = None ->
- match state_new with
- | Init i => prod_lhs prod = start_nt i
- | Ninit _ => False
- end.
-
-Definition is_valid_for_reduce (state:state) prod:=
- (is_prefix (prod_rhs_rev prod) (head_symbs_of_state state) &&
- forallb (fun state_new =>
- if is_state_valid_after_pop state_new (prod_rhs_rev prod)
- (head_states_of_state state) then
- match goto_table state_new (prod_lhs prod) with
- | Some _ => true
- | None =>
- match state_new with
- | Init i => compare_eqb (prod_lhs prod) (start_nt i)
- | Ninit _ => false
- end
- end
- else
- true)
- all_list)%bool.
-
-Property is_valid_for_reduce_correct (state:state) prod:
- is_valid_for_reduce state prod = true -> valid_for_reduce state prod.
-Proof.
-unfold is_valid_for_reduce, valid_for_reduce.
-intros.
-rewrite Bool.andb_true_iff in H.
-split.
-apply is_prefix_correct.
-intuition.
-intros.
-rewrite forallb_forall in H.
-destruct H.
-specialize (H2 state_new (all_list_forall state_new)).
-rewrite is_state_valid_after_pop_complete, H1 in H2.
-destruct state_new; intuition.
-rewrite compare_eqb_iff in H2; intuition.
-intuition.
-Qed.
-
-(** All the states that perform a reduce are valid for reduction **)
-Definition reduce_ok :=
- forall s,
- match action_table s with
- | Lookahead_act awp =>
- forall t, match awp t with
- | Reduce_act p => valid_for_reduce s p
- | _ => True
- end
- | Default_reduce_act p => valid_for_reduce s p
- end.
-
-Definition is_reduce_ok (_:unit) :=
- forallb (fun s =>
- match action_table s with
- | Lookahead_act awp =>
- forallb (fun t =>
- match awp t with
- | Reduce_act p => is_valid_for_reduce s p
- | _ => true
- end)
- all_list
- | Default_reduce_act p => is_valid_for_reduce s p
- end)
- all_list.
-
-Property is_reduce_ok_correct :
- is_reduce_ok () = true -> reduce_ok.
-Proof.
-unfold is_reduce_ok, reduce_ok.
-intros.
-rewrite forallb_forall in H.
-specialize (H s (all_list_forall s)).
-destruct (action_table s).
-apply is_valid_for_reduce_correct; intuition.
-intro.
-rewrite forallb_forall in H.
-specialize (H t (all_list_forall t)).
-destruct (l t); intuition.
-apply is_valid_for_reduce_correct; intuition.
-Qed.
-
-(** The automaton is safe **)
-Definition safe :=
- shift_head_symbs /\ goto_head_symbs /\ shift_past_state /\
- goto_past_state /\ reduce_ok.
-
-Definition is_safe (_:unit) :=
- (is_shift_head_symbs () && is_goto_head_symbs () && is_shift_past_state () &&
- is_goto_past_state () && is_reduce_ok ())%bool.
-
-Property is_safe_correct:
- is_safe () = true -> safe.
-Proof.
-unfold safe, is_safe.
-repeat rewrite Bool.andb_true_iff.
-intuition.
-apply is_shift_head_symbs_correct; assumption.
-apply is_goto_head_symbs_correct; assumption.
-apply is_shift_past_state_correct; assumption.
-apply is_goto_past_state_correct; assumption.
-apply is_reduce_ok_correct; assumption.
-Qed.
-
-End Make.
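
The deleted validator pairs each safety property (valid_for_reduce, reduce_ok, safe) with a boolean decision procedure (is_valid_for_reduce, is_reduce_ok, is_safe) and a lemma proving that the check implies the property. The self-contained OCaml sketch below restates the reduce check with simplified, assumed types; the automaton record, its fields, and the is_matching_init helper are stand-ins for the Coq section parameters and the Init/start_nt case shown above, not real CompCert bindings.

(* Simplified, assumed types: the record fields stand in for the Coq
   section parameters used above (all_list, head_symbs_of_state, ...). *)
type ('state, 'sym, 'nt) automaton = {
  all_states : 'state list;
  head_symbs_of_state : 'state -> 'sym list;
  head_states_of_state : 'state -> ('state -> bool) list;
  goto_table : 'state -> 'nt -> 'state option;
  is_matching_init : 'state -> 'nt -> bool;  (* the Init i / start_nt i case *)
}

(* The reversed right-hand side must be a prefix of the symbols the state
   guarantees to be on the stack. *)
let rec is_prefix eq xs ys =
  match xs, ys with
  | [], _ -> true
  | _, [] -> false
  | x :: xs', y :: ys' -> eq x y && is_prefix eq xs' ys'

(* Mirrors is_state_valid_after_pop: once the annotation list is exhausted
   the state is unconstrained; if the symbols run out first, the current
   predicate must accept the state. *)
let rec state_valid_after_pop s to_pop annot =
  match annot, to_pop with
  | [], _ -> true
  | p :: _, [] -> p s
  | _ :: pl, _ :: sl -> state_valid_after_pop s sl pl

(* Mirrors is_valid_for_reduce: every state reachable after popping the
   right-hand side must have a goto entry for the left-hand side, except
   for the matching initial state. *)
let is_valid_for_reduce eq aut state ~rhs_rev ~lhs =
  is_prefix eq rhs_rev (aut.head_symbs_of_state state)
  && List.for_all
       (fun s ->
          not (state_valid_after_pop s rhs_rev (aut.head_states_of_state state))
          || (match aut.goto_table s lhs with
              | Some _ -> true
              | None -> aut.is_matching_init s lhs))
       aut.all_states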
diff --git a/cparser/Parse.ml b/cparser/Parse.ml
index 154e3dcf..29245083 100644
--- a/cparser/Parse.ml
+++ b/cparser/Parse.ml
@@ -56,22 +56,21 @@ let preprocessed_file transfs name sourcefile =
let text = read_file sourcefile in
let p =
let t = parse_transformations transfs in
- let rec inf = Datatypes.S inf in
+ let log_fuel = Camlcoq.Nat.of_int 50 in
let ast : Cabs.definition list =
- Obj.magic
(match Timing.time "Parsing"
(* The call to Lexer.tokens_stream results in the pre
                 parsing of the entire file. This is non-negligible,
so we cannot use Timing.time2 *)
(fun () ->
- Parser.translation_unit_file inf (Lexer.tokens_stream name text)) ()
+ Parser.translation_unit_file log_fuel (Lexer.tokens_stream name text)) ()
with
- | Parser.Parser.Inter.Fail_pr ->
+ | Parser.MenhirLibParser.Inter.Fail_pr ->
(* Theoretically impossible : implies inconsistencies
between grammars. *)
- Diagnostics.fatal_error Diagnostics.no_loc "internal error while parsing"
- | Parser.Parser.Inter.Timeout_pr -> assert false
- | Parser.Parser.Inter.Parsed_pr (ast, _ ) -> ast) in
+ Diagnostics.fatal_error Diagnostics.no_loc "internal error while parsing"
+ | Parser.MenhirLibParser.Inter.Timeout_pr -> assert false
+ | Parser.MenhirLibParser.Inter.Parsed_pr (ast, _ ) -> ast) in
let p1 = Timing.time "Elaboration" Elab.elab_file ast in
Diagnostics.check_errors ();
Timing.time2 "Emulations" transform_program t p1 name in
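
The substantive change in this hunk is the fuel argument: the old code built an unbounded Peano number as a cyclic value (let rec inf = Datatypes.S inf), whereas the new code passes the finite Camlcoq.Nat.of_int 50. The name log_fuel suggests the budget is logarithmic, on the order of 2^50 parser steps, which would make Timeout_pr unreachable in practice; treat that reading as an assumption. Below is a minimal sketch of the same call pattern, reusing only names visible in the hunk above; parse_definitions itself is a hypothetical wrapper, not part of the patch.

(* Sketch only, assuming the constructor names shown in the hunk above. *)
let parse_definitions name text : Cabs.definition list option =
  let log_fuel = Camlcoq.Nat.of_int 50 in
  match Parser.translation_unit_file log_fuel (Lexer.tokens_stream name text) with
  | Parser.MenhirLibParser.Inter.Parsed_pr (ast, _) -> Some ast
  | Parser.MenhirLibParser.Inter.Timeout_pr -> None  (* fuel exhausted *)
  | Parser.MenhirLibParser.Inter.Fail_pr -> None     (* token stream rejected *)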
diff --git a/cparser/Parser.vy b/cparser/Parser.vy
index 79e3793d..03bfa590 100644
--- a/cparser/Parser.vy
+++ b/cparser/Parser.vy
@@ -15,96 +15,99 @@
%{
-Require Import Cabs.
Require Import List.
+Require Cabs.
%}
-%token<string * cabsloc> VAR_NAME TYPEDEF_NAME OTHER_NAME
-%token<string * cabsloc> PRAGMA
-%token<bool * list char_code * cabsloc> STRING_LITERAL
-%token<constant * cabsloc> CONSTANT
-%token<cabsloc> SIZEOF PTR INC DEC LEFT RIGHT LEQ GEQ EQEQ EQ NEQ LT GT
+%token<Cabs.string * Cabs.loc> VAR_NAME TYPEDEF_NAME OTHER_NAME
+%token<Cabs.string * Cabs.loc> PRAGMA
+%token<bool * list Cabs.char_code * Cabs.loc> STRING_LITERAL
+%token<Cabs.constant * Cabs.loc> CONSTANT
+%token<Cabs.loc> SIZEOF PTR INC DEC LEFT RIGHT LEQ GEQ EQEQ EQ NEQ LT GT
ANDAND BARBAR PLUS MINUS STAR TILDE BANG SLASH PERCENT HAT BAR QUESTION
COLON AND ALIGNOF
-%token<cabsloc> MUL_ASSIGN DIV_ASSIGN MOD_ASSIGN ADD_ASSIGN SUB_ASSIGN
+%token<Cabs.loc> MUL_ASSIGN DIV_ASSIGN MOD_ASSIGN ADD_ASSIGN SUB_ASSIGN
LEFT_ASSIGN RIGHT_ASSIGN AND_ASSIGN XOR_ASSIGN OR_ASSIGN
-%token<cabsloc> LPAREN RPAREN LBRACK RBRACK LBRACE RBRACE DOT COMMA
- SEMICOLON ELLIPSIS TYPEDEF EXTERN STATIC RESTRICT AUTO REGISTER INLINE NORETURN
- CHAR SHORT INT LONG SIGNED UNSIGNED FLOAT DOUBLE CONST VOLATILE VOID
+%token<Cabs.loc> LPAREN RPAREN LBRACK RBRACK LBRACE RBRACE DOT COMMA
+ SEMICOLON ELLIPSIS TYPEDEF EXTERN STATIC RESTRICT AUTO REGISTER INLINE
+ NORETURN CHAR SHORT INT LONG SIGNED UNSIGNED FLOAT DOUBLE CONST VOLATILE VOID
STRUCT UNION ENUM UNDERSCORE_BOOL PACKED ALIGNAS ATTRIBUTE ASM
-%token<cabsloc> CASE DEFAULT IF ELSE SWITCH WHILE DO FOR GOTO CONTINUE BREAK
+%token<Cabs.loc> CASE DEFAULT IF_ ELSE SWITCH WHILE DO FOR GOTO CONTINUE BREAK
RETURN BUILTIN_VA_ARG BUILTIN_OFFSETOF
%token EOF
-%type<expression * cabsloc> primary_expression postfix_expression
+%type<Cabs.expression * Cabs.loc> primary_expression postfix_expression
unary_expression cast_expression multiplicative_expression additive_expression
shift_expression relational_expression equality_expression AND_expression
exclusive_OR_expression inclusive_OR_expression logical_AND_expression
logical_OR_expression conditional_expression assignment_expression
constant_expression expression
-%type<unary_operator * cabsloc> unary_operator
-%type<binary_operator> assignment_operator
-%type<list expression (* Reverse order *)> argument_expression_list
-%type<definition> declaration
-%type<list spec_elem * cabsloc> declaration_specifiers
-%type<list spec_elem> declaration_specifiers_typespec_opt
-%type<list init_name (* Reverse order *)> init_declarator_list
-%type<init_name> init_declarator
-%type<storage * cabsloc> storage_class_specifier
-%type<typeSpecifier * cabsloc> type_specifier struct_or_union_specifier enum_specifier
-%type<structOrUnion * cabsloc> struct_or_union
-%type<list field_group (* Reverse order *)> struct_declaration_list
-%type<field_group> struct_declaration
-%type<list spec_elem * cabsloc> specifier_qualifier_list
-%type<list (option name * option expression) (* Reverse order *)> struct_declarator_list
-%type<option name * option expression> struct_declarator
-%type<list (string * option expression * cabsloc) (* Reverse order *)> enumerator_list
-%type<string * option expression * cabsloc> enumerator
-%type<string * cabsloc> enumeration_constant
-%type<cvspec * cabsloc> type_qualifier type_qualifier_noattr
-%type<funspec * cabsloc> function_specifier
-%type<name> declarator declarator_noattrend direct_declarator
-%type<(decl_type -> decl_type) * cabsloc> pointer
-%type<list cvspec (* Reverse order *)> type_qualifier_list
-%type<list parameter * bool> parameter_type_list
-%type<list parameter (* Reverse order *)> parameter_list
-%type<parameter> parameter_declaration
-%type<list spec_elem * decl_type> type_name
-%type<decl_type> abstract_declarator direct_abstract_declarator
-%type<init_expression> c_initializer
-%type<list (list initwhat * init_expression) (* Reverse order *)> initializer_list
-%type<list initwhat> designation
-%type<list initwhat (* Reverse order *)> designator_list
-%type<initwhat> designator
-%type<statement> statement_dangerous statement_safe
+%type<Cabs.unary_operator * Cabs.loc> unary_operator
+%type<Cabs.binary_operator> assignment_operator
+%type<list Cabs.expression (* Reverse order *)> argument_expression_list
+%type<Cabs.definition> declaration
+%type<list Cabs.spec_elem * Cabs.loc> declaration_specifiers
+%type<list Cabs.spec_elem> declaration_specifiers_typespec_opt
+%type<list Cabs.init_name (* Reverse order *)> init_declarator_list
+%type<Cabs.init_name> init_declarator
+%type<Cabs.storage * Cabs.loc> storage_class_specifier
+%type<Cabs.typeSpecifier * Cabs.loc> type_specifier struct_or_union_specifier enum_specifier
+%type<Cabs.structOrUnion * Cabs.loc> struct_or_union
+%type<list Cabs.field_group (* Reverse order *)> struct_declaration_list
+%type<Cabs.field_group> struct_declaration
+%type<list Cabs.spec_elem * Cabs.loc> specifier_qualifier_list
+%type<list (option Cabs.name * option Cabs.expression) (* Reverse order *)>
+ struct_declarator_list
+%type<option Cabs.name * option Cabs.expression> struct_declarator
+%type<list (Cabs.string * option Cabs.expression * Cabs.loc) (* Reverse order *)>
+ enumerator_list
+%type<Cabs.string * option Cabs.expression * Cabs.loc> enumerator
+%type<Cabs.string * Cabs.loc> enumeration_constant
+%type<Cabs.cvspec * Cabs.loc> type_qualifier type_qualifier_noattr
+%type<Cabs.funspec * Cabs.loc> function_specifier
+%type<Cabs.name> declarator declarator_noattrend direct_declarator
+%type<(Cabs.decl_type -> Cabs.decl_type) * Cabs.loc> pointer
+%type<list Cabs.cvspec (* Reverse order *)> type_qualifier_list
+%type<list Cabs.parameter * bool> parameter_type_list
+%type<list Cabs.parameter (* Reverse order *)> parameter_list
+%type<Cabs.parameter> parameter_declaration
+%type<list Cabs.spec_elem * Cabs.decl_type> type_name
+%type<Cabs.decl_type> abstract_declarator direct_abstract_declarator
+%type<Cabs.init_expression> c_initializer
+%type<list (list Cabs.initwhat * Cabs.init_expression) (* Reverse order *)>
+ initializer_list
+%type<list Cabs.initwhat> designation
+%type<list Cabs.initwhat (* Reverse order *)> designator_list
+%type<Cabs.initwhat> designator
+%type<Cabs.statement> statement_dangerous statement_safe
labeled_statement(statement_safe) labeled_statement(statement_dangerous)
iteration_statement(statement_safe) iteration_statement(statement_dangerous)
compound_statement
-%type<list statement (* Reverse order *)> block_item_list
-%type<statement> block_item expression_statement selection_statement_dangerous
+%type<list Cabs.statement (* Reverse order *)> block_item_list
+%type<Cabs.statement> block_item expression_statement selection_statement_dangerous
selection_statement_safe jump_statement asm_statement
-%type<list definition (* Reverse order *)> translation_unit
-%type<definition> external_declaration function_definition
-%type<list definition> declaration_list
-%type<attribute * cabsloc> attribute_specifier
-%type<list attribute> attribute_specifier_list
-%type<gcc_attribute> gcc_attribute
-%type<list gcc_attribute> gcc_attribute_list
-%type<gcc_attribute_word> gcc_attribute_word
-%type<list string (* Reverse order *)> identifier_list
-%type<list asm_flag> asm_flags
-%type<option string> asm_op_name
-%type<asm_operand> asm_operand
-%type<list asm_operand> asm_operands asm_operands_ne
-%type<list asm_operand * list asm_operand * list asm_flag> asm_arguments
-%type<list cvspec> asm_attributes
-
-%start<list definition> translation_unit_file
+%type<list Cabs.definition (* Reverse order *)> translation_unit
+%type<Cabs.definition> external_declaration function_definition
+%type<list Cabs.definition> declaration_list
+%type<Cabs.attribute * Cabs.loc> attribute_specifier
+%type<list Cabs.attribute> attribute_specifier_list
+%type<Cabs.gcc_attribute> gcc_attribute
+%type<list Cabs.gcc_attribute> gcc_attribute_list
+%type<Cabs.gcc_attribute_word> gcc_attribute_word
+%type<list Cabs.string (* Reverse order *)> identifier_list
+%type<list Cabs.asm_flag> asm_flags
+%type<option Cabs.string> asm_op_name
+%type<Cabs.asm_operand> asm_operand
+%type<list Cabs.asm_operand> asm_operands asm_operands_ne
+%type<list Cabs.asm_operand * list Cabs.asm_operand * list Cabs.asm_flag> asm_arguments
+%type<list Cabs.cvspec> asm_attributes
+
+%start<list Cabs.definition> translation_unit_file
%%
(* Actual grammar *)
@@ -112,12 +115,12 @@ Require Import List.
(* 6.5.1 *)
primary_expression:
| var = VAR_NAME
- { (VARIABLE (fst var), snd var) }
+ { (Cabs.VARIABLE (fst var), snd var) }
| cst = CONSTANT
- { (CONSTANT (fst cst), snd cst) }
+ { (Cabs.CONSTANT (fst cst), snd cst) }
| str = STRING_LITERAL
{ let '((wide, chars), loc) := str in
- (CONSTANT (CONST_STRING wide chars), loc) }
+ (Cabs.CONSTANT (Cabs.CONST_STRING wide chars), loc) }
| loc = LPAREN expr = expression RPAREN
{ (fst expr, loc)}
@@ -126,29 +129,30 @@ postfix_expression:
| expr = primary_expression
{ expr }
| expr = postfix_expression LBRACK index = expression RBRACK
- { (INDEX (fst expr) (fst index), snd expr) }
+ { (Cabs.INDEX (fst expr) (fst index), snd expr) }
| expr = postfix_expression LPAREN args = argument_expression_list RPAREN
- { (CALL (fst expr) (rev' args), snd expr) }
+ { (Cabs.CALL (fst expr) (rev' args), snd expr) }
| expr = postfix_expression LPAREN RPAREN
- { (CALL (fst expr) [], snd expr) }
+ { (Cabs.CALL (fst expr) [], snd expr) }
| loc = BUILTIN_VA_ARG LPAREN expr = assignment_expression COMMA ty = type_name RPAREN
- { (BUILTIN_VA_ARG (fst expr) ty, loc) }
+ { (Cabs.BUILTIN_VA_ARG (fst expr) ty, loc) }
| expr = postfix_expression DOT mem = OTHER_NAME
- { (MEMBEROF (fst expr) (fst mem), snd expr) }
+ { (Cabs.MEMBEROF (fst expr) (fst mem), snd expr) }
| expr = postfix_expression PTR mem = OTHER_NAME
- { (MEMBEROFPTR (fst expr) (fst mem), snd expr) }
+ { (Cabs.MEMBEROFPTR (fst expr) (fst mem), snd expr) }
| expr = postfix_expression INC
- { (UNARY POSINCR (fst expr), snd expr) }
+ { (Cabs.UNARY Cabs.POSINCR (fst expr), snd expr) }
| expr = postfix_expression DEC
- { (UNARY POSDECR (fst expr), snd expr) }
+ { (Cabs.UNARY Cabs.POSDECR (fst expr), snd expr) }
| loc = LPAREN typ = type_name RPAREN LBRACE init = initializer_list RBRACE
- { (CAST typ (COMPOUND_INIT (rev' init)), loc) }
+ { (Cabs.CAST typ (Cabs.COMPOUND_INIT (rev' init)), loc) }
| loc = LPAREN typ = type_name RPAREN LBRACE init = initializer_list COMMA RBRACE
- { (CAST typ (COMPOUND_INIT (rev' init)), loc) }
-| loc = BUILTIN_OFFSETOF LPAREN typ = type_name COMMA id = OTHER_NAME mems = designator_list RPAREN
- { (BUILTIN_OFFSETOF typ ((INFIELD_INIT (fst id))::(rev mems)), loc) }
+ { (Cabs.CAST typ (Cabs.COMPOUND_INIT (rev' init)), loc) }
+| loc = BUILTIN_OFFSETOF LPAREN typ = type_name COMMA id = OTHER_NAME
+ mems = designator_list RPAREN
+ { (Cabs.BUILTIN_OFFSETOF typ ((Cabs.INFIELD_INIT (fst id))::(rev mems)), loc) }
| loc = BUILTIN_OFFSETOF LPAREN typ = type_name COMMA mem = OTHER_NAME RPAREN
- { (BUILTIN_OFFSETOF typ [INFIELD_INIT (fst mem)], loc) }
+ { (Cabs.BUILTIN_OFFSETOF typ [Cabs.INFIELD_INIT (fst mem)], loc) }
(* Semantic value is in reverse order. *)
argument_expression_list:
@@ -162,170 +166,171 @@ unary_expression:
| expr = postfix_expression
{ expr }
| loc = INC expr = unary_expression
- { (UNARY PREINCR (fst expr), loc) }
+ { (Cabs.UNARY Cabs.PREINCR (fst expr), loc) }
| loc = DEC expr = unary_expression
- { (UNARY PREDECR (fst expr), loc) }
+ { (Cabs.UNARY Cabs.PREDECR (fst expr), loc) }
| op = unary_operator expr = cast_expression
- { (UNARY (fst op) (fst expr), snd op) }
+ { (Cabs.UNARY (fst op) (fst expr), snd op) }
| loc = SIZEOF expr = unary_expression
- { (EXPR_SIZEOF (fst expr), loc) }
+ { (Cabs.EXPR_SIZEOF (fst expr), loc) }
| loc = SIZEOF LPAREN typ = type_name RPAREN
- { (TYPE_SIZEOF typ, loc) }
+ { (Cabs.TYPE_SIZEOF typ, loc) }
(* Non-standard *)
| loc = ALIGNOF LPAREN typ = type_name RPAREN
- { (ALIGNOF typ, loc) }
+ { (Cabs.ALIGNOF typ, loc) }
unary_operator:
| loc = AND
- { (ADDROF, loc) }
+ { (Cabs.ADDROF, loc) }
| loc = STAR
- { (MEMOF, loc) }
+ { (Cabs.MEMOF, loc) }
| loc = PLUS
- { (PLUS, loc) }
+ { (Cabs.PLUS, loc) }
| loc = MINUS
- { (MINUS, loc) }
+ { (Cabs.MINUS, loc) }
| loc = TILDE
- { (BNOT, loc) }
+ { (Cabs.BNOT, loc) }
| loc = BANG
- { (NOT, loc) }
+ { (Cabs.NOT, loc) }
(* 6.5.4 *)
cast_expression:
| expr = unary_expression
{ expr }
| loc = LPAREN typ = type_name RPAREN expr = cast_expression
- { (CAST typ (SINGLE_INIT (fst expr)), loc) }
+ { (Cabs.CAST typ (Cabs.SINGLE_INIT (fst expr)), loc) }
(* 6.5.5 *)
multiplicative_expression:
| expr = cast_expression
{ expr }
| expr1 = multiplicative_expression STAR expr2 = cast_expression
- { (BINARY MUL (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.MUL (fst expr1) (fst expr2), snd expr1) }
| expr1 = multiplicative_expression SLASH expr2 = cast_expression
- { (BINARY DIV (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.DIV (fst expr1) (fst expr2), snd expr1) }
| expr1 = multiplicative_expression PERCENT expr2 = cast_expression
- { (BINARY MOD (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.MOD (fst expr1) (fst expr2), snd expr1) }
(* 6.5.6 *)
additive_expression:
| expr = multiplicative_expression
{ expr }
| expr1 = additive_expression PLUS expr2 = multiplicative_expression
- { (BINARY ADD (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.ADD (fst expr1) (fst expr2), snd expr1) }
| expr1 = additive_expression MINUS expr2 = multiplicative_expression
- { (BINARY SUB (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.SUB (fst expr1) (fst expr2), snd expr1) }
(* 6.5.7 *)
shift_expression:
| expr = additive_expression
{ expr }
| expr1 = shift_expression LEFT expr2 = additive_expression
- { (BINARY SHL (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.SHL (fst expr1) (fst expr2), snd expr1) }
| expr1 = shift_expression RIGHT expr2 = additive_expression
- { (BINARY SHR (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.SHR (fst expr1) (fst expr2), snd expr1) }
(* 6.5.8 *)
relational_expression:
| expr = shift_expression
{ expr }
| expr1 = relational_expression LT expr2 = shift_expression
- { (BINARY LT (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.LT (fst expr1) (fst expr2), snd expr1) }
| expr1 = relational_expression GT expr2 = shift_expression
- { (BINARY GT (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.GT (fst expr1) (fst expr2), snd expr1) }
| expr1 = relational_expression LEQ expr2 = shift_expression
- { (BINARY LE (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.LE (fst expr1) (fst expr2), snd expr1) }
| expr1 = relational_expression GEQ expr2 = shift_expression
- { (BINARY GE (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.GE (fst expr1) (fst expr2), snd expr1) }
(* 6.5.9 *)
equality_expression:
| expr = relational_expression
{ expr }
| expr1 = equality_expression EQEQ expr2 = relational_expression
- { (BINARY EQ (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.EQ (fst expr1) (fst expr2), snd expr1) }
| expr1 = equality_expression NEQ expr2 = relational_expression
- { (BINARY NE (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.NE (fst expr1) (fst expr2), snd expr1) }
(* 6.5.10 *)
AND_expression:
| expr = equality_expression
{ expr }
| expr1 = AND_expression AND expr2 = equality_expression
- { (BINARY BAND (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.BAND (fst expr1) (fst expr2), snd expr1) }
(* 6.5.11 *)
exclusive_OR_expression:
| expr = AND_expression
{ expr }
| expr1 = exclusive_OR_expression HAT expr2 = AND_expression
- { (BINARY XOR (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.XOR (fst expr1) (fst expr2), snd expr1) }
(* 6.5.12 *)
inclusive_OR_expression:
| expr = exclusive_OR_expression
{ expr }
| expr1 = inclusive_OR_expression BAR expr2 = exclusive_OR_expression
- { (BINARY BOR (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.BOR (fst expr1) (fst expr2), snd expr1) }
(* 6.5.13 *)
logical_AND_expression:
| expr = inclusive_OR_expression
{ expr }
| expr1 = logical_AND_expression ANDAND expr2 = inclusive_OR_expression
- { (BINARY AND (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.AND (fst expr1) (fst expr2), snd expr1) }
(* 6.5.14 *)
logical_OR_expression:
| expr = logical_AND_expression
{ expr }
| expr1 = logical_OR_expression BARBAR expr2 = logical_AND_expression
- { (BINARY OR (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.OR (fst expr1) (fst expr2), snd expr1) }
(* 6.5.15 *)
conditional_expression:
| expr = logical_OR_expression
{ expr }
-| expr1 = logical_OR_expression QUESTION expr2 = expression COLON expr3 = conditional_expression
- { (QUESTION (fst expr1) (fst expr2) (fst expr3), snd expr1) }
+| expr1 = logical_OR_expression QUESTION expr2 = expression COLON
+ expr3 = conditional_expression
+ { (Cabs.QUESTION (fst expr1) (fst expr2) (fst expr3), snd expr1) }
(* 6.5.16 *)
assignment_expression:
| expr = conditional_expression
{ expr }
| expr1 = unary_expression op = assignment_operator expr2 = assignment_expression
- { (BINARY op (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY op (fst expr1) (fst expr2), snd expr1) }
assignment_operator:
| EQ
- { ASSIGN }
+ { Cabs.ASSIGN }
| MUL_ASSIGN
- { MUL_ASSIGN }
+ { Cabs.MUL_ASSIGN }
| DIV_ASSIGN
- { DIV_ASSIGN }
+ { Cabs.DIV_ASSIGN }
| MOD_ASSIGN
- { MOD_ASSIGN }
+ { Cabs.MOD_ASSIGN }
| ADD_ASSIGN
- { ADD_ASSIGN }
+ { Cabs.ADD_ASSIGN }
| SUB_ASSIGN
- { SUB_ASSIGN }
+ { Cabs.SUB_ASSIGN }
| LEFT_ASSIGN
- { SHL_ASSIGN }
+ { Cabs.SHL_ASSIGN }
| RIGHT_ASSIGN
- { SHR_ASSIGN }
+ { Cabs.SHR_ASSIGN }
| XOR_ASSIGN
- { XOR_ASSIGN }
+ { Cabs.XOR_ASSIGN }
| OR_ASSIGN
- { BOR_ASSIGN }
+ { Cabs.BOR_ASSIGN }
| AND_ASSIGN
- { BAND_ASSIGN }
+ { Cabs.BAND_ASSIGN }
(* 6.5.17 *)
expression:
| expr = assignment_expression
{ expr }
| expr1 = expression COMMA expr2 = assignment_expression
- { (BINARY COMMA (fst expr1) (fst expr2), snd expr1) }
+ { (Cabs.BINARY Cabs.COMMA (fst expr1) (fst expr2), snd expr1) }
(* 6.6 *)
constant_expression:
@@ -335,19 +340,19 @@ constant_expression:
(* 6.7 *)
declaration:
| decspec = declaration_specifiers decls = init_declarator_list SEMICOLON
- { DECDEF (fst decspec, rev' decls) (snd decspec) }
+ { Cabs.DECDEF (fst decspec, rev' decls) (snd decspec) }
| decspec = declaration_specifiers SEMICOLON
- { DECDEF (fst decspec, []) (snd decspec) }
+ { Cabs.DECDEF (fst decspec, []) (snd decspec) }
declaration_specifiers_typespec_opt:
| storage = storage_class_specifier rest = declaration_specifiers_typespec_opt
- { SpecStorage (fst storage)::rest }
+ { Cabs.SpecStorage (fst storage)::rest }
| typ = type_specifier rest = declaration_specifiers_typespec_opt
- { SpecType (fst typ)::rest }
+ { Cabs.SpecType (fst typ)::rest }
| qual = type_qualifier rest = declaration_specifiers_typespec_opt
- { SpecCV (fst qual)::rest }
+ { Cabs.SpecCV (fst qual)::rest }
| func = function_specifier rest = declaration_specifiers_typespec_opt
- { SpecFunction (fst func)::rest }
+ { Cabs.SpecFunction (fst func)::rest }
| /* empty */
{ [] }
@@ -357,16 +362,16 @@ declaration_specifiers_typespec_opt:
specifier. *)
declaration_specifiers:
| storage = storage_class_specifier rest = declaration_specifiers
- { (SpecStorage (fst storage)::fst rest, snd storage) }
+ { (Cabs.SpecStorage (fst storage)::fst rest, snd storage) }
| typ = type_specifier rest = declaration_specifiers_typespec_opt
- { (SpecType (fst typ)::rest, snd typ) }
+ { (Cabs.SpecType (fst typ)::rest, snd typ) }
(* We have to inline type_qualifier in order to avoid a conflict. *)
| qual = type_qualifier_noattr rest = declaration_specifiers
- { (SpecCV (fst qual)::fst rest, snd qual) }
+ { (Cabs.SpecCV (fst qual)::fst rest, snd qual) }
| attr = attribute_specifier rest = declaration_specifiers
- { (SpecCV (CV_ATTR (fst attr))::fst rest, snd attr) }
+ { (Cabs.SpecCV (Cabs.CV_ATTR (fst attr))::fst rest, snd attr) }
| func = function_specifier rest = declaration_specifiers
- { (SpecFunction (fst func)::fst rest, snd func) }
+ { (Cabs.SpecFunction (fst func)::fst rest, snd func) }
init_declarator_list:
| init = init_declarator
@@ -376,71 +381,71 @@ init_declarator_list:
init_declarator:
| name = declarator
- { Init_name name NO_INIT }
+ { Cabs.Init_name name Cabs.NO_INIT }
| name = declarator EQ init = c_initializer
- { Init_name name init }
+ { Cabs.Init_name name init }
(* 6.7.1 *)
storage_class_specifier:
| loc = TYPEDEF
- { (TYPEDEF, loc) }
+ { (Cabs.TYPEDEF, loc) }
| loc = EXTERN
- { (EXTERN, loc) }
+ { (Cabs.EXTERN, loc) }
| loc = STATIC
- { (STATIC, loc) }
+ { (Cabs.STATIC, loc) }
| loc = AUTO
- { (AUTO, loc) }
+ { (Cabs.AUTO, loc) }
| loc = REGISTER
- { (REGISTER, loc) }
+ { (Cabs.REGISTER, loc) }
(* 6.7.2 *)
type_specifier:
| loc = VOID
- { (Tvoid, loc) }
+ { (Cabs.Tvoid, loc) }
| loc = CHAR
- { (Tchar, loc) }
+ { (Cabs.Tchar, loc) }
| loc = SHORT
- { (Tshort, loc) }
+ { (Cabs.Tshort, loc) }
| loc = INT
- { (Tint, loc) }
+ { (Cabs.Tint, loc) }
| loc = LONG
- { (Tlong, loc) }
+ { (Cabs.Tlong, loc) }
| loc = FLOAT
- { (Tfloat, loc) }
+ { (Cabs.Tfloat, loc) }
| loc = DOUBLE
- { (Tdouble, loc) }
+ { (Cabs.Tdouble, loc) }
| loc = SIGNED
- { (Tsigned, loc) }
+ { (Cabs.Tsigned, loc) }
| loc = UNSIGNED
- { (Tunsigned, loc) }
+ { (Cabs.Tunsigned, loc) }
| loc = UNDERSCORE_BOOL
- { (T_Bool, loc) }
+ { (Cabs.T_Bool, loc) }
| spec = struct_or_union_specifier
{ spec }
| spec = enum_specifier
{ spec }
| id = TYPEDEF_NAME
- { (Tnamed (fst id), snd id) }
+ { (Cabs.Tnamed (fst id), snd id) }
(* 6.7.2.1 *)
struct_or_union_specifier:
| str_uni = struct_or_union attrs = attribute_specifier_list id = OTHER_NAME
LBRACE decls = struct_declaration_list RBRACE
- { (Tstruct_union (fst str_uni) (Some (fst id)) (Some (rev' decls)) attrs,
+ { (Cabs.Tstruct_union (fst str_uni) (Some (fst id)) (Some (rev' decls)) attrs,
snd str_uni) }
| str_uni = struct_or_union attrs = attribute_specifier_list
LBRACE decls = struct_declaration_list RBRACE
- { (Tstruct_union (fst str_uni) None (Some (rev' decls)) attrs,
+ { (Cabs.Tstruct_union (fst str_uni) None (Some (rev' decls)) attrs,
snd str_uni) }
| str_uni = struct_or_union attrs = attribute_specifier_list id = OTHER_NAME
- { (Tstruct_union (fst str_uni) (Some (fst id)) None attrs,
+ { (Cabs.Tstruct_union (fst str_uni) (Some (fst id)) None attrs,
snd str_uni) }
struct_or_union:
| loc = STRUCT
- { (STRUCT, loc) }
+ { (Cabs.STRUCT, loc) }
| loc = UNION
- { (UNION, loc) }
+ { (Cabs.UNION, loc) }
struct_declaration_list:
| (* empty *)
@@ -450,20 +455,20 @@ struct_declaration_list:
struct_declaration:
| decspec = specifier_qualifier_list decls = struct_declarator_list SEMICOLON
- { Field_group (fst decspec) (rev' decls) (snd decspec) }
+ { Cabs.Field_group (fst decspec) (rev' decls) (snd decspec) }
(* Extension to C99 grammar needed to parse some GNU header files. *)
| decspec = specifier_qualifier_list SEMICOLON
- { Field_group (fst decspec) [(None,None)] (snd decspec) }
+ { Cabs.Field_group (fst decspec) [(None,None)] (snd decspec) }
specifier_qualifier_list:
| typ = type_specifier rest = specifier_qualifier_list
- { (SpecType (fst typ)::fst rest, snd typ) }
+ { (Cabs.SpecType (fst typ)::fst rest, snd typ) }
| typ = type_specifier
- { ([SpecType (fst typ)], snd typ) }
+ { ([Cabs.SpecType (fst typ)], snd typ) }
| qual = type_qualifier rest = specifier_qualifier_list
- { (SpecCV (fst qual)::fst rest, snd qual) }
+ { (Cabs.SpecCV (fst qual)::fst rest, snd qual) }
| qual = type_qualifier
- { ([SpecCV (fst qual)], snd qual) }
+ { ([Cabs.SpecCV (fst qual)], snd qual) }
struct_declarator_list:
| decl = struct_declarator
@@ -483,18 +488,18 @@ struct_declarator:
enum_specifier:
| loc = ENUM attrs = attribute_specifier_list name = OTHER_NAME
LBRACE enum_list = enumerator_list RBRACE
- { (Tenum (Some (fst name)) (Some (rev' enum_list)) attrs, loc) }
+ { (Cabs.Tenum (Some (fst name)) (Some (rev' enum_list)) attrs, loc) }
| loc = ENUM attrs = attribute_specifier_list
LBRACE enum_list = enumerator_list RBRACE
- { (Tenum None (Some (rev' enum_list)) attrs, loc) }
+ { (Cabs.Tenum None (Some (rev' enum_list)) attrs, loc) }
| loc = ENUM attrs = attribute_specifier_list name = OTHER_NAME
LBRACE enum_list = enumerator_list COMMA RBRACE
- { (Tenum (Some (fst name)) (Some (rev' enum_list)) attrs, loc) }
+ { (Cabs.Tenum (Some (fst name)) (Some (rev' enum_list)) attrs, loc) }
| loc = ENUM attrs = attribute_specifier_list
LBRACE enum_list = enumerator_list COMMA RBRACE
- { (Tenum None (Some (rev' enum_list)) attrs, loc) }
+ { (Cabs.Tenum None (Some (rev' enum_list)) attrs, loc) }
| loc = ENUM attrs = attribute_specifier_list name = OTHER_NAME
- { (Tenum (Some (fst name)) None attrs, loc) }
+ { (Cabs.Tenum (Some (fst name)) None attrs, loc) }
enumerator_list:
| enum = enumerator
@@ -515,18 +520,18 @@ enumeration_constant:
(* 6.7.3 *)
type_qualifier_noattr:
| loc = CONST
- { (CV_CONST, loc) }
+ { (Cabs.CV_CONST, loc) }
| loc = RESTRICT
- { (CV_RESTRICT, loc) }
+ { (Cabs.CV_RESTRICT, loc) }
| loc = VOLATILE
- { (CV_VOLATILE, loc) }
+ { (Cabs.CV_VOLATILE, loc) }
type_qualifier:
| qual = type_qualifier_noattr
{ qual }
(* Non-standard *)
| attr = attribute_specifier
- { (CV_ATTR (fst attr), snd attr) }
+ { (Cabs.CV_ATTR (fst attr), snd attr) }
(* Non-standard *)
@@ -538,13 +543,13 @@ attribute_specifier_list:
attribute_specifier:
| loc = ATTRIBUTE LPAREN LPAREN attr = gcc_attribute_list RPAREN RPAREN
- { (GCC_ATTR (rev' attr) loc, loc) }
+ { (Cabs.GCC_ATTR (rev' attr) loc, loc) }
| loc = PACKED LPAREN args = argument_expression_list RPAREN
- { (PACKED_ATTR (rev' args) loc, loc) }
+ { (Cabs.PACKED_ATTR (rev' args) loc, loc) }
| loc = ALIGNAS LPAREN args = argument_expression_list RPAREN
- { (ALIGNAS_ATTR (rev' args) loc, loc) }
+ { (Cabs.ALIGNAS_ATTR (rev' args) loc, loc) }
| loc = ALIGNAS LPAREN typ = type_name RPAREN
- { (ALIGNAS_ATTR [ALIGNOF typ] loc, loc) }
+ { (Cabs.ALIGNAS_ATTR [Cabs.ALIGNOF typ] loc, loc) }
gcc_attribute_list:
| a = gcc_attribute
@@ -554,80 +559,81 @@ gcc_attribute_list:
gcc_attribute:
| /* empty */
- { GCC_ATTR_EMPTY }
+ { Cabs.GCC_ATTR_EMPTY }
| w = gcc_attribute_word
- { GCC_ATTR_NOARGS w }
+ { Cabs.GCC_ATTR_NOARGS w }
| w = gcc_attribute_word LPAREN RPAREN
- { GCC_ATTR_ARGS w [] }
+ { Cabs.GCC_ATTR_ARGS w [] }
| w = gcc_attribute_word LPAREN args = argument_expression_list RPAREN
- { GCC_ATTR_ARGS w (rev' args) }
+ { Cabs.GCC_ATTR_ARGS w (rev' args) }
gcc_attribute_word:
| i = OTHER_NAME
- { GCC_ATTR_IDENT (fst i) }
+ { Cabs.GCC_ATTR_IDENT (fst i) }
| CONST
- { GCC_ATTR_CONST }
+ { Cabs.GCC_ATTR_CONST }
| PACKED
- { GCC_ATTR_PACKED }
+ { Cabs.GCC_ATTR_PACKED }
(* 6.7.4 *)
function_specifier:
| loc = INLINE
- { (INLINE, loc) }
+ { (Cabs.INLINE, loc) }
| loc = NORETURN
- { (NORETURN, loc)}
+ { (Cabs.NORETURN, loc)}
(* 6.7.5 *)
declarator:
| decl = declarator_noattrend attrs = attribute_specifier_list
- { match decl with Name name typ attr loc =>
- Name name typ (List.app attr attrs) loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name typ (List.app attr attrs) loc }
declarator_noattrend:
| decl = direct_declarator
{ decl }
| pt = pointer decl = direct_declarator
- { match decl with Name name typ attr _ =>
- Name name ((fst pt) typ) attr (snd pt) end }
+ { let 'Cabs.Name name typ attr _ := decl in
+ Cabs.Name name ((fst pt) typ) attr (snd pt) }
direct_declarator:
| id = VAR_NAME
- { Name (fst id) JUSTBASE [] (snd id) }
+ { Cabs.Name (fst id) Cabs.JUSTBASE [] (snd id) }
| LPAREN decl = declarator RPAREN
{ decl }
-| decl = direct_declarator LBRACK quallst = type_qualifier_list expr = assignment_expression RBRACK
- { match decl with Name name typ attr loc =>
- Name name (ARRAY typ (rev' quallst) (Some (fst expr))) attr loc end }
+| decl = direct_declarator LBRACK quallst = type_qualifier_list
+ expr = assignment_expression RBRACK
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.ARRAY typ (rev' quallst) (Some (fst expr))) attr loc }
| decl = direct_declarator LBRACK expr = assignment_expression RBRACK
- { match decl with Name name typ attr loc =>
- Name name (ARRAY typ [] (Some (fst expr))) attr loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.ARRAY typ [] (Some (fst expr))) attr loc }
| decl = direct_declarator LBRACK quallst = type_qualifier_list RBRACK
- { match decl with Name name typ attr loc =>
- Name name (ARRAY typ (rev' quallst) None) attr loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.ARRAY typ (rev' quallst) None) attr loc }
| decl = direct_declarator LBRACK RBRACK
- { match decl with Name name typ attr loc =>
- Name name (ARRAY typ [] None) attr loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.ARRAY typ [] None) attr loc }
(*| direct_declarator LBRACK ... STATIC ... RBRACK
| direct_declarator LBRACK STAR RBRACK*)
| decl = direct_declarator LPAREN params = parameter_type_list RPAREN
- { match decl with Name name typ attr loc =>
- Name name (PROTO typ params) attr loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.PROTO typ params) attr loc }
| decl = direct_declarator LPAREN RPAREN
- { match decl with Name name typ attr loc =>
- Name name (PROTO_OLD typ []) attr loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.PROTO_OLD typ []) attr loc }
| decl = direct_declarator LPAREN params = identifier_list RPAREN
- { match decl with Name name typ attr loc =>
- Name name (PROTO_OLD typ (rev' params)) attr loc end }
+ { let 'Cabs.Name name typ attr loc := decl in
+ Cabs.Name name (Cabs.PROTO_OLD typ (rev' params)) attr loc }
pointer:
| loc = STAR
- { (fun typ => PTR [] typ, loc) }
+ { (fun typ => Cabs.PTR [] typ, loc) }
| loc = STAR quallst = type_qualifier_list
- { (fun typ => PTR (rev' quallst) typ, loc) }
+ { (fun typ => Cabs.PTR (rev' quallst) typ, loc) }
| loc = STAR pt = pointer
- { (fun typ => PTR [] ((fst pt) typ), loc) }
+ { (fun typ => Cabs.PTR [] ((fst pt) typ), loc) }
| loc = STAR quallst = type_qualifier_list pt = pointer
- { (fun typ => PTR (rev' quallst) ((fst pt) typ), loc) }
+ { (fun typ => Cabs.PTR (rev' quallst) ((fst pt) typ), loc) }
type_qualifier_list:
| qual = type_qualifier
@@ -649,12 +655,12 @@ parameter_list:
parameter_declaration:
| specs = declaration_specifiers decl = declarator
- { match decl with Name name typ attr _ =>
- PARAM (fst specs) (Some name) typ attr (snd specs) end }
+ { match decl with Cabs.Name name typ attr _ =>
+ Cabs.PARAM (fst specs) (Some name) typ attr (snd specs) end }
| specs = declaration_specifiers decl = abstract_declarator
- { PARAM (fst specs) None decl [] (snd specs) }
+ { Cabs.PARAM (fst specs) None decl [] (snd specs) }
| specs = declaration_specifiers
- { PARAM (fst specs) None JUSTBASE [] (snd specs) }
+ { Cabs.PARAM (fst specs) None Cabs.JUSTBASE [] (snd specs) }
identifier_list:
| id = VAR_NAME
@@ -665,13 +671,13 @@ identifier_list:
(* 6.7.6 *)
type_name:
| specqual = specifier_qualifier_list
- { (fst specqual, JUSTBASE) }
+ { (fst specqual, Cabs.JUSTBASE) }
| specqual = specifier_qualifier_list typ = abstract_declarator
{ (fst specqual, typ) }
abstract_declarator:
| pt = pointer
- { (fst pt) JUSTBASE }
+ { (fst pt) Cabs.JUSTBASE }
| pt = pointer typ = direct_abstract_declarator
{ (fst pt) typ }
| typ = direct_abstract_declarator
@@ -680,41 +686,42 @@ abstract_declarator:
direct_abstract_declarator:
| LPAREN typ = abstract_declarator RPAREN
{ typ }
-| typ = direct_abstract_declarator LBRACK cvspec = type_qualifier_list expr = assignment_expression RBRACK
- { ARRAY typ cvspec (Some (fst expr)) }
+| typ = direct_abstract_declarator LBRACK cvspec = type_qualifier_list
+ expr = assignment_expression RBRACK
+ { Cabs.ARRAY typ cvspec (Some (fst expr)) }
| LBRACK cvspec = type_qualifier_list expr = assignment_expression RBRACK
- { ARRAY JUSTBASE cvspec (Some (fst expr)) }
+ { Cabs.ARRAY Cabs.JUSTBASE cvspec (Some (fst expr)) }
| typ = direct_abstract_declarator LBRACK expr = assignment_expression RBRACK
- { ARRAY typ [] (Some (fst expr)) }
+ { Cabs.ARRAY typ [] (Some (fst expr)) }
| LBRACK expr = assignment_expression RBRACK
- { ARRAY JUSTBASE [] (Some (fst expr)) }
+ { Cabs.ARRAY Cabs.JUSTBASE [] (Some (fst expr)) }
| typ = direct_abstract_declarator LBRACK cvspec = type_qualifier_list RBRACK
- { ARRAY typ cvspec None }
+ { Cabs.ARRAY typ cvspec None }
| LBRACK cvspec = type_qualifier_list RBRACK
- { ARRAY JUSTBASE cvspec None }
+ { Cabs.ARRAY Cabs.JUSTBASE cvspec None }
| typ = direct_abstract_declarator LBRACK RBRACK
- { ARRAY typ [] None }
+ { Cabs.ARRAY typ [] None }
| LBRACK RBRACK
- { ARRAY JUSTBASE [] None }
+ { Cabs.ARRAY Cabs.JUSTBASE [] None }
(*| direct_abstract_declarator? LBRACK STAR RBRACK*)
(*| direct_abstract_declarator? LBRACK ... STATIC ... RBRACK*)
| typ = direct_abstract_declarator LPAREN params = parameter_type_list RPAREN
- { PROTO typ params }
+ { Cabs.PROTO typ params }
| LPAREN params = parameter_type_list RPAREN
- { PROTO JUSTBASE params }
+ { Cabs.PROTO Cabs.JUSTBASE params }
| typ = direct_abstract_declarator LPAREN RPAREN
- { PROTO typ ([], false) }
+ { Cabs.PROTO typ ([], false) }
| LPAREN RPAREN
- { PROTO JUSTBASE ([], false) }
+ { Cabs.PROTO Cabs.JUSTBASE ([], false) }
(* 6.7.8 *)
c_initializer:
| expr = assignment_expression
- { SINGLE_INIT (fst expr) }
+ { Cabs.SINGLE_INIT (fst expr) }
| LBRACE init = initializer_list RBRACE
- { COMPOUND_INIT (rev' init) }
+ { Cabs.COMPOUND_INIT (rev' init) }
| LBRACE init = initializer_list COMMA RBRACE
- { COMPOUND_INIT (rev' init) }
+ { Cabs.COMPOUND_INIT (rev' init) }
initializer_list:
| design = designation init = c_initializer
@@ -738,9 +745,9 @@ designator_list:
designator:
| LBRACK expr = constant_expression RBRACK
- { ATINDEX_INIT (fst expr) }
+ { Cabs.ATINDEX_INIT (fst expr) }
| DOT id = OTHER_NAME
- { INFIELD_INIT (fst id) }
+ { Cabs.INFIELD_INIT (fst id) }
(* 6.8 *)
statement_dangerous:
@@ -768,18 +775,18 @@ statement_safe:
(* 6.8.1 *)
labeled_statement(last_statement):
| lbl = OTHER_NAME COLON stmt = last_statement
- { LABEL (fst lbl) stmt (snd lbl) }
+ { Cabs.LABEL (fst lbl) stmt (snd lbl) }
| loc = CASE expr = constant_expression COLON stmt = last_statement
- { CASE (fst expr) stmt loc }
+ { Cabs.CASE (fst expr) stmt loc }
| loc = DEFAULT COLON stmt = last_statement
- { DEFAULT stmt loc }
+ { Cabs.DEFAULT stmt loc }
(* 6.8.2 *)
compound_statement:
| loc = LBRACE lst = block_item_list RBRACE
- { BLOCK (rev' lst) loc }
+ { Cabs.BLOCK (rev' lst) loc }
| loc = LBRACE RBRACE
- { BLOCK [] loc }
+ { Cabs.BLOCK [] loc }
block_item_list:
| stmt = block_item
@@ -789,93 +796,103 @@ block_item_list:
block_item:
| decl = declaration
- { DEFINITION decl }
+ { Cabs.DEFINITION decl }
| stmt = statement_dangerous
{ stmt }
(* Non-standard *)
| p = PRAGMA
- { DEFINITION (PRAGMA (fst p) (snd p)) }
+ { Cabs.DEFINITION (Cabs.PRAGMA (fst p) (snd p)) }
(* 6.8.3 *)
expression_statement:
| expr = expression SEMICOLON
- { COMPUTATION (fst expr) (snd expr) }
+ { Cabs.COMPUTATION (fst expr) (snd expr) }
| loc = SEMICOLON
- { NOP loc }
+ { Cabs.NOP loc }
(* 6.8.4 *)
selection_statement_dangerous:
-| loc = IF LPAREN expr = expression RPAREN stmt = statement_dangerous
- { If (fst expr) stmt None loc }
-| loc = IF LPAREN expr = expression RPAREN stmt1 = statement_safe ELSE stmt2 = statement_dangerous
- { If (fst expr) stmt1 (Some stmt2) loc }
+| loc = IF_ LPAREN expr = expression RPAREN stmt = statement_dangerous
+ { Cabs.If (fst expr) stmt None loc }
+| loc = IF_ LPAREN expr = expression RPAREN stmt1 = statement_safe ELSE
+ stmt2 = statement_dangerous
+ { Cabs.If (fst expr) stmt1 (Some stmt2) loc }
| loc = SWITCH LPAREN expr = expression RPAREN stmt = statement_dangerous
- { SWITCH (fst expr) stmt loc }
+ { Cabs.SWITCH (fst expr) stmt loc }
selection_statement_safe:
-| loc = IF LPAREN expr = expression RPAREN stmt1 = statement_safe ELSE stmt2 = statement_safe
- { If (fst expr) stmt1 (Some stmt2) loc }
+| loc = IF_ LPAREN expr = expression RPAREN stmt1 = statement_safe ELSE
+ stmt2 = statement_safe
+ { Cabs.If (fst expr) stmt1 (Some stmt2) loc }
| loc = SWITCH LPAREN expr = expression RPAREN stmt = statement_safe
- { SWITCH (fst expr) stmt loc }
+ { Cabs.SWITCH (fst expr) stmt loc }
(* 6.8.5 *)
iteration_statement(last_statement):
| loc = WHILE LPAREN expr = expression RPAREN stmt = last_statement
- { WHILE (fst expr) stmt loc }
+ { Cabs.WHILE (fst expr) stmt loc }
| loc = DO stmt = statement_dangerous WHILE LPAREN expr = expression RPAREN SEMICOLON
- { DOWHILE (fst expr) stmt loc }
-| loc = FOR LPAREN expr1 = expression SEMICOLON expr2 = expression SEMICOLON expr3 = expression RPAREN stmt = last_statement
- { FOR (Some (FC_EXP (fst expr1))) (Some (fst expr2)) (Some (fst expr3)) stmt loc }
-| loc = FOR LPAREN decl1 = declaration expr2 = expression SEMICOLON expr3 = expression RPAREN stmt = last_statement
- { FOR (Some (FC_DECL decl1)) (Some (fst expr2)) (Some (fst expr3)) stmt loc }
-| loc = FOR LPAREN SEMICOLON expr2 = expression SEMICOLON expr3 = expression RPAREN stmt = last_statement
- { FOR None (Some (fst expr2)) (Some (fst expr3)) stmt loc }
-| loc = FOR LPAREN expr1 = expression SEMICOLON SEMICOLON expr3 = expression RPAREN stmt = last_statement
- { FOR (Some (FC_EXP (fst expr1))) None (Some (fst expr3)) stmt loc }
-| loc = FOR LPAREN decl1 = declaration SEMICOLON expr3 = expression RPAREN stmt = last_statement
- { FOR (Some (FC_DECL decl1)) None (Some (fst expr3)) stmt loc }
+ { Cabs.DOWHILE (fst expr) stmt loc }
+| loc = FOR LPAREN expr1 = expression SEMICOLON expr2 = expression SEMICOLON
+ expr3 = expression RPAREN stmt = last_statement
+ { Cabs.FOR (Some (Cabs.FC_EXP (fst expr1))) (Some (fst expr2)) (Some (fst expr3)) stmt loc }
+| loc = FOR LPAREN decl1 = declaration expr2 = expression SEMICOLON
+ expr3 = expression RPAREN stmt = last_statement
+ { Cabs.FOR (Some (Cabs.FC_DECL decl1)) (Some (fst expr2)) (Some (fst expr3)) stmt loc }
+| loc = FOR LPAREN SEMICOLON expr2 = expression SEMICOLON expr3 = expression RPAREN
+ stmt = last_statement
+ { Cabs.FOR None (Some (fst expr2)) (Some (fst expr3)) stmt loc }
+| loc = FOR LPAREN expr1 = expression SEMICOLON SEMICOLON expr3 = expression RPAREN
+ stmt = last_statement
+ { Cabs.FOR (Some (Cabs.FC_EXP (fst expr1))) None (Some (fst expr3)) stmt loc }
+| loc = FOR LPAREN decl1 = declaration SEMICOLON expr3 = expression RPAREN
+ stmt = last_statement
+ { Cabs.FOR (Some (Cabs.FC_DECL decl1)) None (Some (fst expr3)) stmt loc }
| loc = FOR LPAREN SEMICOLON SEMICOLON expr3 = expression RPAREN stmt = last_statement
- { FOR None None (Some (fst expr3)) stmt loc }
-| loc = FOR LPAREN expr1 = expression SEMICOLON expr2 = expression SEMICOLON RPAREN stmt = last_statement
- { FOR (Some (FC_EXP (fst expr1))) (Some (fst expr2)) None stmt loc }
-| loc = FOR LPAREN decl1 = declaration expr2 = expression SEMICOLON RPAREN stmt = last_statement
- { FOR (Some (FC_DECL decl1)) (Some (fst expr2)) None stmt loc }
+ { Cabs.FOR None None (Some (fst expr3)) stmt loc }
+| loc = FOR LPAREN expr1 = expression SEMICOLON expr2 = expression SEMICOLON RPAREN
+ stmt = last_statement
+ { Cabs.FOR (Some (Cabs.FC_EXP (fst expr1))) (Some (fst expr2)) None stmt loc }
+| loc = FOR LPAREN decl1 = declaration expr2 = expression SEMICOLON RPAREN
+ stmt = last_statement
+ { Cabs.FOR (Some (Cabs.FC_DECL decl1)) (Some (fst expr2)) None stmt loc }
| loc = FOR LPAREN SEMICOLON expr2 = expression SEMICOLON RPAREN stmt = last_statement
- { FOR None (Some (fst expr2)) None stmt loc }
+ { Cabs.FOR None (Some (fst expr2)) None stmt loc }
| loc = FOR LPAREN expr1 = expression SEMICOLON SEMICOLON RPAREN stmt = last_statement
- { FOR (Some (FC_EXP (fst expr1))) None None stmt loc }
+ { Cabs.FOR (Some (Cabs.FC_EXP (fst expr1))) None None stmt loc }
| loc = FOR LPAREN decl1 = declaration SEMICOLON RPAREN stmt = last_statement
- { FOR (Some (FC_DECL decl1)) None None stmt loc }
+ { Cabs.FOR (Some (Cabs.FC_DECL decl1)) None None stmt loc }
| loc = FOR LPAREN SEMICOLON SEMICOLON RPAREN stmt = last_statement
- { FOR None None None stmt loc }
+ { Cabs.FOR None None None stmt loc }
(* 6.8.6 *)
jump_statement:
| loc = GOTO id = OTHER_NAME SEMICOLON
- { GOTO (fst id) loc }
+ { Cabs.GOTO (fst id) loc }
| loc = CONTINUE SEMICOLON
- { CONTINUE loc }
+ { Cabs.CONTINUE loc }
| loc = BREAK SEMICOLON
- { BREAK loc }
+ { Cabs.BREAK loc }
| loc = RETURN expr = expression SEMICOLON
- { RETURN (Some (fst expr)) loc }
+ { Cabs.RETURN (Some (fst expr)) loc }
| loc = RETURN SEMICOLON
- { RETURN None loc }
+ { Cabs.RETURN None loc }
(* Non-standard *)
asm_statement:
-| loc = ASM attr = asm_attributes LPAREN template = STRING_LITERAL args = asm_arguments RPAREN SEMICOLON
+| loc = ASM attr = asm_attributes LPAREN template = STRING_LITERAL args = asm_arguments
+ RPAREN SEMICOLON
{ let '(wide, chars, _) := template in
let '(outputs, inputs, flags) := args in
- ASM attr wide chars outputs inputs flags loc }
+ Cabs.ASM attr wide chars outputs inputs flags loc }
asm_attributes:
| /* empty */
{ [] }
| CONST attr = asm_attributes
- { CV_CONST :: attr }
+ { Cabs.CV_CONST :: attr }
| VOLATILE attr = asm_attributes
- { CV_VOLATILE :: attr }
+ { Cabs.CV_VOLATILE :: attr }
asm_arguments:
| /* empty */
@@ -897,7 +914,7 @@ asm_operands_ne:
asm_operand:
| n = asm_op_name cstr = STRING_LITERAL LPAREN e = expression RPAREN
- { let '(wide, s, loc) := cstr in ASMOPERAND n wide s (fst e) }
+ { let '(wide, s, loc) := cstr in Cabs.ASMOPERAND n wide s (fst e) }
asm_op_name:
| /* empty */ { None }
@@ -934,7 +951,7 @@ external_declaration:
{ def }
(* Non-standard *)
| p = PRAGMA
- { PRAGMA (fst p) (snd p) }
+ { Cabs.PRAGMA (fst p) (snd p) }
(* 6.9.1 *)
@@ -943,11 +960,11 @@ function_definition:
decl = declarator_noattrend
dlist = declaration_list
stmt = compound_statement
- { FUNDEF (fst specs) decl (List.rev' dlist) stmt (snd specs) }
+ { Cabs.FUNDEF (fst specs) decl (List.rev' dlist) stmt (snd specs) }
| specs = declaration_specifiers
decl = declarator
stmt = compound_statement
- { FUNDEF (fst specs) decl [] stmt (snd specs) }
+ { Cabs.FUNDEF (fst specs) decl [] stmt (snd specs) }
declaration_list:
| d = declaration
diff --git a/cparser/pre_parser.mly b/cparser/pre_parser.mly
index 71eaf419..669ecf5e 100644
--- a/cparser/pre_parser.mly
+++ b/cparser/pre_parser.mly
@@ -43,13 +43,13 @@
%}
%token<string> PRE_NAME
-%token<string * Pre_parser_aux.identifier_type ref * Cabs.cabsloc>
+%token<string * Pre_parser_aux.identifier_type ref * Cabs.loc>
VAR_NAME TYPEDEF_NAME
-%token<Cabs.constant * Cabs.cabsloc> CONSTANT
-%token<bool * int64 list * Cabs.cabsloc> STRING_LITERAL
-%token<string * Cabs.cabsloc> PRAGMA
+%token<Cabs.constant * Cabs.loc> CONSTANT
+%token<bool * int64 list * Cabs.loc> STRING_LITERAL
+%token<string * Cabs.loc> PRAGMA
-%token<Cabs.cabsloc> SIZEOF PTR INC DEC LEFT RIGHT LEQ GEQ EQEQ EQ NEQ LT GT
+%token<Cabs.loc> SIZEOF PTR INC DEC LEFT RIGHT LEQ GEQ EQEQ EQ NEQ LT GT
ANDAND BARBAR PLUS MINUS STAR TILDE BANG SLASH PERCENT HAT BAR QUESTION
COLON AND MUL_ASSIGN DIV_ASSIGN MOD_ASSIGN ADD_ASSIGN SUB_ASSIGN LEFT_ASSIGN
RIGHT_ASSIGN AND_ASSIGN XOR_ASSIGN OR_ASSIGN LPAREN RPAREN LBRACK RBRACK
diff --git a/extraction/extraction.v b/extraction/extraction.v
index c0286f7b..25319475 100644
--- a/extraction/extraction.v
+++ b/extraction/extraction.v
@@ -34,7 +34,6 @@ Require Clight.
Require Compiler.
Require Parser.
Require Initializers.
-Require Int31.
(* Standard lib *)
Require Import ExtrOcamlBasic.
@@ -128,7 +127,7 @@ Extract Constant Compiler.time => "Timing.time_coq".
(*Extraction Inline Compiler.apply_total Compiler.apply_partial.*)
(* Cabs *)
-Extract Constant Cabs.cabsloc =>
+Extract Constant Cabs.loc =>
"{ lineno : int;
filename: string;
byteno: int;
@@ -137,15 +136,6 @@ Extract Constant Cabs.cabsloc =>
Extract Inlined Constant Cabs.string => "String.t".
Extract Constant Cabs.char_code => "int64".
-(* Int31 *)
-Extract Inductive Int31.digits => "bool" [ "false" "true" ].
-Extract Inductive Int31.int31 => "int" [ "Camlcoq.Int31.constr" ] "Camlcoq.Int31.destr".
-Extract Constant Int31.twice => "Camlcoq.Int31.twice".
-Extract Constant Int31.twice_plus_one => "Camlcoq.Int31.twice_plus_one".
-Extract Constant Int31.compare31 => "Camlcoq.Int31.compare".
-Extract Constant Int31.On => "0".
-Extract Constant Int31.In => "1".
-
(* Processor-specific extraction directives *)
Load extractionMachdep.
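
Under these directives Cabs.string extracts to String.t and Cabs.char_code to int64, so the STRING_LITERAL payload declared in Parser.vy, bool * list Cabs.char_code * Cabs.loc, becomes an ordinary OCaml tuple of a bool, an int64 list and a location. The helper below is a hypothetical illustration of producing such a char-code list; it is not part of the patch.

(* Hypothetical helper: turn an OCaml string into the int64 char codes
   that the extracted Cabs.char_code type expects. *)
let char_codes_of_string (s : string) : int64 list =
  List.init (String.length s) (fun i -> Int64.of_int (Char.code s.[i]))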
diff --git a/lib/Camlcoq.ml b/lib/Camlcoq.ml
index d94e3582..66322efb 100644
--- a/lib/Camlcoq.ml
+++ b/lib/Camlcoq.ml
@@ -335,54 +335,3 @@ let coqfloat32_of_camlfloat f =
Float32.of_bits(coqint_of_camlint(Int32.bits_of_float f))
let camlfloat_of_coqfloat32 f =
Int32.float_of_bits(camlint_of_coqint(Float32.to_bits f))
-
-(* Int31 *)
-
-module Int31 = struct
-
-(*
-let constr (b30,b29,b28,b27,b26,b25,b24,
- b23,b22,b21,b20,b19,b18,b17,b16,
- b15,b14,b13,b12,b11,b10,b9,b8,
- b7,b6,b5,b4,b3,b2,b1,b0) =
- let f i b accu = if b then accu + (1 lsl i) else accu in
- f 30 b30 (f 29 b29 (f 28 b28 (f 27 b27 (f 26 b26 (f 25 b25 (f 24 b24
- (f 23 b23 (f 22 b22 (f 21 b21 (f 20 b20 (f 19 b19 (f 18 b18 (f 17 b17 (f 16 b16
- (f 15 b15 (f 14 b14 (f 13 b13 (f 12 b12 (f 11 b11 (f 10 b10 (f 9 b9 (f 8 b8
- (f 7 b7 (f 6 b6 (f 5 b5 (f 4 b4 (f 3 b3 (f 2 b2 (f 1 b1 (f 0 b0 0))))))))))))))))))))))))))))))
-*)
-
-let constr (b30,b29,b28,b27,b26,b25,b24,
- b23,b22,b21,b20,b19,b18,b17,b16,
- b15,b14,b13,b12,b11,b10,b9,b8,
- b7,b6,b5,b4,b3,b2,b1,b0) =
- let f i b = if b then 1 lsl i else 0 in
- f 30 b30 + f 29 b29 + f 28 b28 + f 27 b27 + f 26 b26 + f 25 b25 + f 24 b24 +
- f 23 b23 + f 22 b22 + f 21 b21 + f 20 b20 + f 19 b19 + f 18 b18 + f 17 b17 + f 16 b16 +
- f 15 b15 + f 14 b14 + f 13 b13 + f 12 b12 + f 11 b11 + f 10 b10 + f 9 b9 + f 8 b8 +
- f 7 b7 + f 6 b6 + f 5 b5 + f 4 b4 + f 3 b3 + f 2 b2 + f 1 b1 + f 0 b0
-
-let destr f n =
- let b i = n land (1 lsl i) <> 0 in
- f (b 30) (b 29) (b 28) (b 27) (b 26) (b 25) (b 24)
- (b 23) (b 22) (b 21) (b 20) (b 19) (b 18) (b 17) (b 16)
- (b 15) (b 14) (b 13) (b 12) (b 11) (b 10) (b 9) (b 8)
- (b 7) (b 6) (b 5) (b 4) (b 3) (b 2) (b 1) (b 0)
-
-let twice n =
- (n lsl 1) land 0x7FFFFFFF
-
-let twice_plus_one n =
- ((n lsl 1) land 0x7FFFFFFF) lor 1
-
-let compare (x:int) (y:int) =
- if x = y then Datatypes.Eq
- else begin
- let sx = x < 0 and sy = y < 0 in
- if sx = sy then
- (if x < y then Datatypes.Lt else Datatypes.Gt)
- else
- (if sx then Datatypes.Gt else Datatypes.Lt)
- end
-
-end
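
The Int31 helpers become dead code once the extraction directives above stop mentioning Int31. In the deleted compare, arguments with equal signs follow the native order, and when the signs differ the negative one is treated as the larger value, which amounts to an unsigned comparison when the top bit of the 31-bit payload occupies OCaml's sign bit. The sketch below restates that ordering with a plain int result; the name unsigned_compare_31 is hypothetical.

(* Standalone restatement of the deleted Int31.compare ordering:
   same-sign values use the native order; otherwise the negative one
   (high payload bit set) is the larger, i.e. an unsigned comparison. *)
let unsigned_compare_31 (x : int) (y : int) : int =
  if x = y then 0
  else if (x < 0) = (y < 0) then (if x < y then -1 else 1)
  else if x < 0 then 1
  else -1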