author    James Pollard <james@pollard.dev>  2020-06-23 23:00:08 +0100
committer James Pollard <james@pollard.dev>  2020-06-23 23:01:44 +0100
commit    ec0fa1ac249a8eeb0df9700c50a3e6c4f1b540f2 (patch)
tree      e168031be13a7e4ed5f7b245c03b18fc48d8b49c /src
parent    b70f007eae5886990a8ffc1e7b94294702e238f8 (diff)
Normalise entire expression to avoid overflow issues.
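
Array accesses are now translated as a single division of the whole
address expression, e.g. (r1 + off) / 4 instead of r1 / 4 + off / 4,
and stack_correct additionally bounds the stack size by
Integers.Ptrofs.modulus.

A minimal sketch of why the two forms differ (illustrative only, not
part of this patch; the constants below are hypothetical): once 32-bit
wrap-around is involved, per-term quotients need not agree with the
quotient of the wrapped sum, which is what Ptrofs arithmetic computes.

    (* Not part of the patch: a standalone Coq illustration. *)
    Require Import ZArith.
    Open Scope Z_scope.

    (* Dividing each term first, as the old translation did. *)
    Example divide_then_add : 4294967292 / 4 + 8 / 4 = 1073741825.
    Proof. reflexivity. Qed.

    (* Dividing the wrapped sum, as Ptrofs.add followed by the new
       single Vdiv computes it. *)
    Example add_then_divide :
      ((4294967292 + 8) mod 4294967296) / 4 = 1.
    Proof. reflexivity. Qed.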
Diffstat (limited to 'src')
-rw-r--r--  src/common/Coquplib.v           10
-rw-r--r--  src/common/IntegerExtra.v      235
-rw-r--r--  src/translation/HTLgen.v        11
-rw-r--r--  src/translation/HTLgenproof.v  532
-rw-r--r--  src/translation/HTLgenspec.v     7
5 files changed, 523 insertions, 272 deletions
diff --git a/src/common/Coquplib.v b/src/common/Coquplib.v
index 59b23ae..b4ca906 100644
--- a/src/common/Coquplib.v
+++ b/src/common/Coquplib.v
@@ -84,6 +84,11 @@ Ltac unfold_constants :=
| [ H : context[Integers.Ptrofs.max_signed] |- _ ] =>
replace Integers.Ptrofs.max_signed with 2147483647 in H by reflexivity
+ | [ _ : _ |- context[Integers.Ptrofs.max_unsigned] ] =>
+ replace Integers.Ptrofs.max_unsigned with 4294967295 by reflexivity
+ | [ H : context[Integers.Ptrofs.max_unsigned] |- _ ] =>
+ replace Integers.Ptrofs.max_unsigned with 4294967295 in H by reflexivity
+
| [ _ : _ |- context[Integers.Int.modulus] ] =>
replace Integers.Int.modulus with 4294967296 by reflexivity
| [ H : context[Integers.Int.modulus] |- _ ] =>
@@ -98,6 +103,11 @@ Ltac unfold_constants :=
replace Integers.Int.max_signed with 2147483647 by reflexivity
| [ H : context[Integers.Int.max_signed] |- _ ] =>
replace Integers.Int.max_signed with 2147483647 in H by reflexivity
+
+ | [ _ : _ |- context[Integers.Int.max_unsigned] ] =>
+ replace Integers.Int.max_unsigned with 4294967295 by reflexivity
+ | [ H : context[Integers.Int.max_unsigned] |- _ ] =>
+ replace Integers.Int.max_unsigned with 4294967295 in H by reflexivity
end.
Ltac simplify := unfold_constants; simpl in *;
diff --git a/src/common/IntegerExtra.v b/src/common/IntegerExtra.v
new file mode 100644
index 0000000..ad01015
--- /dev/null
+++ b/src/common/IntegerExtra.v
@@ -0,0 +1,235 @@
+Require Import BinInt.
+Require Import Lia.
+Require Import ZBinary.
+
+From bbv Require Import ZLib.
+From compcert Require Import Integers Coqlib.
+
+Require Import Coquplib.
+
+Local Open Scope Z_scope.
+
+Module PtrofsExtra.
+
+ Lemma mul_divs :
+ forall x y,
+ 0 <= Ptrofs.signed y ->
+ 0 < Ptrofs.signed x ->
+ Ptrofs.signed y mod Ptrofs.signed x = 0 ->
+ (Integers.Ptrofs.mul x (Integers.Ptrofs.divs y x)) = y.
+ Proof.
+ intros.
+
+ pose proof (Ptrofs.mods_divs_Euclid y x).
+ pose proof (Zquot.Zrem_Zmod_zero (Ptrofs.signed y) (Ptrofs.signed x)).
+ apply <- H3 in H1; try lia; clear H3.
+ unfold Ptrofs.mods in H2.
+ rewrite H1 in H2.
+ replace (Ptrofs.repr 0) with (Ptrofs.zero) in H2 by reflexivity.
+ rewrite Ptrofs.add_zero in H2.
+ rewrite Ptrofs.mul_commut.
+ congruence.
+ Qed.
+
+ Lemma Z_div_distr_add :
+ forall x y z,
+ x mod z = 0 ->
+ y mod z = 0 ->
+ z <> 0 ->
+ x / z + y / z = (x + y) / z.
+ Proof.
+ intros.
+
+ assert ((x + y) mod z = 0).
+ { rewrite <- Z.add_mod_idemp_l; try assumption.
+ rewrite H. assumption. }
+
+ rewrite <- Z.mul_cancel_r with (p := z); try assumption.
+ rewrite Z.mul_add_distr_r.
+ repeat rewrite ZLib.div_mul_undo; lia.
+ Qed.
+
+ Lemma mul_unsigned :
+ forall x y,
+ Ptrofs.mul x y =
+ Ptrofs.repr (Ptrofs.unsigned x * Ptrofs.unsigned y).
+ Proof.
+ intros; unfold Ptrofs.mul.
+ apply Ptrofs.eqm_samerepr.
+ apply Ptrofs.eqm_mult; exists 0; lia.
+ Qed.
+
+ Lemma mul_repr :
+ forall x y,
+ Ptrofs.min_signed <= y <= Ptrofs.max_signed ->
+ Ptrofs.min_signed <= x <= Ptrofs.max_signed ->
+ Ptrofs.mul (Ptrofs.repr y) (Ptrofs.repr x) = Ptrofs.repr (x * y).
+ Proof.
+ intros; unfold Ptrofs.mul.
+ destruct (Z_ge_lt_dec x 0); destruct (Z_ge_lt_dec y 0).
+
+ - f_equal.
+ repeat rewrite Ptrofs.unsigned_repr_eq.
+ repeat rewrite Z.mod_small; simplify; lia.
+
+ - assert (Ptrofs.lt (Ptrofs.repr y) Ptrofs.zero = true).
+ {
+ unfold Ptrofs.lt.
+ rewrite Ptrofs.signed_repr; auto.
+ rewrite Ptrofs.signed_zero.
+ destruct (zlt y 0); try lia; auto.
+ }
+
+ rewrite Ptrofs.unsigned_signed with (n := Ptrofs.repr y).
+ rewrite H1.
+ rewrite Ptrofs.signed_repr; auto.
+ rewrite Ptrofs.unsigned_repr_eq.
+ rewrite Z.mod_small; simplify; try lia.
+ rewrite Z.mul_add_distr_r.
+ apply Ptrofs.eqm_samerepr.
+ exists x. simplify. lia.
+
+ - assert (Ptrofs.lt (Ptrofs.repr x) Ptrofs.zero = true).
+ {
+ unfold Ptrofs.lt.
+ rewrite Ptrofs.signed_repr; auto.
+ rewrite Ptrofs.signed_zero.
+ destruct (zlt x 0); try lia; auto.
+ }
+
+ rewrite Ptrofs.unsigned_signed with (n := Ptrofs.repr x).
+ rewrite H1.
+ rewrite Ptrofs.signed_repr; auto.
+ rewrite Ptrofs.unsigned_repr_eq.
+ rewrite Z.mod_small; simplify; try lia.
+ rewrite Z.mul_add_distr_l.
+ apply Ptrofs.eqm_samerepr.
+ exists y. simplify. lia.
+
+ - assert (Ptrofs.lt (Ptrofs.repr x) Ptrofs.zero = true).
+ {
+ unfold Ptrofs.lt.
+ rewrite Ptrofs.signed_repr; auto.
+ rewrite Ptrofs.signed_zero.
+ destruct (zlt x 0); try lia; auto.
+ }
+ assert (Ptrofs.lt (Ptrofs.repr y) Ptrofs.zero = true).
+ {
+ unfold Ptrofs.lt.
+ rewrite Ptrofs.signed_repr; auto.
+ rewrite Ptrofs.signed_zero.
+ destruct (zlt y 0); try lia; auto.
+ }
+ rewrite Ptrofs.unsigned_signed with (n := Ptrofs.repr x).
+ rewrite Ptrofs.unsigned_signed with (n := Ptrofs.repr y).
+ rewrite H2.
+ rewrite H1.
+ repeat rewrite Ptrofs.signed_repr; auto.
+ replace ((y + Ptrofs.modulus) * (x + Ptrofs.modulus)) with
+ (x * y + (x + y + Ptrofs.modulus) * Ptrofs.modulus) by lia.
+ apply Ptrofs.eqm_samerepr.
+ exists (x + y + Ptrofs.modulus). lia.
+ Qed.
+End PtrofsExtra.
+
+Module IntExtra.
+ Lemma mul_unsigned :
+ forall x y,
+ Int.mul x y =
+ Int.repr (Int.unsigned x * Int.unsigned y).
+ Proof.
+ intros; unfold Int.mul.
+ apply Int.eqm_samerepr.
+ apply Int.eqm_mult; exists 0; lia.
+ Qed.
+
+ Lemma mul_repr :
+ forall x y,
+ Int.min_signed <= y <= Int.max_signed ->
+ Int.min_signed <= x <= Int.max_signed ->
+ Int.mul (Int.repr y) (Int.repr x) = Int.repr (x * y).
+ Proof.
+ intros; unfold Int.mul.
+ destruct (Z_ge_lt_dec x 0); destruct (Z_ge_lt_dec y 0).
+
+ - f_equal.
+ repeat rewrite Int.unsigned_repr_eq.
+ repeat rewrite Z.mod_small; simplify; lia.
+
+ - assert (Int.lt (Int.repr y) Int.zero = true).
+ {
+ unfold Int.lt.
+ rewrite Int.signed_repr; auto.
+ rewrite Int.signed_zero.
+ destruct (zlt y 0); try lia; auto.
+ }
+
+ rewrite Int.unsigned_signed with (n := Int.repr y).
+ rewrite H1.
+ rewrite Int.signed_repr; auto.
+ rewrite Int.unsigned_repr_eq.
+ rewrite Z.mod_small; simplify; try lia.
+ rewrite Z.mul_add_distr_r.
+ apply Int.eqm_samerepr.
+ exists x. simplify. lia.
+
+ - assert (Int.lt (Int.repr x) Int.zero = true).
+ {
+ unfold Int.lt.
+ rewrite Int.signed_repr; auto.
+ rewrite Int.signed_zero.
+ destruct (zlt x 0); try lia; auto.
+ }
+
+ rewrite Int.unsigned_signed with (n := Int.repr x).
+ rewrite H1.
+ rewrite Int.signed_repr; auto.
+ rewrite Int.unsigned_repr_eq.
+ rewrite Z.mod_small; simplify; try lia.
+ rewrite Z.mul_add_distr_l.
+ apply Int.eqm_samerepr.
+ exists y. simplify. lia.
+
+ - assert (Int.lt (Int.repr x) Int.zero = true).
+ {
+ unfold Int.lt.
+ rewrite Int.signed_repr; auto.
+ rewrite Int.signed_zero.
+ destruct (zlt x 0); try lia; auto.
+ }
+ assert (Int.lt (Int.repr y) Int.zero = true).
+ {
+ unfold Int.lt.
+ rewrite Int.signed_repr; auto.
+ rewrite Int.signed_zero.
+ destruct (zlt y 0); try lia; auto.
+ }
+ rewrite Int.unsigned_signed with (n := Int.repr x).
+ rewrite Int.unsigned_signed with (n := Int.repr y).
+ rewrite H2.
+ rewrite H1.
+ repeat rewrite Int.signed_repr; auto.
+ replace ((y + Int.modulus) * (x + Int.modulus)) with
+ (x * y + (x + y + Int.modulus) * Int.modulus) by lia.
+ apply Int.eqm_samerepr.
+ exists (x + y + Int.modulus). lia.
+ Qed.
+End IntExtra.
+
+Lemma mul_of_int :
+ forall x y,
+ 0 <= x < Integers.Ptrofs.modulus ->
+ Integers.Ptrofs.mul (Integers.Ptrofs.repr x) (Integers.Ptrofs.of_int y) =
+ Integers.Ptrofs.of_int (Integers.Int.mul (Integers.Int.repr x) y).
+Proof.
+ intros.
+ pose proof (Integers.Ptrofs.agree32_of_int eq_refl y) as A.
+ pose proof (Integers.Ptrofs.agree32_to_int eq_refl (Integers.Ptrofs.repr x)) as B.
+ exploit Integers.Ptrofs.agree32_mul; [> reflexivity | exact B | exact A | intro C].
+ unfold Integers.Ptrofs.to_int in C.
+ unfold Integers.Ptrofs.of_int in C.
+ rewrite Integers.Ptrofs.unsigned_repr_eq in C.
+ rewrite Z.mod_small in C; auto.
+ symmetry.
+ apply Integers.Ptrofs.agree32_of_int_eq; auto.
+Qed.
diff --git a/src/translation/HTLgen.v b/src/translation/HTLgen.v
index cc6a3f8..92e40f5 100644
--- a/src/translation/HTLgen.v
+++ b/src/translation/HTLgen.v
@@ -349,17 +349,18 @@ Definition translate_arr_access (mem : AST.memory_chunk) (addr : Op.addressing)
match mem, addr, args with (* TODO: We should be more methodical here; what are the possibilities?*)
| Mint32, Op.Aindexed off, r1::nil =>
if (check_address_parameter off)
- then ret (Vvari stack (Vbinop Vadd (boplitz Vdiv r1 4) (Vlit (ZToValue 32 (off / 4)))))
+ then ret (Vvari stack (Vbinop Vdiv (boplitz Vadd r1 off) (Vlit (ZToValue 32 4))))
else error (Errors.msg "Veriloggen: translate_arr_access address misaligned")
| Mint32, Op.Ascaled scale offset, r1::nil =>
if (check_address_parameter scale) && (check_address_parameter offset)
- then ret (Vvari stack (Vbinop Vadd (boplitz Vmul r1 (scale / 4)) (Vlit (ZToValue 32 (offset / 4)))))
+ then ret (Vvari stack (Vbinop Vdiv (Vbinop Vadd (boplitz Vmul r1 scale) (Vlit (ZToValue 32 offset))) (Vlit (ZToValue 32 4))))
else error (Errors.msg "Veriloggen: translate_arr_access address misaligned")
| Mint32, Op.Aindexed2scaled scale offset, r1::r2::nil => (* Typical for dynamic array addressing *)
if (check_address_parameter scale) && (check_address_parameter offset)
then ret (Vvari stack
- (Vbinop Vadd (Vbinop Vadd (boplitz Vdiv r1 4) (Vlit (ZToValue 32 (offset / 4))))
- (boplitz Vmul r2 (scale / 4))))
+ (Vbinop Vdiv
+ (Vbinop Vadd (boplitz Vadd r1 offset) (boplitz Vmul r2 scale))
+ (ZToValue 32 4)))
else error (Errors.msg "Veriloggen: translate_arr_access address misaligned")
| Mint32, Op.Ainstack a, nil => (* We need to be sure that the base address is aligned *)
let a := Integers.Ptrofs.unsigned a in
@@ -451,7 +452,7 @@ Definition create_arr (i : option io) (sz : nat) (ln : nat) : mon (reg * nat) :=
(create_arr_state_incr s sz ln i).
Definition stack_correct (sz : Z) : bool :=
- (0 <=? sz) && (Z.modulo sz 4 =? 0).
+ (0 <=? sz) && (sz <? Integers.Ptrofs.modulus) && (Z.modulo sz 4 =? 0).
Definition transf_module (f: function) : mon module :=
if stack_correct f.(fn_stacksize) then
diff --git a/src/translation/HTLgenproof.v b/src/translation/HTLgenproof.v
index 7e9cb26..8e97c58 100644
--- a/src/translation/HTLgenproof.v
+++ b/src/translation/HTLgenproof.v
@@ -380,7 +380,7 @@ Section CORRECTNESS.
2: { reflexivity. }
rewrite AssocMap.gss.
unfold Verilog.merge_arr.
- setoid_rewrite H1.
+ setoid_rewrite H3.
reflexivity.
rewrite combine_length.
@@ -510,60 +510,58 @@ Section CORRECTNESS.
pose proof (H0 r0).
assert (HPler0 : Ple r0 (RTL.max_reg_function f))
by (eapply RTL.max_reg_function_use; eauto; simplify; eauto).
- apply H4 in HPler0.
+ apply H6 in HPler0.
invert HPler0; try congruence.
- rewrite EQr0 in H6.
- invert H6.
- clear H0. clear H4.
+ rewrite EQr0 in H8.
+ invert H8.
+ clear H0. clear H6.
unfold check_address_parameter in *. simplify.
+ remember (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0))
+ (Integers.Ptrofs.of_int (Integers.Int.repr z))) as OFFSET.
+
+ (** Read bounds assumption *)
+ assert (0 <= Integers.Ptrofs.signed OFFSET) as READ_BOUND_LOW by admit.
+ assert (Integers.Ptrofs.signed OFFSET < f.(RTL.fn_stacksize)) as READ_BOUND_HIGH by admit.
+
+ (** Modular Preservation proof *)
+ assert (Integers.Ptrofs.signed OFFSET mod 4 = 0) as MOD_PRESERVE by admit.
+
(** Normalisation proof *)
- assert
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0))
- (Integers.Ptrofs.of_int (Integers.Int.repr z))
- =
- Integers.Ptrofs.repr
- (4 *
- Integers.Ptrofs.unsigned
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0 / 4))
- (Integers.Ptrofs.of_int (Integers.Int.repr (z / 4))))))
+ assert (Integers.Ptrofs.repr
+ (4 * Integers.Ptrofs.unsigned
+ (Integers.Ptrofs.divs OFFSET (Integers.Ptrofs.repr 4))) = OFFSET)
as NORMALISE.
- etransitivity.
- 2: {
- replace (4 *
- Integers.Ptrofs.unsigned
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0 / 4))
- (Integers.Ptrofs.of_int (Integers.Int.repr (z / 4)))))
- with (Integers.Ptrofs.unsigned (Integers.Ptrofs.repr 4) *
- Integers.Ptrofs.unsigned
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0 / 4))
- (Integers.Ptrofs.of_int (Integers.Int.repr (z / 4))))).
- 2: { rewrite Integers.Ptrofs.unsigned_repr_eq.
- rewrite Z.mod_small. reflexivity.
- simplify. lia. }
-
+ { replace 4 with (Integers.Ptrofs.unsigned (Integers.Ptrofs.repr 4)) at 1 by reflexivity.
rewrite <- PtrofsExtra.mul_unsigned.
- rewrite Integers.Ptrofs.mul_add_distr_r.
- rewrite PtrofsExtra.mul_repr; simplify; try lia.
- rewrite ZLib.div_mul_undo; try lia.
- rewrite mul_of_int; simplify; try lia.
- rewrite IntExtra.mul_repr; simplify; try lia.
- rewrite ZLib.div_mul_undo; try lia.
- reflexivity.
-
+ apply PtrofsExtra.mul_divs; auto.
+ rewrite Integers.Ptrofs.signed_repr; simplify; lia. }
+
+ (** Normalised bounds proof *)
+ assert (0 <=
+ Integers.Ptrofs.unsigned (Integers.Ptrofs.divs OFFSET (Integers.Ptrofs.repr 4))
+ < (RTL.fn_stacksize f / 4))
+ as NORMALISE_BOUND.
+ { split.
+ apply Integers.Ptrofs.unsigned_range_2.
+ assert (forall x y, Integers.Ptrofs.divs x y = Integers.Ptrofs.divs x y ) by reflexivity.
+ unfold Integers.Ptrofs.divs at 2 in H0.
+ rewrite H0. clear H0.
+ rewrite Integers.Ptrofs.unsigned_repr; simplify.
+ rewrite Integers.Ptrofs.signed_repr; simplify; try lia.
+ rewrite Z.quot_div_nonneg; try (rewrite <- HeqOFFSET; assumption); try lia.
+ apply Zmult_lt_reg_r with (p := 4); try lia.
+ repeat rewrite ZLib.div_mul_undo; try lia.
+ rewrite Integers.Ptrofs.signed_repr.
+ rewrite Z.quot_div_nonneg; try (rewrite <- HeqOFFSET; assumption); try lia.
split.
- apply Z.div_le_lower_bound; lia.
- apply Z.div_le_upper_bound; lia.
-
- admit. (* FIXME: Register size 32 bits *) }
- reflexivity.
+ apply Z.div_pos; try (rewrite <- HeqOFFSET; assumption); try lia.
+ apply Z.div_le_upper_bound; simplify; try (rewrite <- HeqOFFSET; lia); try lia.
+ simplify; lia. }
- remember (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0))
- (Integers.Ptrofs.of_int (Integers.Int.repr z))) as OFFSET.
-
- (** Read bounds assumption *)
- assert (Integers.Ptrofs.unsigned OFFSET < f.(RTL.fn_stacksize)) as READ_BOUND_HIGH by admit.
+ inversion NORMALISE_BOUND as [ NORMALISE_BOUND_LOW NORMALISE_BOUND_HIGH ];
+ clear NORMALISE_BOUND.
eexists. split.
eapply Smallstep.plus_one.
@@ -575,8 +573,10 @@ Section CORRECTNESS.
econstructor. econstructor. econstructor. econstructor.
econstructor. econstructor. econstructor. econstructor.
+ all: simplify.
+
(** Verilog array lookup *)
- unfold Verilog.arr_assocmap_lookup. simplify. setoid_rewrite H3.
+ unfold Verilog.arr_assocmap_lookup. setoid_rewrite H5.
f_equal.
(** State Lookup *)
@@ -596,34 +596,33 @@ Section CORRECTNESS.
apply regs_lessdef_add_match.
(** Equality proof *)
- assert (Integers.Ptrofs.repr 0 = Integers.Ptrofs.zero) by reflexivity.
- rewrite H0 in H5. clear H0.
- setoid_rewrite Integers.Ptrofs.add_zero_l in H5.
-
- specialize (H5 (Integers.Ptrofs.unsigned
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0 / 4))
- (Integers.Ptrofs.of_int (Integers.Int.repr (z / 4)))))).
-
- exploit H5; auto; intros.
- rewrite Z2Nat.id.
- split.
- apply Integers.Ptrofs.unsigned_range_2.
- admit.
- apply Z_div_pos; lia.
- clear H5.
+ assert (Integers.Ptrofs.repr 0 = Integers.Ptrofs.zero) as ZERO by reflexivity.
+ rewrite ZERO in H7. clear ZERO.
+ setoid_rewrite Integers.Ptrofs.add_zero_l in H7.
+
+ specialize (H7 (Integers.Ptrofs.unsigned
+ (Integers.Ptrofs.divs
+ OFFSET
+ (Integers.Ptrofs.repr 4)))).
+
+ exploit H7.
+ rewrite Z2Nat.id; eauto.
+ apply Z.div_pos; lia.
+
+ intros I.
assert (Z.to_nat
(Integers.Ptrofs.unsigned
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0 / 4))
- (Integers.Ptrofs.of_int (Integers.Int.repr (z / 4)))))
+ (Integers.Ptrofs.divs
+ OFFSET
+ (Integers.Ptrofs.repr 4)))
=
- valueToNat (vplus (vdivs asr # r0 (ZToValue 32 4) ?EQ2) (ZToValue 32 (z / 4)) ?EQ1))
- as EXPR_OK by admit.
-
+ valueToNat (vdivs (vplus asr # r0 (ZToValue 32 z) ?EQ2) (ZToValue 32 4) ?EQ1))
+ as EXPR_OK by admit.
rewrite <- EXPR_OK.
- rewrite <- NORMALISE in H0.
- rewrite H1 in H0.
- invert H0. assumption.
+ rewrite NORMALISE in I.
+ rewrite H1 in I.
+ invert I. assumption.
(** PC match *)
apply regs_lessdef_add_greater.
@@ -648,7 +647,7 @@ Section CORRECTNESS.
2: { reflexivity. }
rewrite AssocMap.gss.
unfold Verilog.merge_arr.
- setoid_rewrite H3.
+ setoid_rewrite H5.
reflexivity.
rewrite combine_length.
@@ -667,22 +666,20 @@ Section CORRECTNESS.
(** RSBP preservation *)
unfold reg_stack_based_pointers. intros.
- destruct (Pos.eq_dec r1 dst); subst.
+ destruct (Pos.eq_dec r1 dst); try rewrite e. (* FIXME: Prepare this for automation *)
rewrite Registers.Regmap.gss.
unfold arr_stack_based_pointers in ASBP.
specialize (ASBP (Integers.Ptrofs.unsigned
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0 / 4))
- (Integers.Ptrofs.of_int (Integers.Int.repr (z / 4)))))).
- exploit ASBP; auto; intros.
- admit.
-
- rewrite <- NORMALISE in H0.
- assert (Integers.Ptrofs.repr 0 = Integers.Ptrofs.zero) by reflexivity.
- rewrite H10 in H0. clear H10.
+ (Integers.Ptrofs.divs OFFSET (Integers.Ptrofs.repr 4)))).
+ exploit ASBP; auto; intros I.
+
+ rewrite NORMALISE in I.
+ assert (Integers.Ptrofs.repr 0 = Integers.Ptrofs.zero) as ZERO by reflexivity.
+ rewrite ZERO in I. clear ZERO.
simplify.
- rewrite Integers.Ptrofs.add_zero_l in H0.
- rewrite H1 in H0.
+ rewrite Integers.Ptrofs.add_zero_l in I.
+ rewrite H1 in I.
assumption.
simplify.
@@ -718,84 +715,62 @@ Section CORRECTNESS.
by (eapply RTL.max_reg_function_use; eauto; simplify; eauto).
assert (HPler1 : Ple r1 (RTL.max_reg_function f))
by (eapply RTL.max_reg_function_use; eauto; simpl; auto).
- apply H4 in HPler0.
- apply H6 in HPler1.
+ apply H6 in HPler0.
+ apply H8 in HPler1.
invert HPler0; invert HPler1; try congruence.
- rewrite EQr0 in H7.
- rewrite EQr1 in H8.
- invert H7. invert H8.
- clear H0. clear H4. clear H6.
+ rewrite EQr0 in H10.
+ rewrite EQr1 in H12.
+ invert H10. invert H12.
+ clear H0. clear H6. clear H8.
unfold check_address_parameter in *. simplify.
- (** Normalisation proof *)
- assert
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0))
- (Integers.Ptrofs.of_int
- (Integers.Int.add (Integers.Int.mul (valueToInt asr # r1) (Integers.Int.repr z))
- (Integers.Int.repr z0)))
- =
- Integers.Ptrofs.repr
- (4 *
- Integers.Ptrofs.unsigned
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0 / 4))
- (Integers.Ptrofs.of_int
- (Integers.Int.add
- (Integers.Int.mul (valueToInt asr # r1) (Integers.Int.repr (z / 4)))
- (Integers.Int.repr (z0 / 4))))))) as NORMALISE.
- etransitivity.
- 2: {
- replace (4 *
- Integers.Ptrofs.unsigned
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0 / 4))
- (Integers.Ptrofs.of_int
- (Integers.Int.add
- (Integers.Int.mul (valueToInt asr # r1) (Integers.Int.repr (z / 4)))
- (Integers.Int.repr (z0 / 4)))))) with
- (Integers.Ptrofs.unsigned (Integers.Ptrofs.repr 4) *
- Integers.Ptrofs.unsigned
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0 / 4))
- (Integers.Ptrofs.of_int
- (Integers.Int.add
- (Integers.Int.mul (valueToInt asr # r1) (Integers.Int.repr (z / 4)))
- (Integers.Int.repr (z0 / 4)))))).
- 2: { rewrite Integers.Ptrofs.unsigned_repr_eq.
- rewrite Z.mod_small. reflexivity.
- simplify. lia. }
-
- rewrite <- PtrofsExtra.mul_unsigned.
- rewrite Integers.Ptrofs.mul_add_distr_r.
- rewrite PtrofsExtra.mul_repr; simplify; try lia.
- rewrite ZLib.div_mul_undo; try lia.
- rewrite mul_of_int; simplify; try lia.
- rewrite Integers.Int.mul_add_distr_r.
- rewrite Integers.Int.mul_commut.
- rewrite Integers.Int.mul_assoc.
- rewrite IntExtra.mul_repr; simplify; try lia.
- rewrite IntExtra.mul_repr; simplify; try lia.
- rewrite ZLib.div_mul_undo; try lia.
- rewrite Z.mul_comm.
- rewrite ZLib.div_mul_undo; try lia.
- reflexivity.
-
- split.
- apply Z.div_le_lower_bound; lia.
- apply Z.div_le_upper_bound; lia.
-
- split.
- apply Z.div_le_lower_bound; lia.
- apply Z.div_le_upper_bound; lia.
-
- admit. (* FIXME: Register size 32 bits *) }
- reflexivity.
-
remember (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0))
(Integers.Ptrofs.of_int
(Integers.Int.add (Integers.Int.mul (valueToInt asr # r1) (Integers.Int.repr z))
(Integers.Int.repr z0)))) as OFFSET.
(** Read bounds assumption *)
- assert (Integers.Ptrofs.unsigned OFFSET < f.(RTL.fn_stacksize)) as READ_BOUND_HIGH by admit.
+ assert (0 <= Integers.Ptrofs.signed OFFSET) as READ_BOUND_LOW by admit.
+ assert (Integers.Ptrofs.signed OFFSET < f.(RTL.fn_stacksize)) as READ_BOUND_HIGH by admit.
+
+ (** Modular Preservation proof *)
+ assert (Integers.Ptrofs.signed OFFSET mod 4 = 0) as MOD_PRESERVE by admit.
+
+ (** Normalisation proof *)
+ assert (Integers.Ptrofs.repr
+ (4 * Integers.Ptrofs.unsigned
+ (Integers.Ptrofs.divs OFFSET (Integers.Ptrofs.repr 4))) = OFFSET)
+ as NORMALISE.
+ { replace 4 with (Integers.Ptrofs.unsigned (Integers.Ptrofs.repr 4)) at 1 by reflexivity.
+ rewrite <- PtrofsExtra.mul_unsigned.
+ apply PtrofsExtra.mul_divs; auto.
+ rewrite Integers.Ptrofs.signed_repr; simplify; lia. }
+
+ (** Normalised bounds proof *)
+ assert (0 <=
+ Integers.Ptrofs.unsigned (Integers.Ptrofs.divs OFFSET (Integers.Ptrofs.repr 4))
+ < (RTL.fn_stacksize f / 4))
+ as NORMALISE_BOUND.
+ { split.
+ apply Integers.Ptrofs.unsigned_range_2.
+ assert (forall x y, Integers.Ptrofs.divs x y = Integers.Ptrofs.divs x y ) by reflexivity.
+ unfold Integers.Ptrofs.divs at 2 in H0.
+ rewrite H0. clear H0.
+ rewrite Integers.Ptrofs.unsigned_repr; simplify.
+ rewrite Integers.Ptrofs.signed_repr; simplify; try lia.
+ rewrite Z.quot_div_nonneg; try (rewrite <- HeqOFFSET; assumption); try lia.
+ apply Zmult_lt_reg_r with (p := 4); try lia.
+ repeat rewrite ZLib.div_mul_undo; try lia.
+ rewrite Integers.Ptrofs.signed_repr.
+ rewrite Z.quot_div_nonneg; try (rewrite <- HeqOFFSET; assumption); try lia.
+ split.
+ apply Z.div_pos; try (rewrite <- HeqOFFSET; assumption); try lia.
+ apply Z.div_le_upper_bound; simplify; try (rewrite <- HeqOFFSET; lia); try lia.
+ simplify; lia. }
+
+ inversion NORMALISE_BOUND as [ NORMALISE_BOUND_LOW NORMALISE_BOUND_HIGH ];
+ clear NORMALISE_BOUND.
(** Start of proof proper *)
eexists. split.
@@ -806,25 +781,17 @@ Section CORRECTNESS.
eapply Verilog.erun_Vbinop with (EQ := ?[EQ3]). (* FIXME: These will be shelved and cause sadness. *)
eapply Verilog.erun_Vbinop with (EQ := ?[EQ4]).
eapply Verilog.erun_Vbinop with (EQ := ?[EQ5]).
- econstructor.
- econstructor.
- econstructor.
- econstructor.
- econstructor.
- econstructor.
- econstructor.
+ econstructor. econstructor. econstructor. econstructor.
econstructor.
eapply Verilog.erun_Vbinop with (EQ := ?[EQ6]).
- econstructor.
- econstructor.
- econstructor.
- econstructor.
- econstructor.
- econstructor.
- econstructor. (* FIXME: Oh dear. *)
+ econstructor. econstructor. econstructor. econstructor.
+ econstructor. econstructor. econstructor. econstructor.
+ econstructor. econstructor.
+
+ all: simplify.
(** Verilog array lookup *)
- unfold Verilog.arr_assocmap_lookup. simplify. setoid_rewrite H3.
+ unfold Verilog.arr_assocmap_lookup. setoid_rewrite H5.
f_equal.
(** State Lookup *)
@@ -844,42 +811,33 @@ Section CORRECTNESS.
apply regs_lessdef_add_match.
(** Equality proof *)
- assert (Integers.Ptrofs.repr 0 = Integers.Ptrofs.zero) by reflexivity.
- rewrite H0 in H5. clear H0.
- setoid_rewrite Integers.Ptrofs.add_zero_l in H5.
-
- specialize (H5
- (Integers.Ptrofs.unsigned
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0 / 4))
- (Integers.Ptrofs.of_int
- (Integers.Int.add (Integers.Int.mul
- (valueToInt asr # r1)
- (Integers.Int.repr (z / 4)))
- (Integers.Int.repr (z0 / 4))))))).
-
- exploit H5; auto; intros.
- rewrite Z2Nat.id.
- split.
- apply Integers.Ptrofs.unsigned_range_2.
- admit.
- apply Z_div_pos; lia.
- clear H5.
+ assert (Integers.Ptrofs.repr 0 = Integers.Ptrofs.zero) as ZERO by reflexivity.
+ rewrite ZERO in H7. clear ZERO.
+ setoid_rewrite Integers.Ptrofs.add_zero_l in H7.
+ specialize (H7 (Integers.Ptrofs.unsigned
+ (Integers.Ptrofs.divs
+ OFFSET
+ (Integers.Ptrofs.repr 4)))).
+
+ exploit H7.
+ rewrite Z2Nat.id; eauto.
+ apply Z.div_pos; lia.
+
+ intros I.
assert (Z.to_nat
- (Integers.Ptrofs.unsigned
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0 / 4))
- (Integers.Ptrofs.of_int
- (Integers.Int.add
- (Integers.Int.mul (valueToInt asr # r1) (Integers.Int.repr (z / 4)))
- (Integers.Int.repr (z0 / 4))))))
- =
- valueToNat
- (vplus (vplus (vdivs asr # r0 (ZToValue 32 4) ?EQ5) (ZToValue 32 (z0 / 4)) ?EQ4)
- (vmul asr # r1 (ZToValue 32 (z / 4)) ?EQ6) ?EQ3)) as EXPR_OK by admit.
+ (Integers.Ptrofs.unsigned
+ (Integers.Ptrofs.divs
+ OFFSET
+ (Integers.Ptrofs.repr 4)))
+ = valueToNat
+ (vdivs (vplus (vplus asr # r0 (ZToValue 32 z0) ?EQ5)
+ (vmul asr # r1 (ZToValue 32 z) ?EQ6) ?EQ4) (ZToValue 32 4) ?EQ3))
+ as EXPR_OK by admit.
rewrite <- EXPR_OK.
- rewrite <- NORMALISE in H0.
- rewrite H1 in H0.
- invert H0. assumption.
+ rewrite NORMALISE in I.
+ rewrite H1 in I.
+ invert I. assumption.
(** PC match *)
apply regs_lessdef_add_greater.
@@ -904,7 +862,7 @@ Section CORRECTNESS.
2: { reflexivity. }
rewrite AssocMap.gss.
unfold Verilog.merge_arr.
- setoid_rewrite H3.
+ setoid_rewrite H5.
reflexivity.
rewrite combine_length.
@@ -923,25 +881,20 @@ Section CORRECTNESS.
(** RSBP preservation *)
unfold reg_stack_based_pointers. intros.
- destruct (Pos.eq_dec r2 dst); subst.
+ destruct (Pos.eq_dec r2 dst); try rewrite e. (* FIXME: Prepare this for automation *)
rewrite Registers.Regmap.gss.
unfold arr_stack_based_pointers in ASBP.
specialize (ASBP (Integers.Ptrofs.unsigned
- (Integers.Ptrofs.add (Integers.Ptrofs.repr (valueToZ asr # r0 / 4))
- (Integers.Ptrofs.of_int
- (Integers.Int.add
- (Integers.Int.mul (valueToInt asr # r1) (Integers.Int.repr (z / 4)))
- (Integers.Int.repr (z0 / 4))))))).
- exploit ASBP; auto; intros.
- admit.
-
- rewrite <- NORMALISE in H0.
- assert (Integers.Ptrofs.repr 0 = Integers.Ptrofs.zero) by reflexivity.
- rewrite H19 in H0. clear H19.
+ (Integers.Ptrofs.divs OFFSET (Integers.Ptrofs.repr 4)))).
+ exploit ASBP; auto; intros I.
+
+ rewrite NORMALISE in I.
+ assert (Integers.Ptrofs.repr 0 = Integers.Ptrofs.zero) as ZERO by reflexivity.
+ rewrite ZERO in I. clear ZERO.
simplify.
- rewrite Integers.Ptrofs.add_zero_l in H0.
- rewrite H1 in H0.
+ rewrite Integers.Ptrofs.add_zero_l in I.
+ rewrite H1 in I.
assumption.
simplify.
@@ -955,67 +908,124 @@ Section CORRECTNESS.
unfold check_address_parameter in EQ; simplify.
- (** Here we are assuming that any stack read will be within the bounds
- of the activation record. *)
- assert (Integers.Ptrofs.unsigned i0 < f.(RTL.fn_stacksize)) as READ_BOUND_HIGH by admit.
+ assert (Integers.Ptrofs.repr 0 = Integers.Ptrofs.zero) as ZERO by reflexivity.
+ rewrite ZERO in H1. clear ZERO.
+ rewrite Integers.Ptrofs.add_zero_l in H1.
+
+ remember i0 as OFFSET.
+ (** Read bounds assumption *)
+ assert (0 <= Integers.Ptrofs.signed OFFSET) as READ_BOUND_LOW by admit.
+ assert (Integers.Ptrofs.signed OFFSET < f.(RTL.fn_stacksize)) as READ_BOUND_HIGH by admit.
+
+ (** Modular Preservation proof *)
+ assert (Integers.Ptrofs.signed OFFSET mod 4 = 0) as MOD_PRESERVE by admit.
+
+ (** Normalisation proof *)
+ assert (Integers.Ptrofs.repr
+ (4 * Integers.Ptrofs.unsigned
+ (Integers.Ptrofs.divs OFFSET (Integers.Ptrofs.repr 4))) = OFFSET)
+ as NORMALISE.
+ { replace 4 with (Integers.Ptrofs.unsigned (Integers.Ptrofs.repr 4)) at 1 by reflexivity.
+ rewrite <- PtrofsExtra.mul_unsigned.
+ apply PtrofsExtra.mul_divs; auto.
+ rewrite Integers.Ptrofs.signed_repr; simplify; lia. }
+
+ (** Normalised bounds proof *)
+ assert (0 <=
+ Integers.Ptrofs.unsigned (Integers.Ptrofs.divs OFFSET (Integers.Ptrofs.repr 4))
+ < (RTL.fn_stacksize f / 4))
+ as NORMALISE_BOUND.
+ { split.
+ apply Integers.Ptrofs.unsigned_range_2.
+ assert (forall x y, Integers.Ptrofs.divs x y = Integers.Ptrofs.divs x y ) by reflexivity.
+ unfold Integers.Ptrofs.divs at 2 in H0.
+ rewrite H0. clear H0.
+ rewrite Integers.Ptrofs.unsigned_repr; simplify.
+ rewrite Integers.Ptrofs.signed_repr; simplify; try lia.
+ rewrite Z.quot_div_nonneg; try (rewrite <- HeqOFFSET; assumption); try lia.
+ apply Zmult_lt_reg_r with (p := 4); try lia.
+ repeat rewrite ZLib.div_mul_undo; try lia.
+ rewrite Integers.Ptrofs.signed_repr.
+ rewrite Z.quot_div_nonneg; try (rewrite <- HeqOFFSET; assumption); try lia.
+ split.
+ apply Z.div_pos; try (rewrite <- HeqOFFSET; assumption); try lia.
+ apply Z.div_le_upper_bound; simplify; try (rewrite <- HeqOFFSET; lia); try lia.
+ simplify; lia. }
+
+ inversion NORMALISE_BOUND as [ NORMALISE_BOUND_LOW NORMALISE_BOUND_HIGH ];
+ clear NORMALISE_BOUND.
+
+ (** Start of proof proper *)
eexists. split.
eapply Smallstep.plus_one.
eapply HTL.step_module; eauto.
econstructor. econstructor. econstructor. simplify.
econstructor. econstructor. econstructor. econstructor. simplify.
- unfold Verilog.arr_assocmap_lookup. setoid_rewrite H3.
+ all: simplify.
+
+ (** Verilog array lookup *)
+ unfold Verilog.arr_assocmap_lookup. setoid_rewrite H5.
f_equal.
- simplify. unfold Verilog.merge_regs.
+ (** State Lookup *)
+ unfold Verilog.merge_regs.
+ simplify.
unfold_merge.
rewrite AssocMap.gso.
apply AssocMap.gss.
apply st_greater_than_res.
- simplify. rewrite assumption_32bit.
- econstructor; simplify; eauto.
+ (** Match states *)
+ rewrite assumption_32bit.
+ econstructor; eauto.
- unfold Verilog.merge_regs. unfold_merge.
+ (** Match assocmaps *)
+ unfold Verilog.merge_regs. simplify. unfold_merge.
apply regs_lessdef_add_match.
(** Equality proof *)
- (* FIXME: 32-bit issue. *)
- assert (forall x, valueToNat (ZToValue 32 x) = Z.to_nat x) as VALUE_IDENTITY by admit.
- rewrite VALUE_IDENTITY.
- specialize (H5 (Integers.Ptrofs.unsigned i0 / 4)).
- rewrite Z2Nat.id in H5; try lia.
- exploit H5; auto; intros.
- 1: {
- split.
- - apply Z.div_pos; try lia.
- apply Integers.Ptrofs.unsigned_range_2.
- - apply Zmult_lt_reg_r with (p := 4); try lia.
- repeat rewrite ZLib.div_mul_undo; lia.
- }
- 2: {
- apply Z.div_pos; lia.
- }
- replace (4 * (Integers.Ptrofs.unsigned i0 / 4)) with (Integers.Ptrofs.unsigned i0) in H0.
- 2: {
- rewrite Z.mul_comm.
- rewrite ZLib.div_mul_undo; lia.
- }
- rewrite Integers.Ptrofs.repr_unsigned in H0.
- rewrite H1 in H0.
- invert H0. assumption.
+ assert (Integers.Ptrofs.repr 0 = Integers.Ptrofs.zero) as ZERO by reflexivity.
+ rewrite ZERO in H7. clear ZERO.
+ setoid_rewrite Integers.Ptrofs.add_zero_l in H7.
+
+ specialize (H7 (Integers.Ptrofs.unsigned
+ (Integers.Ptrofs.divs
+ OFFSET
+ (Integers.Ptrofs.repr 4)))).
+
+ exploit H7.
+ rewrite Z2Nat.id; eauto.
+ apply Z.div_pos; lia.
+
+ intros I.
+ assert (Z.to_nat
+ (Integers.Ptrofs.unsigned
+ (Integers.Ptrofs.divs
+ OFFSET
+ (Integers.Ptrofs.repr 4)))
+ =
+ valueToNat (ZToValue 32 (Integers.Ptrofs.unsigned OFFSET / 4)))
+ as EXPR_OK by admit.
+ rewrite <- EXPR_OK.
+ rewrite NORMALISE in I.
+ rewrite H1 in I.
+ invert I. assumption.
+ (** PC match *)
apply regs_lessdef_add_greater.
apply greater_than_max_func.
assumption.
+ (** States well formed *)
unfold state_st_wf. inversion 1. simplify.
unfold Verilog.merge_regs.
unfold_merge. rewrite AssocMap.gso.
apply AssocMap.gss.
apply st_greater_than_res.
+ (** Match arrays *)
econstructor.
repeat split; simplify.
unfold HTL.empty_stack.
@@ -1026,7 +1036,7 @@ Section CORRECTNESS.
2: { reflexivity. }
rewrite AssocMap.gss.
unfold Verilog.merge_arr.
- setoid_rewrite H3.
+ setoid_rewrite H5.
reflexivity.
rewrite combine_length.
@@ -1043,30 +1053,24 @@ Section CORRECTNESS.
eauto. apply combine_none.
assumption.
+ (** RSBP preservation *)
unfold reg_stack_based_pointers. intros.
- destruct (Pos.eq_dec r0 dst); subst.
+ destruct (Pos.eq_dec r0 dst); try rewrite e. (* FIXME: Prepare this for automation *)
rewrite Registers.Regmap.gss.
unfold arr_stack_based_pointers in ASBP.
- specialize (ASBP (Integers.Ptrofs.unsigned i0 / 4)).
- exploit ASBP; auto; intros.
- 1: {
- split.
- - apply Z.div_pos; try lia.
- apply Integers.Ptrofs.unsigned_range_2.
- - apply Zmult_lt_reg_r with (p := 4); try lia.
- repeat rewrite ZLib.div_mul_undo; lia.
- }
- replace (4 * (Integers.Ptrofs.unsigned i0 / 4)) with (Integers.Ptrofs.unsigned i0) in H0.
- 2: {
- rewrite Z.mul_comm.
- rewrite ZLib.div_mul_undo; lia.
- }
- rewrite Integers.Ptrofs.repr_unsigned in H0.
- simplify.
- rewrite H1 in H0.
+ specialize (ASBP (Integers.Ptrofs.unsigned
+ (Integers.Ptrofs.divs OFFSET (Integers.Ptrofs.repr 4)))).
+ exploit ASBP; auto; intros I.
+
+ rewrite NORMALISE in I.
+ assert (Integers.Ptrofs.repr 0 = Integers.Ptrofs.zero) as ZERO by reflexivity.
+ rewrite ZERO in I. clear ZERO.
simplify.
+ rewrite Integers.Ptrofs.add_zero_l in I.
+ rewrite H1 in I.
assumption.
+ simplify.
rewrite Registers.Regmap.gso; auto.
diff --git a/src/translation/HTLgenspec.v b/src/translation/HTLgenspec.v
index 528c662..42de96b 100644
--- a/src/translation/HTLgenspec.v
+++ b/src/translation/HTLgenspec.v
@@ -189,7 +189,7 @@ Inductive tr_module (f : RTL.function) : module -> Prop :=
tr_code f.(RTL.fn_code) pc i data control fin rtrn st stk) ->
stk_len = Z.to_nat (f.(RTL.fn_stacksize) / 4) ->
Z.modulo (f.(RTL.fn_stacksize)) 4 = 0 ->
- 0 <= f.(RTL.fn_stacksize) ->
+ 0 <= f.(RTL.fn_stacksize) < Integers.Ptrofs.modulus ->
m = (mkmodule f.(RTL.fn_params)
data
control
@@ -454,7 +454,8 @@ Proof.
unfold transf_module in *.
unfold stack_correct in *.
- destruct (0 <=? RTL.fn_stacksize f) eqn:STACK_BOUND;
+ destruct (0 <=? RTL.fn_stacksize f) eqn:STACK_BOUND_LOW;
+ destruct (RTL.fn_stacksize f <? Integers.Ptrofs.modulus) eqn:STACK_BOUND_HIGH;
destruct (RTL.fn_stacksize f mod 4 =? 0) eqn:STACK_ALIGN;
simplify;
monadInv Heqr.
@@ -463,7 +464,7 @@ Proof.
pose proof (create_arr_inv _ _ _ _ _ _ _ _ EQ0) as STK_LEN.
rewrite <- STK_LEN.
- econstructor; simpl; trivial.
+ econstructor; simpl; auto.
intros.
inv_incr.
assert (EQ3D := EQ3).