path: root/lib/Integers.v
Diffstat (limited to 'lib/Integers.v')
-rw-r--r--  lib/Integers.v  116
1 file changed, 114 insertions, 2 deletions
diff --git a/lib/Integers.v b/lib/Integers.v
index 2addc78b..b6c41d8d 100644
--- a/lib/Integers.v
+++ b/lib/Integers.v
@@ -2851,7 +2851,7 @@ Qed.
Corollary sign_ext_shr_shl:
forall n x,
- 0 < n < zwordsize ->
+ 0 < n <= zwordsize ->
let y := repr (zwordsize - n) in
sign_ext n x = shr (shl x y) y.
Proof.
@@ -2886,7 +2886,7 @@ Qed.
Lemma sign_ext_range:
forall n x, 0 < n < zwordsize -> -two_p (n-1) <= signed (sign_ext n x) < two_p (n-1).
Proof.
- intros. rewrite sign_ext_shr_shl; auto.
+ intros. rewrite sign_ext_shr_shl by lia.
set (X := shl x (repr (zwordsize - n))).
assert (two_p (n - 1) > 0) by (apply two_p_gt_ZERO; lia).
assert (unsigned (repr (zwordsize - n)) = zwordsize - n).
@@ -3474,6 +3474,118 @@ Proof.
lia.
Qed.
+(** ** Accessing bit fields *)
+
+Definition unsigned_bitfield_extract (pos width: Z) (n: int) : int :=
+ zero_ext width (shru n (repr pos)).
+
+Definition signed_bitfield_extract (pos width: Z) (n: int) : int :=
+ sign_ext width (shru n (repr pos)).
+
+Definition bitfield_insert (pos width: Z) (n p: int) : int :=
+ let mask := shl (repr (two_p width - 1)) (repr pos) in
+ or (shl (zero_ext width p) (repr pos))
+ (and n (not mask)).
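
For illustration only, not part of the patch: a minimal sketch of what the three operations compute, assuming the 32-bit instance Int that this file builds from Make in the specialization section at the end. The concrete operands are made-up example values.

  (* Extract the 8-bit field at position 8 of 0x0000AB00 (= 43776): expect 0xAB (= 171). *)
  Compute Int.unsigned (Int.unsigned_bitfield_extract 8 8 (Int.repr 43776)).
  (* Read the 4-bit field at position 4 of 0x000000F0 as a signed value: expect -1. *)
  Compute Int.signed (Int.signed_bitfield_extract 4 4 (Int.repr 240)).
  (* Insert 5 into the 4-bit field at position 4 of 0xFFFFFFFF: expect 0xFFFFFF5F. *)
  Compute Int.unsigned (Int.bitfield_insert 4 4 (Int.repr (-1)) (Int.repr 5)).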
+
+Lemma bits_unsigned_bitfield_extract:
+ forall pos width n i,
+ 0 <= pos -> 0 < width -> pos + width <= zwordsize ->
+ 0 <= i < zwordsize ->
+ testbit (unsigned_bitfield_extract pos width n) i =
+ if zlt i width then testbit n (i + pos) else false.
+Proof.
+ intros. unfold unsigned_bitfield_extract. rewrite bits_zero_ext by lia.
+ destruct (zlt i width); auto.
+ rewrite bits_shru by auto. rewrite unsigned_repr, zlt_true. auto.
+ lia.
+ generalize wordsize_max_unsigned; lia.
+Qed.
+
+Lemma bits_signed_bitfield_extract:
+ forall pos width n i,
+ 0 <= pos -> 0 < width -> pos + width <= zwordsize ->
+ 0 <= i < zwordsize ->
+ testbit (signed_bitfield_extract pos width n) i =
+ testbit n (if zlt i width then i + pos else width - 1 + pos).
+Proof.
+ intros. unfold signed_bitfield_extract. rewrite bits_sign_ext by lia.
+ rewrite bits_shru, unsigned_repr, zlt_true.
+ destruct (zlt i width); auto.
+ destruct (zlt i width); lia.
+ generalize wordsize_max_unsigned; lia.
+ destruct (zlt i width); lia.
+Qed.
+
+Lemma bits_bitfield_insert:
+ forall pos width n p i,
+ 0 <= pos -> 0 < width -> pos + width <= zwordsize ->
+ 0 <= i < zwordsize ->
+ testbit (bitfield_insert pos width n p) i =
+ if zle pos i && zlt i (pos + width) then testbit p (i - pos) else testbit n i.
+Proof.
+ intros. unfold bitfield_insert.
+ assert (P: unsigned (repr pos) = pos).
+ { apply unsigned_repr. generalize wordsize_max_unsigned; lia. }
+ rewrite bits_or, bits_and, bits_not, ! bits_shl, ! P by auto.
+ destruct (zlt i pos).
+- unfold proj_sumbool; rewrite zle_false by lia. cbn. apply andb_true_r.
+- unfold proj_sumbool; rewrite zle_true by lia; cbn.
+ rewrite bits_zero_ext, testbit_repr, Ztestbit_two_p_m1 by lia.
+ destruct (zlt (i - pos) width); cbn.
++ rewrite zlt_true by lia. rewrite andb_false_r, orb_false_r. auto.
++ rewrite zlt_false by lia. apply andb_true_r.
+Qed.
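
For illustration only, not part of the patch: a concrete check of this bit-level characterization, again on assumed values and the 32-bit instance Int. Inserting p = 5 (binary 0101) into the 4-bit field at position 4 of zero should set bits 4 and 6 and clear bits 5 and 7, since bit i of the result is bit (i - pos) of p inside the field.

  Compute Int.testbit (Int.bitfield_insert 4 4 Int.zero (Int.repr 5)) 6.  (* expect true: bit 2 of p *)
  Compute Int.testbit (Int.bitfield_insert 4 4 Int.zero (Int.repr 5)) 5.  (* expect false: bit 1 of p *)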
+
+Lemma unsigned_bitfield_extract_by_shifts:
+ forall pos width n,
+ 0 <= pos -> 0 < width -> pos + width <= zwordsize ->
+ unsigned_bitfield_extract pos width n =
+ shru (shl n (repr (zwordsize - pos - width))) (repr (zwordsize - width)).
+Proof.
+ intros. apply same_bits_eq; intros.
+ rewrite bits_unsigned_bitfield_extract by lia.
+ rewrite bits_shru by auto.
+ rewrite unsigned_repr by (generalize wordsize_max_unsigned; lia).
+ destruct (zlt i width).
+- rewrite bits_shl by lia.
+ rewrite unsigned_repr by (generalize wordsize_max_unsigned; lia).
+ rewrite zlt_true by lia. rewrite zlt_false by lia. f_equal; lia.
+- rewrite zlt_false by lia. auto.
+Qed.
+
+Lemma signed_bitfield_extract_by_shifts:
+ forall pos width n,
+ 0 <= pos -> 0 < width -> pos + width <= zwordsize ->
+ signed_bitfield_extract pos width n =
+ shr (shl n (repr (zwordsize - pos - width))) (repr (zwordsize - width)).
+Proof.
+ intros. apply same_bits_eq; intros.
+ rewrite bits_signed_bitfield_extract by lia.
+ rewrite bits_shr by auto.
+ rewrite unsigned_repr by (generalize wordsize_max_unsigned; lia).
+ rewrite bits_shl.
+ rewrite unsigned_repr by (generalize wordsize_max_unsigned; lia).
+ symmetry. rewrite zlt_false. f_equal.
+ destruct (zlt i width); [rewrite zlt_true | rewrite zlt_false]; lia.
+ destruct zlt; lia.
+ destruct zlt; lia.
+Qed.
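
For illustration only, not part of the patch: these two lemmas say that bitfield extraction is the usual shift-left-then-shift-right idiom. A sketch of one concrete instance of the unsigned case, on assumed values with pos = 8 and width = 8 over Int (zwordsize = 32, so the shift amounts are 16 and 24):

  Compute Int.eq (Int.unsigned_bitfield_extract 8 8 (Int.repr 43776))
                 (Int.shru (Int.shl (Int.repr 43776) (Int.repr 16)) (Int.repr 24)).
  (* expect true *)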
+
+Lemma bitfield_insert_alternative:
+ forall pos width n p,
+ 0 <= width ->
+ bitfield_insert pos width n p =
+ let mask := shl (repr (two_p width - 1)) (repr pos) in
+ or (and (shl p (repr pos)) mask)
+ (and n (not mask)).
+Proof.
+ intros. unfold bitfield_insert.
+ set (m1 := repr (two_p width - 1)).
+ set (m2 := shl m1 (repr pos)).
+ f_equal.
+ rewrite zero_ext_and by lia. fold m1. unfold m2. rewrite <- and_shl. auto.
+Qed.
+
End Make.
(** * Specialization to integers of size 8, 32, and 64 bits *)