author     Léo Gourdin <leo.gourdin@univ-grenoble-alpes.fr>   2020-11-25 17:23:12 +0100
committer  Léo Gourdin <leo.gourdin@univ-grenoble-alpes.fr>   2020-11-25 17:23:12 +0100
commit     713d5663a6c7b75eee9a9ae57cfecf8332e72be4 (patch)
tree       f8edb58f1f9c92482b548dd320fab32324e7563e
parent     fbe134640de11d69cef417e2e59e2e2669ccc7ad (diff)
Removing OrigAsmgen by moving the necessary functions into Asmgen.v
-rw-r--r--  aarch64/Asmexpand.ml     |   6
-rw-r--r--  aarch64/Asmgen.v         |  77
-rw-r--r--  aarch64/OrigAsmgen.ml    | 282
-rw-r--r--  extraction/extraction.v  |   4
4 files changed, 83 insertions, 286 deletions
diff --git a/aarch64/Asmexpand.ml b/aarch64/Asmexpand.ml
index 0cee763e..c2519e6a 100644
--- a/aarch64/Asmexpand.ml
+++ b/aarch64/Asmexpand.ml
@@ -34,13 +34,13 @@ let _m1 = Z.of_sint (-1)
(* Emit instruction sequences that set or offset a register by a constant. *)
let expand_loadimm32 (dst: ireg) n =
- List.iter emit (OrigAsmgen.loadimm32 dst n [])
+ List.iter emit (Asmgen.Asmgen_expand.loadimm32 dst n [])
let expand_addimm64 (dst: iregsp) (src: iregsp) n =
- List.iter emit (OrigAsmgen.addimm64 dst src n [])
+ List.iter emit (Asmgen.Asmgen_expand.addimm64 dst src n [])
let expand_storeptr (src: ireg) (base: iregsp) ofs =
- List.iter emit (OrigAsmgen.storeptr src base ofs [])
+ List.iter emit (Asmgen.Asmgen_expand.storeptr src base ofs [])
(* Handling of varargs *)
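
These three helpers are written in continuation-passing style: each takes the instruction tail k and conses its own instructions in front of it, which is why the call sites above pass [] as the tail and iterate emit over the result. A minimal standalone OCaml sketch of that calling pattern (the instr type, the chunk values, and the printer are illustrative stand-ins, not CompCert definitions):

type instr = Movz of int * int | Movk of int * int

let load_16bit_chunks (chunks : (int * int) list) (k : instr list) : instr list =
  match chunks with
  | [] -> Movz (0, 0) :: k
  | (n, p) :: rest ->
      Movz (n, p) :: List.fold_right (fun (n, p) k -> Movk (n, p) :: k) rest k

let () =
  let emit i =
    print_endline
      (match i with
       | Movz (n, p) -> Printf.sprintf "movz #0x%x, lsl #%d" n p
       | Movk (n, p) -> Printf.sprintf "movk #0x%x, lsl #%d" n p)
  in
  (* mirrors the call sites above: List.iter emit (... loadimm32 dst n []) *)
  List.iter emit (load_16bit_chunks [ (0xabcd, 0); (0x2345, 16) ] [])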
diff --git a/aarch64/Asmgen.v b/aarch64/Asmgen.v
index 94db0e22..78c3f156 100644
--- a/aarch64/Asmgen.v
+++ b/aarch64/Asmgen.v
@@ -23,6 +23,83 @@ Require Import Mach Asm Asmblock Asmblockgen Machblockgen PseudoAsmblock PseudoA
Local Open Scope error_monad_scope.
+(** Functions called by the Asmexpand OCaml file, inspired by and adapted from Asmblockgen.v *)
+
+Module Asmgen_expand.
+
+(* Load immediate *)
+
+Definition loadimm_k (sz: isize) (rd: ireg) (l: list (Z * Z)) (k: code) : code :=
+ List.fold_right (fun np k => Asm.Pmovk sz rd (fst np) (snd np) :: k) k l.
+
+Definition loadimm_z (sz: isize) (rd: ireg) (l: list (Z * Z)) (k: code) : code :=
+ match l with
+ | nil => Asm.Pmovz sz rd 0 0 :: k
+ | (n1, p1) :: l => Asm.Pmovz sz rd n1 p1 :: loadimm_k sz rd l k
+ end.
+
+Definition loadimm_n (sz: isize) (rd: ireg) (l: list (Z * Z)) (k: code) : code :=
+ match l with
+ | nil => Asm.Pmovn sz rd 0 0 :: k
+ | (n1, p1) :: l => Asm.Pmovn sz rd n1 p1 :: loadimm_k sz rd (negate_decomposition l) k
+ end.
+
+Definition loadimm (sz: isize) (rd: ireg) (n: Z) (k: code) : code :=
+ let N := match sz with W => 2%nat | X => 4%nat end in
+ let dz := decompose_int N n 0 in
+ let dn := decompose_int N (Z.lnot n) 0 in
+ if Nat.leb (List.length dz) (List.length dn)
+ then loadimm_z sz rd dz k
+ else loadimm_n sz rd dn k.
+
+Definition loadimm32 (rd: ireg) (n: int) (k: code) : code :=
+ if is_logical_imm32 n
+ then Asm.Porrimm W rd XZR (Int.unsigned n) :: k
+ else loadimm W rd (Int.unsigned n) k.
+
+Definition loadimm64 (rd: ireg) (n: int64) (k: code) : code :=
+ if is_logical_imm64 n
+ then Asm.Porrimm X rd XZR (Int64.unsigned n) :: k
+ else loadimm X rd (Int64.unsigned n) k.
+
+(* Add immediate *)
+
+Definition addimm_aux (insn: iregsp -> iregsp -> Z -> instruction)
+ (rd r1: iregsp) (n: Z) (k: code) :=
+ let nlo := Zzero_ext 12 n in
+ let nhi := n - nlo in
+ if Z.eqb nhi 0 then
+ insn rd r1 nlo :: k
+ else if Z.eqb nlo 0 then
+ insn rd r1 nhi :: k
+ else
+ insn rd r1 nhi :: insn rd rd nlo :: k.
+
+Definition addimm64 (rd r1: iregsp) (n: int64) (k: code) : code :=
+ let m := Int64.neg n in
+ if Int64.eq n (Int64.zero_ext 24 n) then
+ addimm_aux (Asm.Paddimm X) rd r1 (Int64.unsigned n) k
+ else if Int64.eq m (Int64.zero_ext 24 m) then
+ addimm_aux (Asm.Psubimm X) rd r1 (Int64.unsigned m) k
+ else if Int64.lt n Int64.zero then
+ loadimm64 X16 m (Asm.Psubext rd r1 X16 (EOuxtx Int.zero) :: k)
+ else
+ loadimm64 X16 n (Asm.Paddext rd r1 X16 (EOuxtx Int.zero) :: k).
+
+(* Indexed memory accesses *)
+
+Definition indexed_memory_access (insn: Asm.addressing -> instruction)
+ (sz: Z) (base: iregsp) (ofs: ptrofs) (k: code) :=
+ let ofs := Ptrofs.to_int64 ofs in
+ if offset_representable sz ofs
+ then insn (ADimm base ofs) :: k
+ else loadimm64 X16 ofs (insn (ADreg base X16) :: k).
+
+Definition storeptr (src: ireg) (base: iregsp) (ofs: ptrofs) (k: code) :=
+ indexed_memory_access (Asm.Pstrx src) 8 base ofs k.
+
+End Asmgen_expand.
+
(** * Translation from Asmblock to assembly language
    Inspired by the KVX backend (see kvx/Asm.v and kvx/Asmgen.v) *)
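
The loadimm family above relies on decompose_int and negate_decomposition (imported here, and visible in full in the deleted OrigAsmgen.ml below): the constant is split into its non-zero 16-bit (fragment, shift) chunks, and whichever of the movz/movk or movn/movk sequences is shorter gets emitted. A standalone OCaml sketch of that strategy, with plain int in place of coq_Z (it assumes the constant fits in OCaml's 63-bit native int):

let rec decompose_int n x p =
  if n = 0 then []
  else
    let frag = (x asr p) land 0xffff in
    if frag = 0 then decompose_int (n - 1) x (p + 16)
    else (frag, p) :: decompose_int (n - 1) (x land lnot (0xffff lsl p)) (p + 16)

let loadimm_strategy x =
  let dz = decompose_int 4 x 0 in          (* movz/movk candidate *)
  let dn = decompose_int 4 (lnot x) 0 in   (* movn/movk candidate *)
  if List.length dz <= List.length dn then ("movz", dz) else ("movn", dn)

let () =
  let name, chunks = loadimm_strategy 0x12345ABCD in
  Printf.printf "%s path: %s\n" name
    (String.concat "; "
       (List.map (fun (f, p) -> Printf.sprintf "0x%x lsl %d" f p) chunks))

For 0x12345ABCD the movz candidate has three chunks and the negated form four, so the movz/movk path wins.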
diff --git a/aarch64/OrigAsmgen.ml b/aarch64/OrigAsmgen.ml
deleted file mode 100644
index 1bbcf085..00000000
--- a/aarch64/OrigAsmgen.ml
+++ /dev/null
@@ -1,282 +0,0 @@
-(** File (more or less) extracted from the official aarch64/Asmgen.v of CompCert
-
-TODO: prune some definition in these files by reusing the ones of Asmblockgen that are identical...
-e.g. all those which do not involve Asm.code
-
-**)
-
-open Asm
-open BinInt
-open BinNums
-open Integers
-open List0
-open Zbits
-
-module I = Integers.Int
-module I64 = Integers.Int64
-module N = PeanoNat.Nat
-
-let s_ x = Datatypes.S x
-let o_ = Datatypes.O
-
-module Automaton =
- struct
- type state =
- | SA
- | SB
- | SC
- | SD
- | SE
- | SF
- | SG
- | Sbad
-
- (** val start : state **)
-
- let start =
- SA
-
- (** val next : state -> bool -> state **)
-
- let next s b =
- match s with
- | SA -> if b then SC else SB
- | SB -> if b then SD else SB
- | SC -> if b then SC else SE
- | SD -> if b then SD else SF
- | SE -> if b then SG else SE
- | SF -> if b then Sbad else SF
- | SG -> if b then SG else Sbad
- | Sbad -> Sbad
-
- (** val accepting : state -> bool **)
-
- let accepting = function
- | Sbad -> false
- | _ -> true
-
- (** val run : nat -> state -> coq_Z -> bool **)
-
- let rec run len s x =
- match len with
- | Datatypes.O -> accepting s
- | Datatypes.S len0 -> run len0 (next s (Z.odd x)) (Z.div2 x)
- end
-
-(** val logical_imm_length : coq_Z -> bool -> nat **)
-
-let logical_imm_length x sixtyfour =
- let test = fun n ->
- Z.eqb (coq_Zzero_ext n x) (coq_Zzero_ext n (Z.shiftr x n))
- in
- if (&&) sixtyfour
- (Datatypes.negb
- (test (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH))))))))
- then s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
- (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
- (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
- o_)))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))
- else if Datatypes.negb (test (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH))))))
- then s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
- (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ o_)))))))))))))))))))))))))))))))
- else if Datatypes.negb (test (Zpos (Coq_xO (Coq_xO (Coq_xO Coq_xH)))))
- then s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
- o_)))))))))))))))
- else if Datatypes.negb (test (Zpos (Coq_xO (Coq_xO Coq_xH))))
- then s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ o_)))))))
- else if Datatypes.negb (test (Zpos (Coq_xO Coq_xH)))
- then s_ (s_ (s_ (s_ o_)))
- else s_ (s_ o_)
-
-(** val is_logical_imm32 : I.int -> bool **)
-
-let is_logical_imm32 x =
- (&&) ((&&) (Datatypes.negb (I.eq x I.zero)) (Datatypes.negb (I.eq x I.mone)))
- (Automaton.run (logical_imm_length (I.unsigned x) false)
- Automaton.start (I.unsigned x))
-
-(** val is_logical_imm64 : I64.int -> bool **)
-
-let is_logical_imm64 x =
- (&&) ((&&) (Datatypes.negb (I64.eq x I64.zero)) (Datatypes.negb (I64.eq x I64.mone)))
- (Automaton.run (logical_imm_length (I64.unsigned x) true)
- Automaton.start (I64.unsigned x))
-
-(** val is_arith_imm32 : I.int -> bool **)
-
-let is_arith_imm32 x =
- (||) (I.eq x (I.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))) x))
- (I.eq x
- (I.shl
- (I.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH))))
- (I.shru x (I.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))))
- (I.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))))
-
-(** val is_arith_imm64 : I64.int -> bool **)
-
-let is_arith_imm64 x =
- (||)
- (I64.eq x (I64.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))) x))
- (I64.eq x
- (I64.shl
- (I64.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH))))
- (I64.shru x (I64.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))))
- (I64.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))))
-
-(** val decompose_int : nat -> coq_Z -> coq_Z -> (coq_Z * coq_Z) list **)
-
-let rec decompose_int n n0 p =
- match n with
- | Datatypes.O -> []
- | Datatypes.S n1 ->
- let frag =
- coq_Zzero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH)))))
- (Z.shiftr n0 p)
- in
- if Z.eqb frag Z0
- then decompose_int n1 n0
- (Z.add p (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH))))))
- else (frag,
- p) :: (decompose_int n1
- (Z.ldiff n0
- (Z.shiftl (Zpos (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI
- (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI
- (Coq_xI (Coq_xI (Coq_xI (Coq_xI Coq_xH))))))))))))))))
- p))
- (Z.add p (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH)))))))
-
-(** val negate_decomposition :
- (coq_Z * coq_Z) list -> (coq_Z * coq_Z) list **)
-
-let negate_decomposition l =
- map (fun np ->
- ((Z.coq_lxor (fst np) (Zpos (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI
- (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI
- (Coq_xI (Coq_xI Coq_xH))))))))))))))))), (snd np))) l
-
-(** val loadimm_k :
- isize -> ireg -> (coq_Z * coq_Z) list -> Asm.code -> Asm.code **)
-
-let loadimm_k sz rd l k =
- fold_right (fun np k0 -> (Pmovk (sz, rd, (fst np), (snd np))) :: k0) k l
-
-(** val loadimm_z :
- isize -> ireg -> (coq_Z * coq_Z) list -> Asm.code -> Asm.code **)
-
-let loadimm_z sz rd l k =
- match l with
- | [] -> (Pmovz (sz, rd, Z0, Z0)) :: k
- | p :: l0 ->
- let (n1, p1) = p in (Pmovz (sz, rd, n1, p1)) :: (loadimm_k sz rd l0 k)
-
-(** val loadimm_n :
- isize -> ireg -> (coq_Z * coq_Z) list -> Asm.code -> Asm.code **)
-
-let loadimm_n sz rd l k =
- match l with
- | [] -> (Pmovn (sz, rd, Z0, Z0)) :: k
- | p :: l0 ->
- let (n1, p1) = p in
- (Pmovn (sz, rd, n1, p1)) :: (loadimm_k sz rd (negate_decomposition l0) k)
-
-(** val loadimm : isize -> ireg -> coq_Z -> Asm.code -> Asm.code **)
-
-let loadimm sz rd n k =
- let n0 = match sz with
- | W -> s_ (s_ o_)
- | X -> s_ (s_ (s_ (s_ o_))) in
- let dz = decompose_int n0 n Z0 in
- let dn = decompose_int n0 (Z.lnot n) Z0 in
- if N.leb (Datatypes.length dz) (Datatypes.length dn)
- then loadimm_z sz rd dz k
- else loadimm_n sz rd dn k
-
-(** val loadimm32 : ireg -> I.int -> Asm.code -> Asm.code **)
-
-let loadimm32 rd n k =
- if is_logical_imm32 n
- then (Porrimm (W, rd, XZR, (I.unsigned n))) :: k
- else loadimm W rd (I.unsigned n) k
-
-(** val loadimm64 : ireg -> I64.int -> Asm.code -> Asm.code **)
-
-let loadimm64 rd n k =
- if is_logical_imm64 n
- then (Porrimm (X, rd, XZR, (I64.unsigned n))) :: k
- else loadimm X rd (I64.unsigned n) k
-
-(** val addimm_aux :
- (iregsp -> iregsp -> coq_Z -> Asm.instruction) -> iregsp -> iregsp ->
- coq_Z -> Asm.code -> Asm.instruction list **)
-
-let addimm_aux insn rd r1 n k =
- let nlo = coq_Zzero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))) n in
- let nhi = Z.sub n nlo in
- if Z.eqb nhi Z0
- then (insn rd r1 nlo) :: k
- else if Z.eqb nlo Z0
- then (insn rd r1 nhi) :: k
- else (insn rd r1 nhi) :: ((insn rd rd nlo) :: k)
-
-(** val addimm32 : ireg -> ireg -> I.int -> Asm.code -> Asm.code **)
-
-let addimm32 rd r1 n k =
- let m = I.neg n in
- if I.eq n
- (I.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xI Coq_xH))))) n)
- then addimm_aux (fun x x0 x1 -> Paddimm (W, x, x0, x1)) (RR1 rd) (RR1 r1)
- (I.unsigned n) k
- else if I.eq m
- (I.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xI Coq_xH))))) m)
- then addimm_aux (fun x x0 x1 -> Psubimm (W, x, x0, x1)) (RR1 rd) (RR1
- r1) (I.unsigned m) k
- else if I.lt n I.zero
- then loadimm32 X16 m ((Psub (W, rd, (RR0 r1), X16, SOnone)) :: k)
- else loadimm32 X16 n ((Padd (W, rd, (RR0 r1), X16, SOnone)) :: k)
-
-(** val addimm64 : iregsp -> iregsp -> I64.int -> Asm.code -> Asm.code **)
-
-let addimm64 rd r1 n k =
- let m = I64.neg n in
- if I64.eq n
- (I64.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xI Coq_xH))))) n)
- then addimm_aux (fun x x0 x1 -> Paddimm (X, x, x0, x1)) rd r1
- (I64.unsigned n) k
- else if I64.eq m
- (I64.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))
- m)
- then addimm_aux (fun x x0 x1 -> Psubimm (X, x, x0, x1)) rd r1
- (I64.unsigned m) k
- else if I64.lt n I64.zero
- then loadimm64 X16 m ((Psubext (rd, r1, X16, (EOuxtx
- I.zero))) :: k)
- else loadimm64 X16 n ((Paddext (rd, r1, X16, (EOuxtx
- I.zero))) :: k)
-
-(** val offset_representable : coq_Z -> I64.int -> bool **)
-
-let offset_representable sz ofs =
- let isz = I64.repr sz in
- (||)
- (I64.eq ofs
- (I64.sign_ext (Zpos (Coq_xI (Coq_xO (Coq_xO Coq_xH)))) ofs))
- ((&&) (I64.eq (I64.modu ofs isz) I64.zero)
- (I64.ltu ofs
- (I64.shl isz (I64.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH))))))))
-
-(** val indexed_memory_access :
- (Asm.addressing -> Asm.instruction) -> coq_Z -> iregsp -> Ptrofs.int ->
- Asm.code -> Asm.instruction list **)
-
-let indexed_memory_access insn sz base ofs k =
- let ofs0 = Ptrofs.to_int64 ofs in
- if offset_representable sz ofs0
- then (insn (ADimm (base, ofs0))) :: k
- else loadimm64 X16 ofs0 ((insn (ADreg (base, X16))) :: k)
-
-(** val storeptr :
- ireg -> iregsp -> Ptrofs.int -> Asm.code -> Asm.instruction list **)
-
-let storeptr src base ofs k =
- indexed_memory_access (fun x -> Pstrx (src, x)) (Zpos (Coq_xO (Coq_xO
- (Coq_xO Coq_xH)))) base ofs k
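
addimm_aux, shared by the deleted addimm32/addimm64 above and the new Asmgen_expand.addimm64, exploits the AArch64 add/sub immediate format: 12 bits, optionally shifted left by 12, so any constant below 2^24 costs at most two instructions, while anything larger is first materialized into the scratch register X16. A standalone OCaml sketch of the low/high split (addimm_split is a hypothetical name; plain int stands in for coq_Z; the 24-bit test and the negative case live in addimm64 itself):

let zzero_ext bits x = x land ((1 lsl bits) - 1)

(* immediates of the (at most two) add/sub instructions *)
let addimm_split n =
  let nlo = zzero_ext 12 n in
  let nhi = n - nlo in
  if nhi = 0 then [ nlo ]
  else if nlo = 0 then [ nhi ]
  else [ nhi; nlo ]

let () =
  assert (addimm_split 0x5 = [ 0x5 ]);           (* add #5 *)
  assert (addimm_split 0x7000 = [ 0x7000 ]);     (* add #7, lsl #12 *)
  assert (addimm_split 0x7005 = [ 0x7000; 0x5 ]) (* add #7, lsl #12; add #5 *)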
diff --git a/extraction/extraction.v b/extraction/extraction.v
index f5b8291b..ffcebaaa 100644
--- a/extraction/extraction.v
+++ b/extraction/extraction.v
@@ -247,4 +247,6 @@ Separate Extraction
Globalenvs.Senv.invert_symbol
Parser.translation_unit_file
Compopts.optim_postpass
- Archi.has_notrap_loads.
+ Archi.has_notrap_loads
+ Asmgen.Asmgen_expand.loadimm32 Asmgen.Asmgen_expand.addimm64
+ Asmgen.Asmgen_expand.storeptr.
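
Lastly, offset_representable, used by indexed_memory_access and storeptr in both the new and the deleted code, mirrors the two AArch64 load/store immediate encodings: a 9-bit signed unscaled offset, or a 12-bit unsigned offset scaled by the access size; anything else is routed through X16. A standalone OCaml sketch, with plain int standing in for Int64 and a 0 <= ofs test modelling the unsigned comparison (illustrative only):

let sign_ext bits x =
  let shift = Sys.int_size - bits in
  (x lsl shift) asr shift

let offset_representable sz ofs =
  ofs = sign_ext 9 ofs                                  (* 9-bit signed, unscaled *)
  || (ofs mod sz = 0 && 0 <= ofs && ofs < sz lsl 12)    (* 12-bit unsigned, scaled *)

let () =
  assert (offset_representable 8 32760);       (* 8 * 4095: largest scaled offset *)
  assert (not (offset_representable 8 32768));
  assert (offset_representable 8 (-256))       (* reachable as signed unscaled *)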