path: root/aarch64/Asmgen.v
author    Léo Gourdin <leo.gourdin@univ-grenoble-alpes.fr>    2020-11-25 17:23:12 +0100
committer Léo Gourdin <leo.gourdin@univ-grenoble-alpes.fr>    2020-11-25 17:23:12 +0100
commit    713d5663a6c7b75eee9a9ae57cfecf8332e72be4 (patch)
tree      f8edb58f1f9c92482b548dd320fab32324e7563e /aarch64/Asmgen.v
parent    fbe134640de11d69cef417e2e59e2e2669ccc7ad (diff)
Removing OrigAsmgen by moving the necessary functions into Asmgen.v
Diffstat (limited to 'aarch64/Asmgen.v')
-rw-r--r--    aarch64/Asmgen.v    77
1 file changed, 77 insertions, 0 deletions
diff --git a/aarch64/Asmgen.v b/aarch64/Asmgen.v
index 94db0e22..78c3f156 100644
--- a/aarch64/Asmgen.v
+++ b/aarch64/Asmgen.v
@@ -23,6 +23,83 @@ Require Import Mach Asm Asmblock Asmblockgen Machblockgen PseudoAsmblock PseudoA
Local Open Scope error_monad_scope.
+(** Functions called by the Asmexpand OCaml file, inspired by and adapted from Asmblockgen.v *)
+
+Module Asmgen_expand.
+
+(* Load immediate *)
+
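+(* l is a list of (chunk, position) pairs, presumably produced by
+   decompose_int below: 16-bit slices of the constant together with their
+   bit offsets.  loadimm_k emits one Asm.Pmovk per pair, each patching the
+   corresponding slice of rd while keeping the other bits, and then falls
+   through to the continuation k. *)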
+Definition loadimm_k (sz: isize) (rd: ireg) (l: list (Z * Z)) (k: code) : code :=
+ List.fold_right (fun np k => Asm.Pmovk sz rd (fst np) (snd np) :: k) k l.
+
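+(* loadimm_z starts the sequence with Asm.Pmovz, which zeroes the bits of rd
+   outside its 16-bit slice; loadimm_n starts with Asm.Pmovn, which writes
+   the complement of its slice.  In the Pmovn case, l is the decomposition
+   of the complemented constant (see loadimm below), so the remaining
+   slices are complemented back by negate_decomposition before being
+   patched in with Pmovk. *)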
+Definition loadimm_z (sz: isize) (rd: ireg) (l: list (Z * Z)) (k: code) : code :=
+ match l with
+ | nil => Asm.Pmovz sz rd 0 0 :: k
+ | (n1, p1) :: l => Asm.Pmovz sz rd n1 p1 :: loadimm_k sz rd l k
+ end.
+
+Definition loadimm_n (sz: isize) (rd: ireg) (l: list (Z * Z)) (k: code) : code :=
+ match l with
+ | nil => Asm.Pmovn sz rd 0 0 :: k
+ | (n1, p1) :: l => Asm.Pmovn sz rd n1 p1 :: loadimm_k sz rd (negate_decomposition l) k
+ end.
+
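+(* loadimm materializes the constant n in rd, using at most N move
+   instructions: it compares the decomposition of n (movz/movk sequence)
+   with that of its complement (movn/movk sequence) and picks the shorter
+   one. *)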
+Definition loadimm (sz: isize) (rd: ireg) (n: Z) (k: code) : code :=
+ let N := match sz with W => 2%nat | X => 4%nat end in
+ let dz := decompose_int N n 0 in
+ let dn := decompose_int N (Z.lnot n) 0 in
+ if Nat.leb (List.length dz) (List.length dn)
+ then loadimm_z sz rd dz k
+ else loadimm_n sz rd dn k.
+
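+(* loadimm32 and loadimm64: a constant that is a valid logical immediate is
+   loaded with a single ORR against the zero register XZR; otherwise we
+   fall back to the generic loadimm sequence. *)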
+Definition loadimm32 (rd: ireg) (n: int) (k: code) : code :=
+ if is_logical_imm32 n
+ then Asm.Porrimm W rd XZR (Int.unsigned n) :: k
+ else loadimm W rd (Int.unsigned n) k.
+
+Definition loadimm64 (rd: ireg) (n: int64) (k: code) : code :=
+ if is_logical_imm64 n
+ then Asm.Porrimm X rd XZR (Int64.unsigned n) :: k
+ else loadimm X rd (Int64.unsigned n) k.
+
+(* Add immediate *)
+
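+(* addimm_aux splits n into its low 12 bits nlo and the remainder nhi.
+   Provided n fits in 24 bits (which the callers check), each non-zero part
+   fits the 12-bit, optionally shifted, immediate field of the add/sub
+   instruction passed as insn, so at most two instructions are emitted. *)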
+Definition addimm_aux (insn: iregsp -> iregsp -> Z -> instruction)
+ (rd r1: iregsp) (n: Z) (k: code) :=
+ let nlo := Zzero_ext 12 n in
+ let nhi := n - nlo in
+ if Z.eqb nhi 0 then
+ insn rd r1 nlo :: k
+ else if Z.eqb nlo 0 then
+ insn rd r1 nhi :: k
+ else
+ insn rd r1 nhi :: insn rd rd nlo :: k.
+
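+(* addimm64: if n, or its negation, fits in 24 bits, it is handled by
+   addimm_aux with immediate add/sub instructions; otherwise the constant
+   is first loaded into the temporary register X16 and then added or
+   subtracted using an extended-register operation. *)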
+Definition addimm64 (rd r1: iregsp) (n: int64) (k: code) : code :=
+ let m := Int64.neg n in
+ if Int64.eq n (Int64.zero_ext 24 n) then
+ addimm_aux (Asm.Paddimm X) rd r1 (Int64.unsigned n) k
+ else if Int64.eq m (Int64.zero_ext 24 m) then
+ addimm_aux (Asm.Psubimm X) rd r1 (Int64.unsigned m) k
+ else if Int64.lt n Int64.zero then
+ loadimm64 X16 m (Asm.Psubext rd r1 X16 (EOuxtx Int.zero) :: k)
+ else
+ loadimm64 X16 n (Asm.Paddext rd r1 X16 (EOuxtx Int.zero) :: k).
+
+(** Indexed memory accesses, used here for stores *)
+
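+(* indexed_memory_access: if ofs is representable as an immediate offset
+   for an access of size sz, use immediate addressing (ADimm); otherwise
+   load the offset into X16 and use register-register addressing (ADreg). *)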
+Definition indexed_memory_access (insn: Asm.addressing -> instruction)
+ (sz: Z) (base: iregsp) (ofs: ptrofs) (k: code) :=
+ let ofs := Ptrofs.to_int64 ofs in
+ if offset_representable sz ofs
+ then insn (ADimm base ofs) :: k
+ else loadimm64 X16 ofs (insn (ADreg base X16) :: k).
+
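+(* storeptr stores the 64-bit register src at address base + ofs. *)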
+Definition storeptr (src: ireg) (base: iregsp) (ofs: ptrofs) (k: code) :=
+ indexed_memory_access (Asm.Pstrx src) 8 base ofs k.
+
+End Asmgen_expand.
+
(** * Translation from Asmblock to assembly language
Inspired by the KVX backend (see kvx/Asm.v and kvx/Asmgen.v) *)