Diffstat (limited to 'aarch64/OrigAsmgen.ml')
-rw-r--r--  aarch64/OrigAsmgen.ml  282
1 file changed, 282 insertions, 0 deletions
diff --git a/aarch64/OrigAsmgen.ml b/aarch64/OrigAsmgen.ml
new file mode 100644
index 00000000..1bbcf085
--- /dev/null
+++ b/aarch64/OrigAsmgen.ml
@@ -0,0 +1,282 @@
+(** File (more or less) extracted from the official aarch64/Asmgen.v of CompCert.
+
+TODO: prune some definitions in this file by reusing those of Asmblockgen that are identical,
+e.g. all those which do not involve Asm.code.
+
+**)
+
+open Asm
+open BinInt
+open BinNums
+open Integers
+open List0
+open Zbits
+
+module I = Integers.Int
+module I64 = Integers.Int64
+module N = PeanoNat.Nat
+
+let s_ x = Datatypes.S x
+let o_ = Datatypes.O
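+
+(* [s_] and [o_] abbreviate the Peano constructors Datatypes.S and
+   Datatypes.O; they are used below to spell out the literal lengths
+   2, 4, 8, 16, 32 and 64 as unary naturals. *)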
+
+module Automaton =
+ struct
+ type state =
+ | SA
+ | SB
+ | SC
+ | SD
+ | SE
+ | SF
+ | SG
+ | Sbad
+
+ (** val start : state **)
+
+ let start =
+ SA
+
+ (** val next : state -> bool -> state **)
+
+ let next s b =
+ match s with
+ | SA -> if b then SC else SB
+ | SB -> if b then SD else SB
+ | SC -> if b then SC else SE
+ | SD -> if b then SD else SF
+ | SE -> if b then SG else SE
+ | SF -> if b then Sbad else SF
+ | SG -> if b then SG else Sbad
+ | Sbad -> Sbad
+
+ (** val accepting : state -> bool **)
+
+ let accepting = function
+ | Sbad -> false
+ | _ -> true
+
+ (** val run : nat -> state -> coq_Z -> bool **)
+
+ let rec run len s x =
+ match len with
+ | Datatypes.O -> accepting s
+ | Datatypes.S len0 -> run len0 (next s (Z.odd x)) (Z.div2 x)
+ end
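+
+(* Reading its input least-significant bit first, the automaton accepts
+   exactly the bit strings of the form 0^a 1^b 0^c or 1^a 0^b 1^c: a
+   single contiguous run of ones, up to rotation.  Illustrative runs
+   (values written in binary, not part of the extraction):
+     run 8 start 0b00111100 --> true   (one run of ones)
+     run 8 start 0b01100110 --> false  (two runs, falls into Sbad) *)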
+
+(** val logical_imm_length : coq_Z -> bool -> nat **)
+
+let logical_imm_length x sixtyfour =
+ let test = fun n ->
+ Z.eqb (coq_Zzero_ext n x) (coq_Zzero_ext n (Z.shiftr x n))
+ in
+ if (&&) sixtyfour
+ (Datatypes.negb
+ (test (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH))))))))
+ then s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
+ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
+ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
+ o_)))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))
+ else if Datatypes.negb (test (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH))))))
+ then s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
+ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ o_)))))))))))))))))))))))))))))))
+ else if Datatypes.negb (test (Zpos (Coq_xO (Coq_xO (Coq_xO Coq_xH)))))
+ then s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
+ o_)))))))))))))))
+ else if Datatypes.negb (test (Zpos (Coq_xO (Coq_xO Coq_xH))))
+ then s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ o_)))))))
+ else if Datatypes.negb (test (Zpos (Coq_xO Coq_xH)))
+ then s_ (s_ (s_ (s_ o_)))
+ else s_ (s_ o_)
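+
+(* [logical_imm_length x sixtyfour] returns the replication period of
+   the bit pattern [x]: the smallest n such that the low n bits of [x]
+   equal the next n bits, defaulting to the full width.  The positives
+   spelled out above are just the constants 32, 16, 8, 4 and 2.
+   Illustrative: 0x0F0F0F0F repeats every 8 bits, so its length is 8. *)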
+
+(** val is_logical_imm32 : I.int -> bool **)
+
+let is_logical_imm32 x =
+ (&&) ((&&) (Datatypes.negb (I.eq x I.zero)) (Datatypes.negb (I.eq x I.mone)))
+ (Automaton.run (logical_imm_length (I.unsigned x) false)
+ Automaton.start (I.unsigned x))
+
+(** val is_logical_imm64 : I64.int -> bool **)
+
+let is_logical_imm64 x =
+ (&&) ((&&) (Datatypes.negb (I64.eq x I64.zero)) (Datatypes.negb (I64.eq x I64.mone)))
+ (Automaton.run (logical_imm_length (I64.unsigned x) true)
+ Automaton.start (I64.unsigned x))
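+
+(* AArch64 "logical immediates" (AND/ORR/EOR/...) encode a contiguous
+   run of ones, possibly rotated, replicated across the whole word;
+   all-zeros and all-ones are not encodable, hence the explicit
+   comparisons with [zero] and [mone].  Illustrative 32-bit cases:
+   0x0F0F0F0F and 0xFF00FF00 are accepted, 0x12345678 is not. *)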
+
+(** val is_arith_imm32 : I.int -> bool **)
+
+let is_arith_imm32 x =
+ (||) (I.eq x (I.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))) x))
+ (I.eq x
+ (I.shl
+ (I.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH))))
+ (I.shru x (I.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))))
+ (I.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))))
+
+(** val is_arith_imm64 : I64.int -> bool **)
+
+let is_arith_imm64 x =
+ (||)
+ (I64.eq x (I64.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))) x))
+ (I64.eq x
+ (I64.shl
+ (I64.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH))))
+ (I64.shru x (I64.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))))
+ (I64.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))))
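+
+(* AArch64 arithmetic immediates (ADD/SUB) are unsigned 12-bit values,
+   optionally shifted left by 12; the positive spelled out above is the
+   constant 12.  Illustrative: 0xFFF and 0xFFF000 are accepted, 0x1001
+   is not (it straddles both halves). *)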
+
+(** val decompose_int : nat -> coq_Z -> coq_Z -> (coq_Z * coq_Z) list **)
+
+let rec decompose_int n n0 p =
+ match n with
+ | Datatypes.O -> []
+ | Datatypes.S n1 ->
+ let frag =
+ coq_Zzero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH)))))
+ (Z.shiftr n0 p)
+ in
+ if Z.eqb frag Z0
+ then decompose_int n1 n0
+ (Z.add p (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH))))))
+ else (frag,
+ p) :: (decompose_int n1
+ (Z.ldiff n0
+ (Z.shiftl (Zpos (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI
+ (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI
+ (Coq_xI (Coq_xI (Coq_xI (Coq_xI Coq_xH))))))))))))))))
+ p))
+ (Z.add p (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH)))))))
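+
+(* [decompose_int n x p] cuts [x] into at most [n] non-zero 16-bit
+   fragments paired with their bit offsets, scanning upwards from
+   offset [p]; the 16-xI positive above is the mask 0xFFFF.
+   Illustrative run (hexadecimal notation for coq_Z values):
+     decompose_int 2 0x12345678 0 = [(0x5678, 0); (0x1234, 16)]
+   Each pair feeds one movz/movk instruction below. *)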
+
+(** val negate_decomposition :
+ (coq_Z * coq_Z) list -> (coq_Z * coq_Z) list **)
+
+let negate_decomposition l =
+ map (fun np ->
+ ((Z.coq_lxor (fst np) (Zpos (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI
+ (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI
+ (Coq_xI (Coq_xI Coq_xH))))))))))))))))), (snd np))) l
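+
+(* A movn-based sequence loads the first halfword complemented, so the
+   remaining movk fragments must be complemented too: XORing a fragment
+   of [lnot n] with 0xFFFF recovers the corresponding halfword of [n]
+   itself. *)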
+
+(** val loadimm_k :
+ isize -> ireg -> (coq_Z * coq_Z) list -> Asm.code -> Asm.code **)
+
+let loadimm_k sz rd l k =
+ fold_right (fun np k0 -> (Pmovk (sz, rd, (fst np), (snd np))) :: k0) k l
+
+(** val loadimm_z :
+ isize -> ireg -> (coq_Z * coq_Z) list -> Asm.code -> Asm.code **)
+
+let loadimm_z sz rd l k =
+ match l with
+ | [] -> (Pmovz (sz, rd, Z0, Z0)) :: k
+ | p :: l0 ->
+ let (n1, p1) = p in (Pmovz (sz, rd, n1, p1)) :: (loadimm_k sz rd l0 k)
+
+(** val loadimm_n :
+ isize -> ireg -> (coq_Z * coq_Z) list -> Asm.code -> Asm.code **)
+
+let loadimm_n sz rd l k =
+ match l with
+ | [] -> (Pmovn (sz, rd, Z0, Z0)) :: k
+ | p :: l0 ->
+ let (n1, p1) = p in
+ (Pmovn (sz, rd, n1, p1)) :: (loadimm_k sz rd (negate_decomposition l0) k)
+
+(** val loadimm : isize -> ireg -> coq_Z -> Asm.code -> Asm.code **)
+
+let loadimm sz rd n k =
+ let n0 = match sz with
+ | W -> s_ (s_ o_)
+ | X -> s_ (s_ (s_ (s_ o_))) in
+ let dz = decompose_int n0 n Z0 in
+ let dn = decompose_int n0 (Z.lnot n) Z0 in
+ if N.leb (Datatypes.length dz) (Datatypes.length dn)
+ then loadimm_z sz rd dz k
+ else loadimm_n sz rd dn k
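+
+(* Constant materialization: decompose both [n] (movz/movk chain) and
+   its complement (movn/movk chain) into 16-bit halfwords -- 2 for W,
+   4 for X -- and keep the shorter sequence.  Illustrative: for
+   n = 0xFFFFFFFFFFFF1234 the direct decomposition has 4 fragments,
+   but [lnot n] = 0xEDCB has only 1, so a single movn suffices. *)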
+
+(** val loadimm32 : ireg -> I.int -> Asm.code -> Asm.code **)
+
+let loadimm32 rd n k =
+ if is_logical_imm32 n
+ then (Porrimm (W, rd, XZR, (I.unsigned n))) :: k
+ else loadimm W rd (I.unsigned n) k
+
+(** val loadimm64 : ireg -> I64.int -> Asm.code -> Asm.code **)
+
+let loadimm64 rd n k =
+ if is_logical_imm64 n
+ then (Porrimm (X, rd, XZR, (I64.unsigned n))) :: k
+ else loadimm X rd (I64.unsigned n) k
+
+(** val addimm_aux :
+ (iregsp -> iregsp -> coq_Z -> Asm.instruction) -> iregsp -> iregsp ->
+ coq_Z -> Asm.code -> Asm.instruction list **)
+
+let addimm_aux insn rd r1 n k =
+ let nlo = coq_Zzero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))) n in
+ let nhi = Z.sub n nlo in
+ if Z.eqb nhi Z0
+ then (insn rd r1 nlo) :: k
+ else if Z.eqb nlo Z0
+ then (insn rd r1 nhi) :: k
+ else (insn rd r1 nhi) :: ((insn rd rd nlo) :: k)
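+
+(* At most two add/sub-immediate instructions: [nlo] is the low 12 bits
+   of [n] and [nhi] the remainder, a multiple of 4096 that fits the
+   LSL-#12 immediate form.  Illustrative: n = 0x123456 gives
+   nhi = 0x123000 and nlo = 0x456. *)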
+
+(** val addimm32 : ireg -> ireg -> I.int -> Asm.code -> Asm.code **)
+
+let addimm32 rd r1 n k =
+ let m = I.neg n in
+ if I.eq n
+ (I.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xI Coq_xH))))) n)
+ then addimm_aux (fun x x0 x1 -> Paddimm (W, x, x0, x1)) (RR1 rd) (RR1 r1)
+ (I.unsigned n) k
+ else if I.eq m
+ (I.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xI Coq_xH))))) m)
+ then addimm_aux (fun x x0 x1 -> Psubimm (W, x, x0, x1)) (RR1 rd) (RR1
+ r1) (I.unsigned m) k
+ else if I.lt n I.zero
+ then loadimm32 X16 m ((Psub (W, rd, (RR0 r1), X16, SOnone)) :: k)
+ else loadimm32 X16 n ((Padd (W, rd, (RR0 r1), X16, SOnone)) :: k)
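+
+(* When neither [n] nor [-n] fits the 24-bit add/sub-immediate range,
+   the constant is first loaded into X16, which serves as a scratch
+   register throughout this file. *)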
+
+(** val addimm64 : iregsp -> iregsp -> I64.int -> Asm.code -> Asm.code **)
+
+let addimm64 rd r1 n k =
+ let m = I64.neg n in
+ if I64.eq n
+ (I64.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xI Coq_xH))))) n)
+ then addimm_aux (fun x x0 x1 -> Paddimm (X, x, x0, x1)) rd r1
+ (I64.unsigned n) k
+ else if I64.eq m
+ (I64.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))
+ m)
+ then addimm_aux (fun x x0 x1 -> Psubimm (X, x, x0, x1)) rd r1
+ (I64.unsigned m) k
+ else if I64.lt n I64.zero
+ then loadimm64 X16 m ((Psubext (rd, r1, X16, (EOuxtx
+ I.zero))) :: k)
+ else loadimm64 X16 n ((Paddext (rd, r1, X16, (EOuxtx
+ I.zero))) :: k)
+
+(** val offset_representable : coq_Z -> I64.int -> bool **)
+
+let offset_representable sz ofs =
+ let isz = I64.repr sz in
+ (||)
+ (I64.eq ofs
+ (I64.sign_ext (Zpos (Coq_xI (Coq_xO (Coq_xO Coq_xH)))) ofs))
+ ((&&) (I64.eq (I64.modu ofs isz) I64.zero)
+ (I64.ltu ofs
+ (I64.shl isz (I64.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH))))))))
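+
+(* A load/store offset is directly encodable if it fits the signed
+   9-bit unscaled form, or if it is a multiple of the access size [sz]
+   and, once scaled, fits the unsigned 12-bit form (ofs < sz * 2^12).
+   Illustrative, for sz = 8: any offset in -256..255, or any multiple
+   of 8 below 32768. *)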
+
+(** val indexed_memory_access :
+ (Asm.addressing -> Asm.instruction) -> coq_Z -> iregsp -> Ptrofs.int ->
+ Asm.code -> Asm.instruction list **)
+
+let indexed_memory_access insn sz base ofs k =
+ let ofs0 = Ptrofs.to_int64 ofs in
+ if offset_representable sz ofs0
+ then (insn (ADimm (base, ofs0))) :: k
+ else loadimm64 X16 ofs0 ((insn (ADreg (base, X16))) :: k)
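+
+(* Offsets that are not representable are first materialized in X16,
+   falling back to the register-indexed addressing mode ADreg. *)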
+
+(** val storeptr :
+ ireg -> iregsp -> Ptrofs.int -> Asm.code -> Asm.instruction list **)
+
+let storeptr src base ofs k =
+ indexed_memory_access (fun x -> Pstrx (src, x)) (Zpos (Coq_xO (Coq_xO
+ (Coq_xO Coq_xH)))) base ofs k
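+
+(* Pointer store: a 64-bit str (Pstrx), hence the access size of 8
+   bytes passed to [indexed_memory_access]. *)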