(** File (more or less) extracted from the official aarch64/Asmgen.v of CompCert.

TODO: prune some definitions in these files by reusing the ones of Asmblockgen that are identical,
e.g. all those which do not involve Asm.code.

**)

open Asm
open BinInt
open BinNums
open Integers
open List0
open Zbits

module I = Integers.Int
module I64 = Integers.Int64
module N = PeanoNat.Nat
   
let s_ x = Datatypes.S x
let o_ = Datatypes.O

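(* Recognizer used for AArch64 logical ("bitmask") immediates.  [run len s x]
   reads the [len] low bits of [x] from least significant to most significant
   (via [Z.odd] / [Z.div2]) and accepts exactly the bit strings of the form
   0*1*0* or 1*0*1*, i.e. those with at most one contiguous run of ones,
   possibly wrapping around the end of the element. *)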
module Automaton =
 struct
  type state =
  | SA
  | SB
  | SC
  | SD
  | SE
  | SF
  | SG
  | Sbad

  (** val start : state **)

  let start =
    SA

  (** val next : state -> bool -> state **)

  let next s b =
    match s with
    | SA -> if b then SC else SB
    | SB -> if b then SD else SB
    | SC -> if b then SC else SE
    | SD -> if b then SD else SF
    | SE -> if b then SG else SE
    | SF -> if b then Sbad else SF
    | SG -> if b then SG else Sbad
    | Sbad -> Sbad

  (** val accepting : state -> bool **)

  let accepting = function
  | Sbad -> false
  | _ -> true

  (** val run : nat -> state -> coq_Z -> bool **)

  let rec run len s x =
    match len with
    | Datatypes.O -> accepting s
    | Datatypes.S len0 -> run len0 (next s (Z.odd x)) (Z.div2 x)
 end

(** val logical_imm_length : coq_Z -> bool -> nat **)
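(* [logical_imm_length x sixtyfour] computes the replication period of [x]:
   [test n] holds when the low [n] bits of [x] coincide with the next [n] bits,
   i.e. [x] replicates its low [n]-bit element.  The Peano numerals below spell
   out the periods 64, 32, 16, 8, 4 and 2. *)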

let logical_imm_length x sixtyfour =
  let test = fun n ->
    Z.eqb (coq_Zzero_ext n x) (coq_Zzero_ext n (Z.shiftr x n))
  in
  if (&&) sixtyfour
       (Datatypes.negb
         (test (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH))))))))
  then s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
         (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
         (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
         o_)))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))
  else if Datatypes.negb (test (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH))))))
       then s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
              (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ o_)))))))))))))))))))))))))))))))
       else if Datatypes.negb (test (Zpos (Coq_xO (Coq_xO (Coq_xO Coq_xH)))))
            then s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_
                   o_)))))))))))))))
            else if Datatypes.negb (test (Zpos (Coq_xO (Coq_xO Coq_xH))))
                 then s_ (s_ (s_ (s_ (s_ (s_ (s_ (s_ o_)))))))
                 else if Datatypes.negb (test (Zpos (Coq_xO Coq_xH)))
                      then s_ (s_ (s_ (s_ o_)))
                      else s_ (s_ o_)

(** val is_logical_imm32 : I.int -> bool **)
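(* An immediate is a valid logical immediate iff it is neither 0 nor -1 and its
   replicating element (of size [logical_imm_length]) is accepted by the
   automaton above.  [is_logical_imm64] below is the 64-bit counterpart. *)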

let is_logical_imm32 x =
  (&&) ((&&) (Datatypes.negb (I.eq x I.zero)) (Datatypes.negb (I.eq x I.mone)))
    (Automaton.run (logical_imm_length (I.unsigned x) false)
      Automaton.start (I.unsigned x))

(** val is_logical_imm64 : I64.int -> bool **)

let is_logical_imm64 x =
  (&&) ((&&) (Datatypes.negb (I64.eq x I64.zero)) (Datatypes.negb (I64.eq x I64.mone)))
    (Automaton.run (logical_imm_length (I64.unsigned x) true)
      Automaton.start (I64.unsigned x))

(** val is_arith_imm32 : I.int -> bool **)
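(* Arithmetic (ADD/SUB) immediates: the positive constant
   [Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))] is 12, so these predicates check
   that the value fits in 12 bits, either as is or shifted left by 12. *)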

let is_arith_imm32 x =
  (||) (I.eq x (I.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))) x))
    (I.eq x
      (I.shl
        (I.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH))))
          (I.shru x (I.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))))
        (I.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))))

(** val is_arith_imm64 : I64.int -> bool **)

let is_arith_imm64 x =
  (||)
    (I64.eq x (I64.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))) x))
    (I64.eq x
      (I64.shl
        (I64.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH))))
          (I64.shru x (I64.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))))
        (I64.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))))

(** val decompose_int : nat -> coq_Z -> coq_Z -> (coq_Z * coq_Z) list **)
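(* [decompose_int n n0 p] splits [n0], starting at bit position [p], into at
   most [n] pairs (16-bit fragment, position), skipping fragments equal to
   zero.  The constant built from fifteen [Coq_xI] below is 0xFFFF; it is used
   to clear each fragment once it has been recorded. *)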

let rec decompose_int n n0 p =
  match n with
  | Datatypes.O -> []
  | Datatypes.S n1 ->
    let frag =
      coq_Zzero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH)))))
        (Z.shiftr n0 p)
    in
    if Z.eqb frag Z0
    then decompose_int n1 n0
           (Z.add p (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH))))))
    else (frag,
           p) :: (decompose_int n1
                   (Z.ldiff n0
                     (Z.shiftl (Zpos (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI
                       (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI
                       (Coq_xI (Coq_xI (Coq_xI (Coq_xI Coq_xH))))))))))))))))
                       p))
                   (Z.add p (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xO Coq_xH)))))))

(** val negate_decomposition :
    (coq_Z * coq_Z) list -> (coq_Z * coq_Z) list **)
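(* Complements each 16-bit fragment (XOR with 0xFFFF), so that a decomposition
   of [lnot n] can be materialized with a MOVN followed by MOVK instructions. *)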

let negate_decomposition l =
  map (fun np ->
    ((Z.coq_lxor (fst np) (Zpos (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI
       (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI (Coq_xI
       (Coq_xI (Coq_xI Coq_xH))))))))))))))))), (snd np))) l

(** val loadimm_k :
    isize -> ireg -> (coq_Z * coq_Z) list -> Asm.code -> Asm.code **)
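(* [loadimm_k] appends one MOVK ([Pmovk]) per (fragment, position) pair.
   [loadimm_z] starts the sequence with a MOVZ ([Pmovz]) and [loadimm_n] with a
   MOVN ([Pmovn]); in the MOVN case the remaining fragments are complemented
   first (see [negate_decomposition]). *)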

let loadimm_k sz rd l k =
  fold_right (fun np k0 -> (Pmovk (sz, rd, (fst np), (snd np))) :: k0) k l

(** val loadimm_z :
    isize -> ireg -> (coq_Z * coq_Z) list -> Asm.code -> Asm.code **)

let loadimm_z sz rd l k =
  match l with
  | [] -> (Pmovz (sz, rd, Z0, Z0)) :: k
  | p :: l0 ->
    let (n1, p1) = p in (Pmovz (sz, rd, n1, p1)) :: (loadimm_k sz rd l0 k)

(** val loadimm_n :
    isize -> ireg -> (coq_Z * coq_Z) list -> Asm.code -> Asm.code **)

let loadimm_n sz rd l k =
  match l with
  | [] -> (Pmovn (sz, rd, Z0, Z0)) :: k
  | p :: l0 ->
    let (n1, p1) = p in
    (Pmovn (sz, rd, n1, p1)) :: (loadimm_k sz rd (negate_decomposition l0) k)

(** val loadimm : isize -> ireg -> coq_Z -> Asm.code -> Asm.code **)
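(* Materializes the constant [n] with at most 2 (W) or 4 (X) half-word moves,
   choosing between the MOVZ-based decomposition of [n] and the MOVN-based
   decomposition of [lnot n], whichever needs fewer instructions. *)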

let loadimm sz rd n k =
  let n0 = match sz with
           | W -> s_ (s_ o_)
           | X -> s_ (s_ (s_ (s_ o_))) in
  let dz = decompose_int n0 n Z0 in
  let dn = decompose_int n0 (Z.lnot n) Z0 in
  if N.leb (Datatypes.length dz) (Datatypes.length dn)
  then loadimm_z sz rd dz k
  else loadimm_n sz rd dn k

(** val loadimm32 : ireg -> I.int -> Asm.code -> Asm.code **)
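(* If [n] is a valid logical immediate, a single ORR with the zero register
   ([Porrimm ... XZR ...]) suffices; otherwise fall back on a MOVZ/MOVN/MOVK
   sequence via [loadimm].  [loadimm64] below is the 64-bit counterpart. *)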

let loadimm32 rd n k =
  if is_logical_imm32 n
  then (Porrimm (W, rd, XZR, (I.unsigned n))) :: k
  else loadimm W rd (I.unsigned n) k

(** val loadimm64 : ireg -> I64.int -> Asm.code -> Asm.code **)

let loadimm64 rd n k =
  if is_logical_imm64 n
  then (Porrimm (X, rd, XZR, (I64.unsigned n))) :: k
  else loadimm X rd (I64.unsigned n) k

(** val addimm_aux :
    (iregsp -> iregsp -> coq_Z -> Asm.instruction) -> iregsp -> iregsp ->
    coq_Z -> Asm.code -> Asm.instruction list **)
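(* Splits [n] into its low 12 bits [nlo] and the remainder [nhi], emitting one
   or two add/sub-immediate instructions.  Callers guarantee that [n] fits in
   24 bits, so [nhi] is a 12-bit value shifted left by 12 and both parts are
   encodable. *)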

let addimm_aux insn rd r1 n k =
  let nlo = coq_Zzero_ext (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH)))) n in
  let nhi = Z.sub n nlo in
  if Z.eqb nhi Z0
  then (insn rd r1 nlo) :: k
  else if Z.eqb nlo Z0
       then (insn rd r1 nhi) :: k
       else (insn rd r1 nhi) :: ((insn rd rd nlo) :: k)

(** val addimm32 : ireg -> ireg -> I.int -> Asm.code -> Asm.code **)
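(* Adds the 32-bit constant [n] to [r1]: by immediate(s) if [n] or [-n] fits in
   24 bits, otherwise by loading the constant into the scratch register X16 and
   using a register ADD or SUB. *)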

let addimm32 rd r1 n k =
  let m = I.neg n in
  if I.eq n
       (I.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xI Coq_xH))))) n)
  then addimm_aux (fun x x0 x1 -> Paddimm (W, x, x0, x1)) (RR1 rd) (RR1 r1)
         (I.unsigned n) k
  else if I.eq m
            (I.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xI Coq_xH))))) m)
       then addimm_aux (fun x x0 x1 -> Psubimm (W, x, x0, x1)) (RR1 rd) (RR1
              r1) (I.unsigned m) k
       else if I.lt n I.zero
            then loadimm32 X16 m ((Psub (W, rd, (RR0 r1), X16, SOnone)) :: k)
            else loadimm32 X16 n ((Padd (W, rd, (RR0 r1), X16, SOnone)) :: k)

(** val addimm64 : iregsp -> iregsp -> I64.int -> Asm.code -> Asm.code **)
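(* 64-bit version of [addimm32]; since [rd] and [r1] may be SP, the fallback
   uses the extended-register forms [Paddext]/[Psubext] with a UXTX (identity)
   extension. *)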

let addimm64 rd r1 n k =
  let m = I64.neg n in
  if I64.eq n
       (I64.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xI Coq_xH))))) n)
  then addimm_aux (fun x x0 x1 -> Paddimm (X, x, x0, x1)) rd r1
         (I64.unsigned n) k
  else if I64.eq m
            (I64.zero_ext (Zpos (Coq_xO (Coq_xO (Coq_xO (Coq_xI Coq_xH)))))
              m)
       then addimm_aux (fun x x0 x1 -> Psubimm (X, x, x0, x1)) rd r1
              (I64.unsigned m) k
       else if I64.lt n I64.zero
            then loadimm64 X16 m ((Psubext (rd, r1, X16, (EOuxtx
                   I.zero))) :: k)
            else loadimm64 X16 n ((Paddext (rd, r1, X16, (EOuxtx
                   I.zero))) :: k)

(** val offset_representable : coq_Z -> I64.int -> bool **)
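(* An offset for an access of size [sz] is representable either as a 9-bit
   signed immediate (the constant below is 9), or as an unsigned immediate that
   is a multiple of [sz] and smaller than [sz] * 2^12. *)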

let offset_representable sz ofs =
  let isz = I64.repr sz in
  (||)
    (I64.eq ofs
      (I64.sign_ext (Zpos (Coq_xI (Coq_xO (Coq_xO Coq_xH)))) ofs))
    ((&&) (I64.eq (I64.modu ofs isz) I64.zero)
      (I64.ltu ofs
        (I64.shl isz (I64.repr (Zpos (Coq_xO (Coq_xO (Coq_xI Coq_xH))))))))
  
(** val indexed_memory_access :
    (Asm.addressing -> Asm.instruction) -> coq_Z -> iregsp -> Ptrofs.int ->
    Asm.code -> Asm.instruction list **)
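(* Accesses [base + ofs]: directly with an [ADimm] addressing mode when the
   offset is representable, otherwise by loading [ofs] into X16 and using the
   register-indexed [ADreg] mode. *)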

let indexed_memory_access insn sz base ofs k =
  let ofs0 = Ptrofs.to_int64 ofs in
  if offset_representable sz ofs0
  then (insn (ADimm (base, ofs0))) :: k
  else loadimm64 X16 ofs0 ((insn (ADreg (base, X16))) :: k)

(** val storeptr :
    ireg -> iregsp -> Ptrofs.int -> Asm.code -> Asm.instruction list **)
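(* Stores the 64-bit register [src] at [base + ofs] ([Pstrx]); the access size
   passed to [indexed_memory_access] is 8 bytes. *)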

let storeptr src base ofs k =
  indexed_memory_access (fun x -> Pstrx (src, x)) (Zpos (Coq_xO (Coq_xO
    (Coq_xO Coq_xH)))) base ofs k
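
(* Hypothetical usage sketch, not part of the original Asmgen.v: the binding
   below (the name [_example_addimm32] is ours) builds the code that adds the
   constant 2 to X16 with an empty continuation.  Since 2 fits in 12 bits, the
   result is a single [Paddimm] instruction. *)
let _example_addimm32 : Asm.code =
  addimm32 X16 X16 (I.repr (Zpos (Coq_xO Coq_xH))) []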