// typedef void * va_list;
// unsigned int __compcert_va_int32(va_list * ap);
// unsigned long long __compcert_va_int64(va_list * ap);
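//
// Prototypes assumed by analogy with other CompCert targets (they are
// not declared in this file):
// double __compcert_va_float64(va_list * ap);
// void * __compcert_va_composite(va_list * ap);
//
// Usage sketch: CompCert lowers each va_arg(ap, T) in a variadic
// function to a call to the matching helper, roughly as if va_arg(ap, int)
// were rewritten as __compcert_va_int32(&ap). The example below is
// illustrative only and is not part of the runtime:
//
// int sum(int n, ...) {
//   va_list ap;
//   va_start(ap, n);
//   int total = 0;
//   for (int i = 0; i < n; i++)
//     total += va_arg(ap, int); // compiled to __compcert_va_int32(&ap)
//   va_end(ap);
//   return total;
// }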
.text
.balign 2
.globl __compcert_va_int32
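# __compcert_va_int32: advance *ap past one 8-byte argument slot and
# return the 32-bit value stored at the start of that slot.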
__compcert_va_int32:
# Prologue
addd $r14 = $r12, 0
;;
addd $r12 = $r12, -16
;;
sd 0[$r12] = $r14
;;
get $r16 = $ra
;;
sd 8[$r12] = $r16
;;
# Body
ld $r32 = 0[$r0] # $r32 <- *ap
;;
addd $r32 = $r32, 8 # $r32 <- $r32 + WORDSIZE
;;
sd 0[$r0] = $r32 # *ap <- $r32
;;
lws $r0 = -8[$r32] # retvalue <- 32-bits at *ap - WORDSIZE
;;
# Epilogue
ld $r16 = 8[$r12]
;;
set $ra = $r16
;;
addd $r12 = $r12, 16
;;
ret
;;
.text
.balign 2
.globl __compcert_va_int64
.globl __compcert_va_float64
.globl __compcert_va_composite
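# int64, float64, and composite arguments each occupy one 8-byte slot,
# so the three entry points share a single body. For composites this
# relies on pass-by-reference (see the FIXME below): the loaded slot
# holds the pointer to the object, not the object itself.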
__compcert_va_int64:
__compcert_va_float64:
# FIXME this assumes pass-by-reference
__compcert_va_composite:
# Prologue
addd $r14 = $r12, 0
;;
addd $r12 = $r12, -16
;;
sd 0[$r12] = $r14
;;
get $r16 = $ra
;;
sd 8[$r12] = $r16
;;
# Body
ld $r32 = 0[$r0] # $r32 <- *ap
;;
addd $r32 = $r32, 8 # $r32 <- $r32 + WORDSIZE
;;
sd 0[$r0] = $r32 # *ap <- $r32
;;
ld $r0 = -8[$r32] # retvalue <- 64-bits at *ap - WORDSIZE
;;
# Epilogue
ld $r16 = 8[$r12]
;;
set $ra = $r16
;;
addd $r12 = $r12, 16
;;
ret
;;
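# Helper for 64-bit atomic compare-and-swap. The register convention
# below is inferred from the code, not documented in this file: $r0 =
# address of a 128-bit result buffer, $r1 = address of the atomic
# location, $r2r3 = operand pair for the acswapd instruction.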
.globl __compcert_acswapd
__compcert_acswapd:
acswapd 0[$r1] = $r2r3 # atomic compare-and-swap on the doubleword at 0[$r1]; the pair $r2r3 supplies the operands and receives the result
;;
sq 0[$r0] = $r2r3 # store the resulting pair (one quadword) to the buffer at 0[$r0]
ret
;;
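# Helper for 32-bit atomic compare-and-swap; same register convention
# as __compcert_acswapd above, using the word variant of the instruction.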
.globl __compcert_acswapw
__compcert_acswapw:
acswapw 0[$r1] = $r2r3 # atomic compare-and-swap on the word at 0[$r1]; the pair $r2r3 supplies the operands and receives the result
;;
sq 0[$r0] = $r2r3 # store the resulting pair (one quadword) to the buffer at 0[$r0]
ret
;;