summaryrefslogtreecommitdiff
path: root/test/CodeGen/X86/x86-64-ms_abi-vararg.ll
blob: e3436521a5bda9e17b2f3fab76593ed1b3afff6b (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
; RUN: llc < %s -mcpu=generic -mtriple=x86_64-pc-linux-gnu | FileCheck %s

; Verify that, for the Win64 calling convention (exercised via
; x86_64_win64cc even though the triple is Linux), var arg parameters
; passed in registers are stored in the home stack slots allocated by the
; caller, and that AP (the va_list pointer) is computed to point at the
; first unnamed argument.
define x86_64_win64cc void @average_va(i32 %count, ...) nounwind {
entry:
; Win64 passes the first four integer args in rcx, rdx, r8, r9.  %count
; occupies rcx, so the unnamed register args arrive in rdx, r8, r9 and
; must be spilled to their caller-allocated home slots.  After the single
; pushq of the prologue, the rdx home slot is at 24(%rsp), so va_start
; must produce AP = 24(%rsp), the address of the first unnamed argument.
; CHECK: pushq
; CHECK: movq   %r9, 40(%rsp)
; CHECK: movq   %r8, 32(%rsp)
; CHECK: movq   %rdx, 24(%rsp)
; CHECK: leaq   24(%rsp), %rax

  %ap = alloca i8*, align 8                       ; <i8**> [#uses=1]
  %ap.0 = bitcast i8** %ap to i8*
  call void @llvm.va_start(i8* %ap.0)
  ret void
}

; Vararg intrinsics exercised by the functions below.  For the Win64 ABI,
; va_list is a plain i8* (char*) pointing at the next unnamed argument.
declare void @llvm.va_start(i8*) nounwind
declare void @llvm.va_copy(i8*, i8*) nounwind
declare void @llvm.va_end(i8*) nounwind

; Five named args: four in registers, the fifth in the caller's stack slot
; directly above the 32-byte home area, so the unnamed args begin at
; entry-rsp+48; the prologue's single pushq shifts that to 56(%rsp).
; CHECK-LABEL: f5:
; CHECK: pushq
; CHECK: leaq 56(%rsp),
define x86_64_win64cc i8** @f5(i64 %a0, i64 %a1, i64 %a2, i64 %a3, i64 %a4, ...) nounwind {
entry:
  %ap = alloca i8*, align 8
  %ap.0 = bitcast i8** %ap to i8*
  call void @llvm.va_start(i8* %ap.0)
  ; Return the address of the va_list slot so the computed AP is observable.
  ret i8** %ap
}

; Four named args fill exactly the four register slots, so the unnamed args
; begin just past the home area at entry-rsp+40; after the pushq that is
; 48(%rsp).
; CHECK-LABEL: f4:
; CHECK: pushq
; CHECK: leaq 48(%rsp),
define x86_64_win64cc i8** @f4(i64 %a0, i64 %a1, i64 %a2, i64 %a3, ...) nounwind {
entry:
  %ap = alloca i8*, align 8
  %ap.0 = bitcast i8** %ap to i8*
  call void @llvm.va_start(i8* %ap.0)
  ; Return the address of the va_list slot so the computed AP is observable.
  ret i8** %ap
}

; Three named args: the first unnamed argument lands in the r9 home slot at
; entry-rsp+32; after the pushq that is 40(%rsp).
; CHECK-LABEL: f3:
; CHECK: pushq
; CHECK: leaq 40(%rsp),
define x86_64_win64cc i8** @f3(i64 %a0, i64 %a1, i64 %a2, ...) nounwind {
entry:
  %ap = alloca i8*, align 8
  %ap.0 = bitcast i8** %ap to i8*
  call void @llvm.va_start(i8* %ap.0)
  ; Return the address of the va_list slot so the computed AP is observable.
  ret i8** %ap
}

; WinX86_64 uses char* for va_list. Verify that the correct number of bytes
; (a single pointer-sized copy) is copied by va_copy.

; One named arg: AP points at 32(%rsp) in this frame.  Because va_list is a
; bare char*, va_copy is a single 8-byte store: the same leaq result is
; written both to %ap (by va_start) and to %cp (by va_copy).
; CHECK-LABEL: copy1:
; CHECK: leaq 32(%rsp), [[REG_copy1:%[a-z]+]]
; CHECK: movq [[REG_copy1]], 8(%rsp)
; CHECK: movq [[REG_copy1]], (%rsp)
; CHECK: ret
define x86_64_win64cc void @copy1(i64 %a0, ...) nounwind {
entry:
  %ap = alloca i8*, align 8
  %cp = alloca i8*, align 8
  %ap.0 = bitcast i8** %ap to i8*
  %cp.0 = bitcast i8** %cp to i8*
  call void @llvm.va_start(i8* %ap.0)
  call void @llvm.va_copy(i8* %cp.0, i8* %ap.0)
  ret void
}

; Same as copy1 but with four named args, so AP is further up the frame at
; 56(%rsp); va_copy is still a single pointer-sized store of the same value
; written to both va_list slots.
; CHECK-LABEL: copy4:
; CHECK: leaq 56(%rsp), [[REG_copy4:%[a-z]+]]
; CHECK: movq [[REG_copy4]], 8(%rsp)
; CHECK: movq [[REG_copy4]], (%rsp)
; CHECK: ret
define x86_64_win64cc void @copy4(i64 %a0, i64 %a1, i64 %a2, i64 %a3, ...) nounwind {
entry:
  %ap = alloca i8*, align 8
  %cp = alloca i8*, align 8
  %ap.0 = bitcast i8** %ap to i8*
  %cp.0 = bitcast i8** %cp to i8*
  call void @llvm.va_start(i8* %ap.0)
  call void @llvm.va_copy(i8* %cp.0, i8* %ap.0)
  ret void
}

; va_start points AP at the first unnamed argument (48(%rsp) in this frame);
; va_arg then stores the advanced pointer (48 + 4 = 52(%rsp) — advanced by
; the size of i32 here) back into the va_list slot and loads the i32 value
; from the old AP.
; CHECK-LABEL: arg4:
; va_start:
; CHECK: leaq 48(%rsp), [[REG_arg4_1:%[a-z]+]]
; CHECK: movq [[REG_arg4_1]], (%rsp)
; va_arg:
; CHECK: leaq 52(%rsp), [[REG_arg4_2:%[a-z]+]]
; CHECK: movq [[REG_arg4_2]], (%rsp)
; CHECK: movl 48(%rsp), %eax
; CHECK: ret
define x86_64_win64cc i32 @arg4(i64 %a0, i64 %a1, i64 %a2, i64 %a3, ...) nounwind {
entry:
  %ap = alloca i8*, align 8
  %ap.0 = bitcast i8** %ap to i8*
  call void @llvm.va_start(i8* %ap.0)
  ; Read one i32 vararg; its value is the function's return value.
  %tmp = va_arg i8** %ap, i32
  ret i32 %tmp
}