Diffstat (limited to 'test/CodeGen/X86/musttail-varargs.ll')
-rw-r--r--   test/CodeGen/X86/musttail-varargs.ll | 21
1 file changed, 21 insertions, 0 deletions
diff --git a/test/CodeGen/X86/musttail-varargs.ll b/test/CodeGen/X86/musttail-varargs.ll
index 1e99c14..7f105a1 100644
--- a/test/CodeGen/X86/musttail-varargs.ll
+++ b/test/CodeGen/X86/musttail-varargs.ll
@@ -1,13 +1,21 @@
; RUN: llc < %s -enable-tail-merge=0 -mtriple=x86_64-linux | FileCheck %s --check-prefix=LINUX
; RUN: llc < %s -enable-tail-merge=0 -mtriple=x86_64-windows | FileCheck %s --check-prefix=WINDOWS
+; RUN: llc < %s -enable-tail-merge=0 -mtriple=i686-windows | FileCheck %s --check-prefix=X86
; Test that we actually spill and reload all arguments in the variadic argument
; pack. Doing a normal call will clobber all argument registers, and we will
; spill around it. A simple adjustment should not require any XMM spills.
+declare void @llvm.va_start(i8*) nounwind
+
declare void(i8*, ...)* @get_f(i8* %this)
define void @f_thunk(i8* %this, ...) {
+ ; Use va_start so that we exercise the combination.
+ %ap = alloca [4 x i8*], align 16
+ %ap_i8 = bitcast [4 x i8*]* %ap to i8*
+ call void @llvm.va_start(i8* %ap_i8)
+
%fptr = call void(i8*, ...)*(i8*)* @get_f(i8* %this)
musttail call void (i8*, ...)* %fptr(i8* %this, ...)
ret void
@@ -65,6 +73,12 @@ define void @f_thunk(i8* %this, ...) {
; WINDOWS-NOT: mov{{.}}ps
; WINDOWS: jmpq *{{.*}} # TAILCALL
+; No regparms on normal x86 conventions.
+
+; X86-LABEL: _f_thunk:
+; X86: calll _get_f
+; X86: jmpl *{{.*}} # TAILCALL
+
; This thunk shouldn't require any spills and reloads, assuming the register
; allocator knows what it's doing.
@@ -82,6 +96,9 @@ define void @g_thunk(i8* %fptr_i8, ...) {
; WINDOWS-NOT: movq
; WINDOWS: jmpq *%rcx # TAILCALL
+; X86-LABEL: _g_thunk:
+; X86: jmpl *%eax # TAILCALL
+
; Do a simple multi-exit multi-bb test.
%struct.Foo = type { i1, i8*, i8* }
@@ -117,3 +134,7 @@ else:
; WINDOWS: jne
; WINDOWS: jmpq *{{.*}} # TAILCALL
; WINDOWS: jmpq *{{.*}} # TAILCALL
+; X86-LABEL: _h_thunk:
+; X86: jne
+; X86: jmpl *{{.*}} # TAILCALL
+; X86: jmpl *{{.*}} # TAILCALL
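
For reference, the new i686 RUN line added at the top of the test is executed by lit roughly as the pipeline below (a sketch assuming llc and FileCheck from an LLVM build are on PATH and the test path is given relative to the llvm source tree):

  llc < test/CodeGen/X86/musttail-varargs.ll -enable-tail-merge=0 -mtriple=i686-windows \
    | FileCheck test/CodeGen/X86/musttail-varargs.ll --check-prefix=X86

llc writes the generated i686 assembly to stdout, and FileCheck matches it against the check lines prefixed with "; X86" in the diff above; the existing LINUX and WINDOWS RUN lines expand the same way with their respective triples and prefixes.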