From 63e86a3ca3f3899981a5f5ea8201712e76aba0e1 Mon Sep 17 00:00:00 2001
From: Nikita Popov
Date: Thu, 11 Dec 2025 09:47:43 +0100
Subject: [PATCH] [FastISel] Don't force SDAG fallback for libcalls

The fast instruction selector should not force an SDAG fallback just to
potentially make use of optimized libcall implementations.

Note that clang already directly emits intrinsics for most of these
anyway, so there should be little change in practice.
---
 llvm/lib/CodeGen/SelectionDAG/FastISel.cpp       | 8 --------
 llvm/test/CodeGen/X86/stack-protector-msvc-oz.ll | 7 +++----
 llvm/test/CodeGen/X86/stack-protector-msvc.ll    | 7 +++----
 3 files changed, 6 insertions(+), 16 deletions(-)

diff --git a/llvm/lib/CodeGen/SelectionDAG/FastISel.cpp b/llvm/lib/CodeGen/SelectionDAG/FastISel.cpp
index 5c84059da273b..51391f1aeecde 100644
--- a/llvm/lib/CodeGen/SelectionDAG/FastISel.cpp
+++ b/llvm/lib/CodeGen/SelectionDAG/FastISel.cpp
@@ -1565,14 +1565,6 @@ bool FastISel::selectInstruction(const Instruction *I) {
 
   if (const auto *Call = dyn_cast<CallInst>(I)) {
     const Function *F = Call->getCalledFunction();
-    LibFunc Func;
-
-    // As a special case, don't handle calls to builtin library functions that
-    // may be translated directly to target instructions.
-    if (F && !F->hasLocalLinkage() && F->hasName() &&
-        LibInfo->getLibFunc(F->getName(), Func) &&
-        LibInfo->hasOptimizedCodeGen(Func))
-      return false;
 
     // Don't handle Intrinsic::trap if a trap function is specified.
     if (F && F->getIntrinsicID() == Intrinsic::trap &&
diff --git a/llvm/test/CodeGen/X86/stack-protector-msvc-oz.ll b/llvm/test/CodeGen/X86/stack-protector-msvc-oz.ll
index d8a772efbd7ed..4ab23cdde74c6 100644
--- a/llvm/test/CodeGen/X86/stack-protector-msvc-oz.ll
+++ b/llvm/test/CodeGen/X86/stack-protector-msvc-oz.ll
@@ -63,11 +63,10 @@ define void @test(ptr %a) nounwind ssp minsize {
 ; MSVC-X86-O0-NEXT:    movl ___security_cookie, %eax
 ; MSVC-X86-O0-NEXT:    xorl %esp, %eax
 ; MSVC-X86-O0-NEXT:    movl %eax, {{[0-9]+}}(%esp)
-; MSVC-X86-O0-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; MSVC-X86-O0-NEXT:    movl %esp, %eax
-; MSVC-X86-O0-NEXT:    movl %ecx, 4(%eax)
+; MSVC-X86-O0-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; MSVC-X86-O0-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; MSVC-X86-O0-NEXT:    movl %ecx, (%eax)
+; MSVC-X86-O0-NEXT:    movl %ecx, (%esp)
+; MSVC-X86-O0-NEXT:    movl %eax, {{[0-9]+}}(%esp)
 ; MSVC-X86-O0-NEXT:    calll _strcpy
 ; MSVC-X86-O0-NEXT:    leal LC, %ecx
 ; MSVC-X86-O0-NEXT:    leal {{[0-9]+}}(%esp), %eax
diff --git a/llvm/test/CodeGen/X86/stack-protector-msvc.ll b/llvm/test/CodeGen/X86/stack-protector-msvc.ll
index a868fa549296d..3109733e0b0b7 100644
--- a/llvm/test/CodeGen/X86/stack-protector-msvc.ll
+++ b/llvm/test/CodeGen/X86/stack-protector-msvc.ll
@@ -75,11 +75,10 @@ define void @test(ptr %a) nounwind ssp {
 ; MSVC-X86-O0-NEXT:    movl ___security_cookie, %eax
 ; MSVC-X86-O0-NEXT:    xorl %esp, %eax
 ; MSVC-X86-O0-NEXT:    movl %eax, {{[0-9]+}}(%esp)
-; MSVC-X86-O0-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; MSVC-X86-O0-NEXT:    movl %esp, %eax
-; MSVC-X86-O0-NEXT:    movl %ecx, 4(%eax)
+; MSVC-X86-O0-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; MSVC-X86-O0-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; MSVC-X86-O0-NEXT:    movl %ecx, (%eax)
+; MSVC-X86-O0-NEXT:    movl %ecx, (%esp)
+; MSVC-X86-O0-NEXT:    movl %eax, {{[0-9]+}}(%esp)
 ; MSVC-X86-O0-NEXT:    calll _strcpy
 ; MSVC-X86-O0-NEXT:    leal LC, %ecx
 ; MSVC-X86-O0-NEXT:    leal {{[0-9]+}}(%esp), %eax
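
Note (reviewer-style sketch, not part of the patch): a minimal LLVM IR example of the
kind of call this change affects. The IR and the llc invocation below are illustrative
assumptions rather than material from the patch's tests; strcpy is chosen because
TargetLibraryInfo::hasOptimizedCodeGen() reports true for it, so the removed check
previously made FastISel::selectInstruction() return false for such a call and fall
back to SelectionDAG even at -O0.

  ; Hypothetical reduced example; try e.g.:
  ;   llc -O0 -fast-isel -mtriple=i686-pc-windows-msvc strcpy-call.ll
  declare ptr @strcpy(ptr, ptr)

  define void @copy(ptr %dst, ptr %src) {
  entry:
    ; Before this patch, FastISel bailed out on this instruction and the
    ; SelectionDAG path selected the call; after it, FastISel handles the
    ; call itself (it can still fall back for unrelated reasons).
    %r = call ptr @strcpy(ptr %dst, ptr %src)
    ret void
  }

With the bail-out removed, the -O0 output for calls like this differs only in how the
outgoing arguments are materialized, which is what the two test updates above reflect.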