/*
 * Copyright (C) 2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#include <stdint.h>

namespace WTF {
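
// currentStackPointer() returns the stack pointer of its caller: either the
// exact value, via an always-inlined asm read of the stack pointer register
// (release builds on supported toolchains), or a close approximation from the
// generic out-of-line fallback below.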

#if defined(NDEBUG) && COMPILER(GCC_COMPATIBLE) \
    && (CPU(X86_64) || CPU(X86) || CPU(ARM64) || CPU(ARM_THUMB2) || CPU(ARM_TRADITIONAL))

// We can only use the inline asm implementation on release builds because it
// must actually be inlined into its caller to be correct: debug builds commonly
// disable inlining, and a non-inlined call would read the stack pointer of
// currentStackPointer()'s own frame, below the caller's frame, rather than the
// caller's stack pointer.
ALWAYS_INLINE void* currentStackPointer()
{
    void* stackPointer = nullptr;
#if CPU(X86_64)
    __asm__ volatile ("movq %%rsp, %0" : "=r" (stackPointer) ::);
#elif CPU(X86)
    __asm__ volatile ("movl %%esp, %0" : "=r" (stackPointer) ::);
#elif CPU(ARM64) && defined(__ILP32__)
    // Under ILP32, sp is still a 64-bit register: read it into a 64-bit
    // temporary, then narrow through uintptr_t to the 32-bit pointer type.
    uint64_t stackPointerRegister = 0;
    __asm__ volatile ("mov %0, sp" : "=r" (stackPointerRegister) ::);
    stackPointer = reinterpret_cast<void*>(static_cast<uintptr_t>(stackPointerRegister));
#elif CPU(ARM64) || CPU(ARM_THUMB2) || CPU(ARM_TRADITIONAL)
    __asm__ volatile ("mov %0, sp" : "=r" (stackPointer) ::);
#endif
    return stackPointer;
}
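
// Illustrative sketch only, not part of this header: callers typically compare
// the returned value against a precomputed bound to detect imminent stack
// exhaustion. The names isNearStackLimit and stackLimit are hypothetical, and
// the comparison relies on the stack growing downward on every CPU handled
// above.
//
//     ALWAYS_INLINE bool isNearStackLimit(void* stackLimit)
//     {
//         return currentStackPointer() <= stackLimit;
//     }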

#else

#define USE_GENERIC_CURRENT_STACK_POINTER 1
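// Out-of-line fallback, defined outside this header. A generic implementation
// cannot read the stack pointer register directly; it typically settles for a
// close approximation, such as the address of a local variable in a
// non-inlined function, which is sufficient for stack-bound checks.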
WTF_EXPORT_PRIVATE void* currentStackPointer();

#endif

} // namespace WTF

using WTF::currentStackPointer;