/*
 * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "WebAssemblyFunction.h"

#if ENABLE(WEBASSEMBLY)

#include "B3Compilation.h"
#include "JSCInlines.h"
#include "JSFunctionInlines.h"
#include "JSObject.h"
#include "JSWebAssemblyHelpers.h"
#include "JSWebAssemblyInstance.h"
#include "JSWebAssemblyMemory.h"
#include "JSWebAssemblyRuntimeError.h"
#include "LLIntThunks.h"
#include "LinkBuffer.h"
#include "ProtoCallFrame.h"
#include "VM.h"
#include "WasmCallee.h"
#include "WasmCallingConvention.h"
#include "WasmContextInlines.h"
#include "WasmFormat.h"
#include "WasmMemory.h"
#include "WasmMemoryInformation.h"
#include "WasmModuleInformation.h"
#include "WasmSignatureInlines.h"
#include <wtf/FastTLS.h>
#include <wtf/StackPointer.h>
#include <wtf/SystemTracing.h>

namespace JSC {

const ClassInfo WebAssemblyFunction::s_info = { "WebAssemblyFunction", &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(WebAssemblyFunction) };

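// Generic (slow) JS entry point for calling an exported wasm function: it boxes the
// incoming JS arguments, builds a ProtoCallFrame, installs the wasm instance as the
// current context, and calls the function's JS->wasm entry thunk.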
static EncodedJSValue JSC_HOST_CALL callWebAssemblyFunction(ExecState* exec)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    WebAssemblyFunction* wasmFunction = jsCast<WebAssemblyFunction*>(exec->jsCallee());
    Wasm::SignatureIndex signatureIndex = wasmFunction->signatureIndex();
    const Wasm::Signature& signature = Wasm::SignatureInformation::get(signatureIndex);

    // Make sure that the memory we think we are going to run with matches the one we expect.
    ASSERT(wasmFunction->instance()->instance().codeBlock()->isSafeToRun(wasmFunction->instance()->memory()->memory().mode()));

    Optional<TraceScope> traceScope;
    if (Options::useTracePoints())
        traceScope.emplace(WebAssemblyExecuteStart, WebAssemblyExecuteEnd);

    Vector<JSValue, MarkedArgumentBuffer::inlineCapacity> boxedArgs;
    JSWebAssemblyInstance* instance = wasmFunction->instance();
    Wasm::Instance* wasmInstance = &instance->instance();
    // When we don't use fast TLS to store the context, the JS
    // entry wrapper expects a JSWebAssemblyInstance as the first argument.
    if (!Wasm::Context::useFastTLS())
        boxedArgs.append(instance);

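    // Convert each JS argument into the representation the wasm entry thunk expects.
    // For numeric types the JSValue slot is reused as an untyped 64-bit container
    // holding the raw bits; reference types are passed as ordinary JSValues.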
    for (unsigned argIndex = 0; argIndex < signature.argumentCount(); ++argIndex) {
        JSValue arg = exec->argument(argIndex);
        switch (signature.argument(argIndex)) {
        case Wasm::I32:
            arg = JSValue::decode(arg.toInt32(exec));
            break;
        case Wasm::Funcref: {
            if (!isWebAssemblyHostFunction(vm, arg) && !arg.isNull())
                return JSValue::encode(throwException(exec, scope, createJSWebAssemblyRuntimeError(exec, vm, "Funcref must be an exported wasm function")));
            break;
        }
        case Wasm::Anyref:
            break;
        case Wasm::I64:
            arg = JSValue();
            break;
        case Wasm::F32:
            arg = JSValue::decode(bitwise_cast<uint32_t>(arg.toFloat(exec)));
            break;
        case Wasm::F64:
            arg = JSValue::decode(bitwise_cast<uint64_t>(arg.toNumber(exec)));
            break;
        case Wasm::Void:
        case Wasm::Func:
            RELEASE_ASSERT_NOT_REACHED();
        }
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        boxedArgs.append(arg);
    }

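    // ProtoCallFrame::init takes the first argument separately from the rest, so peel
    // off boxedArgs[0] (the instance when fast TLS is not in use, otherwise the first
    // wasm argument) and pass the remainder as the argument array.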
    JSValue firstArgument = JSValue();
    int argCount = 1;
    JSValue* remainingArgs = nullptr;
    if (boxedArgs.size()) {
        remainingArgs = boxedArgs.data();
        firstArgument = *remainingArgs;
        remainingArgs++;
        argCount = boxedArgs.size();
    }

    // Note: we specifically use the WebAssemblyFunction as the callee to begin with in the ProtoCallFrame.
    // The reason for this is that calling into the llint may stack overflow, and the stack overflow
    // handler might read the global object from the callee.
    ProtoCallFrame protoCallFrame;
    protoCallFrame.init(nullptr, wasmFunction, firstArgument, argCount, remainingArgs);

    // FIXME Do away with this entire function, and only use the entrypoint generated by B3. https://bugs.webkit.org/show_bug.cgi?id=166486
    Wasm::Instance* prevWasmInstance = vm.wasmContext.load();
    {
        // We do the stack check here for the wrapper function because we don't
        // want to emit a stack check inside every wrapper function.
        const intptr_t sp = bitwise_cast<intptr_t>(currentStackPointer());
        const intptr_t frameSize = (boxedArgs.size() + CallFrame::headerSizeInRegisters) * sizeof(Register);
        const intptr_t stackSpaceUsed = 2 * frameSize; // We're making two calls. One to the wrapper, and one to the actual wasm code.
        if (UNLIKELY((sp < stackSpaceUsed) || ((sp - stackSpaceUsed) < bitwise_cast<intptr_t>(vm.softStackLimit()))))
            return JSValue::encode(throwException(exec, scope, createStackOverflowError(exec)));
    }
    vm.wasmContext.store(wasmInstance, vm.softStackLimit());
    ASSERT(wasmFunction->instance());
    ASSERT(&wasmFunction->instance()->instance() == vm.wasmContext.load());
    EncodedJSValue rawResult = vmEntryToWasm(wasmFunction->jsEntrypoint(MustCheckArity).executableAddress(), &vm, &protoCallFrame);
    // We need to make sure this is in a register or on the stack since it's stored in Vector<JSValue>.
    // This probably isn't strictly necessary, since the WebAssemblyFunction* should keep the instance
    // alive. But it's good hygiene.
    instance->use();
    if (prevWasmInstance != wasmInstance) {
        // This is just for some extra safety instead of leaving a cached
        // value in there. If we ever forget to set the value to be a real
        // bounds, this will force every stack overflow check to immediately
        // fire. The stack limit never changes while executing except when
        // WebAssembly is used through the JSC API: API users can ask the code
        // to migrate threads.
        wasmInstance->setCachedStackLimit(bitwise_cast<void*>(std::numeric_limits<uintptr_t>::max()));
    }
    vm.wasmContext.store(prevWasmInstance, vm.softStackLimit());
    RETURN_IF_EXCEPTION(scope, { });

    return rawResult;
}

bool WebAssemblyFunction::useTagRegisters() const
{
    const auto& signature = Wasm::SignatureInformation::get(signatureIndex());
    return signature.argumentCount() || signature.returnType() != Wasm::Void;
}

RegisterSet WebAssemblyFunction::calleeSaves() const
{
    RegisterSet toSave = Wasm::PinnedRegisterInfo::get().toSave(instance()->memoryMode());
    if (useTagRegisters()) {
        RegisterSet tagRegisters = RegisterSet::runtimeTagRegisters();
        // We rely on these being disjoint sets.
#if !ASSERT_DISABLED
        for (Reg reg : tagRegisters)
            ASSERT(!toSave.contains(reg));
#endif
        toSave.merge(tagRegisters);
    }
    return toSave;
}

RegisterAtOffsetList WebAssemblyFunction::usedCalleeSaveRegisters() const
{
    return RegisterAtOffsetList { calleeSaves(), RegisterAtOffsetList::OffsetBaseType::FramePointerBased };
}

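// Frame-pointer-relative offset of the stack slot where the JS->wasm IC stub (see
// jsCallEntrypointSlow) spills the previously active wasm instance; it sits directly
// below the callee-save area.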
ptrdiff_t WebAssemblyFunction::previousInstanceOffset() const
{
    ptrdiff_t result = calleeSaves().numberOfSetRegisters() * sizeof(CPURegister);
    result = -result - sizeof(CPURegister);
#if !ASSERT_DISABLED
    ptrdiff_t minOffset = 1;
    for (const RegisterAtOffset& regAtOffset : usedCalleeSaveRegisters()) {
        ptrdiff_t offset = regAtOffset.offset();
        ASSERT(offset < 0);
        minOffset = std::min(offset, minOffset);
    }
    ASSERT(minOffset - static_cast<ptrdiff_t>(sizeof(CPURegister)) == result);
#endif
    return result;
}

Wasm::Instance* WebAssemblyFunction::previousInstance(CallFrame* callFrame)
{
    ASSERT(callFrame->callee().rawPtr() == m_jsToWasmICCallee.get());
    auto* result = *bitwise_cast<Wasm::Instance**>(bitwise_cast<char*>(callFrame) + previousInstanceOffset());
    return result;
}

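// Builds the specialized JS->wasm call IC stub for this function. The stub checks arity
// and argument types inline, moves unboxed arguments straight into wasm argument
// registers and stack slots, swaps in the wasm instance, calls the wasm entrypoint, and
// boxes the result; anything it cannot handle bails out to the generic host-call path.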
MacroAssemblerCodePtr<JSEntryPtrTag> WebAssemblyFunction::jsCallEntrypointSlow()
{
    VM& vm = *this->vm();
    CCallHelpers jit;

    const auto& signature = Wasm::SignatureInformation::get(signatureIndex());
    const auto& pinnedRegs = Wasm::PinnedRegisterInfo::get();
    RegisterAtOffsetList registersToSpill = usedCalleeSaveRegisters();

    auto& moduleInformation = instance()->instance().module().moduleInformation();

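    // Compute the stub's frame size: spilled callee saves, one slot for the previous
    // wasm instance, and the call header plus any stack-passed wasm arguments counted
    // by the loop below.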
    unsigned totalFrameSize = registersToSpill.size() * sizeof(CPURegister);
    totalFrameSize += sizeof(CPURegister); // Slot for the VM's previous wasm instance.
    totalFrameSize += Wasm::WasmCallingConvention::headerSizeInBytes();
    totalFrameSize -= sizeof(CallerFrameAndPC);

    unsigned numGPRs = 0;
    unsigned numFPRs = 0;
    bool argumentsIncludeI64 = false;
    for (unsigned i = 0; i < signature.argumentCount(); i++) {
        switch (signature.argument(i)) {
        case Wasm::I64:
            argumentsIncludeI64 = true;
            break;
        case Wasm::Anyref:
        case Wasm::Funcref:
        case Wasm::I32:
            if (numGPRs >= Wasm::wasmCallingConvention().m_gprArgs.size())
                totalFrameSize += sizeof(CPURegister);
            ++numGPRs;
            break;
        case Wasm::F32:
        case Wasm::F64:
            if (numFPRs >= Wasm::wasmCallingConvention().m_fprArgs.size())
                totalFrameSize += sizeof(CPURegister);
            ++numFPRs;
            break;
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }

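    // There is no way to produce a wasm i64 from a JSValue here, so don't build an IC
    // for signatures that take i64; such calls keep going through the generic entrypoint.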
    if (argumentsIncludeI64)
        return nullptr;

    totalFrameSize = WTF::roundUpToMultipleOf(stackAlignmentBytes(), totalFrameSize);

    jit.emitFunctionPrologue();
    jit.subPtr(MacroAssembler::TrustedImm32(totalFrameSize), MacroAssembler::stackPointerRegister);
    jit.store64(CCallHelpers::TrustedImm64(0), CCallHelpers::addressFor(CallFrameSlot::codeBlock));

    for (const RegisterAtOffset& regAtOffset : registersToSpill) {
        GPRReg reg = regAtOffset.reg().gpr();
        ptrdiff_t offset = regAtOffset.offset();
        jit.storePtr(reg, CCallHelpers::Address(GPRInfo::callFrameRegister, offset));
    }

    GPRReg scratchGPR = Wasm::wasmCallingConventionAir().prologueScratch(1);
    GPRReg scratch2GPR = Wasm::wasmCallingConventionAir().prologueScratch(0);
    jit.loadPtr(vm.addressOfSoftStackLimit(), scratch2GPR);

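    // slowPath collects every bailout to the generic entrypoint. The first two checks
    // catch stack-pointer wraparound and overflow of the VM's soft stack limit
    // (scratch2GPR holds the limit loaded above).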
    CCallHelpers::JumpList slowPath;
    slowPath.append(jit.branchPtr(CCallHelpers::Above, MacroAssembler::stackPointerRegister, GPRInfo::callFrameRegister));
    slowPath.append(jit.branchPtr(CCallHelpers::Below, MacroAssembler::stackPointerRegister, scratch2GPR));

    // Ensure:
    // argCountPlusThis - 1 >= signature.argumentCount()
    // argCountPlusThis >= signature.argumentCount() + 1
    // FIXME: We should handle mismatched arity
    // https://bugs.webkit.org/show_bug.cgi?id=196564
    slowPath.append(jit.branch32(CCallHelpers::Below,
        CCallHelpers::payloadFor(CallFrameSlot::argumentCount), CCallHelpers::TrustedImm32(signature.argumentCount() + 1)));

    if (useTagRegisters())
        jit.emitMaterializeTagCheckRegisters();

    // First we do stack slots for FPRs so we can use FPR argument registers as scratch.
    // After that, we handle FPR argument registers.
    // We also handle all GPR types here as we have GPR scratch registers.
    {
        CCallHelpers::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
        numGPRs = 0;
        numFPRs = 0;
        FPRReg scratchFPR = Wasm::wasmCallingConvention().m_fprArgs[0].fpr();

        ptrdiff_t jsOffset = CallFrameSlot::firstArgument * sizeof(EncodedJSValue);

        ptrdiff_t wasmOffset = CallFrame::headerSizeInRegisters * sizeof(CPURegister);
        for (unsigned i = 0; i < signature.argumentCount(); i++) {
            switch (signature.argument(i)) {
            case Wasm::I32:
                jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchGPR);
                slowPath.append(jit.branchIfNotInt32(scratchGPR));
                if (numGPRs >= Wasm::wasmCallingConvention().m_gprArgs.size()) {
                    jit.store32(scratchGPR, calleeFrame.withOffset(wasmOffset));
                    wasmOffset += sizeof(CPURegister);
                } else {
                    jit.zeroExtend32ToPtr(scratchGPR, Wasm::wasmCallingConvention().m_gprArgs[numGPRs].gpr());
                    ++numGPRs;
                }
                break;
            case Wasm::Funcref: {
                // FIXME: Emit this inline <https://bugs.webkit.org/show_bug.cgi?id=198506>.
                bool (*shouldThrow)(Wasm::Instance*, JSValue) = [] (Wasm::Instance* wasmInstance, JSValue arg) -> bool {
                    JSWebAssemblyInstance* instance = wasmInstance->owner<JSWebAssemblyInstance>();
                    JSGlobalObject* globalObject = instance->globalObject();
                    VM& vm = globalObject->vm();
                    return !isWebAssemblyHostFunction(vm, arg) && !arg.isNull();
                };
                jit.move(CCallHelpers::TrustedImmPtr(&instance()->instance()), GPRInfo::argumentGPR0);
                jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), GPRInfo::argumentGPR1);
                jit.setupArguments<decltype(shouldThrow)>(GPRInfo::argumentGPR0, GPRInfo::argumentGPR1);
                auto call = jit.call(OperationPtrTag);

                jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                    linkBuffer.link(call, FunctionPtr<OperationPtrTag>(shouldThrow));
                });

                slowPath.append(jit.branchTest32(CCallHelpers::NonZero, GPRInfo::returnValueGPR));

                FALLTHROUGH;
            }
            case Wasm::Anyref: {
                jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchGPR);

                if (numGPRs >= Wasm::wasmCallingConvention().m_gprArgs.size()) {
                    jit.store64(scratchGPR, calleeFrame.withOffset(wasmOffset));
                    wasmOffset += sizeof(CPURegister);
                } else {
                    jit.move(scratchGPR, Wasm::wasmCallingConvention().m_gprArgs[numGPRs].gpr());
                    ++numGPRs;
                }
                break;
            }
            case Wasm::F32:
            case Wasm::F64:
                if (numFPRs >= Wasm::wasmCallingConvention().m_fprArgs.size()) {
                    jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchGPR);
                    slowPath.append(jit.branchIfNotNumber(scratchGPR));
                    auto isInt32 = jit.branchIfInt32(scratchGPR);
                    if (signature.argument(i) == Wasm::F32) {
                        jit.unboxDouble(scratchGPR, scratchGPR, scratchFPR);
                        jit.convertDoubleToFloat(scratchFPR, scratchFPR);
                        jit.storeFloat(scratchFPR, calleeFrame.withOffset(wasmOffset));
                    } else {
                        jit.add64(GPRInfo::tagTypeNumberRegister, scratchGPR, scratchGPR);
                        jit.store64(scratchGPR, calleeFrame.withOffset(wasmOffset));
                    }
                    auto done = jit.jump();

                    isInt32.link(&jit);
                    if (signature.argument(i) == Wasm::F32) {
                        jit.convertInt32ToFloat(scratchGPR, scratchFPR);
                        jit.storeFloat(scratchFPR, calleeFrame.withOffset(wasmOffset));
                    } else {
                        jit.convertInt32ToDouble(scratchGPR, scratchFPR);
                        jit.storeDouble(scratchFPR, calleeFrame.withOffset(wasmOffset));
                    }
                    done.link(&jit);

                    wasmOffset += sizeof(CPURegister);
                } else
                    ++numFPRs;
                break;
            default:
                RELEASE_ASSERT_NOT_REACHED();
            }

            jsOffset += sizeof(EncodedJSValue);
        }
    }

    // Now handle FPR arguments in registers.
    {
        numFPRs = 0;
        ptrdiff_t jsOffset = CallFrameSlot::firstArgument * sizeof(EncodedJSValue);
        for (unsigned i = 0; i < signature.argumentCount(); i++) {
            switch (signature.argument(i)) {
            case Wasm::F32:
            case Wasm::F64:
                if (numFPRs < Wasm::wasmCallingConvention().m_fprArgs.size()) {
                    FPRReg argFPR = Wasm::wasmCallingConvention().m_fprArgs[numFPRs].fpr();
                    jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchGPR);
                    slowPath.append(jit.branchIfNotNumber(scratchGPR));
                    auto isInt32 = jit.branchIfInt32(scratchGPR);
                    jit.unboxDouble(scratchGPR, scratchGPR, argFPR);
                    if (signature.argument(i) == Wasm::F32)
                        jit.convertDoubleToFloat(argFPR, argFPR);
                    auto done = jit.jump();

                    isInt32.link(&jit);
                    if (signature.argument(i) == Wasm::F32)
                        jit.convertInt32ToFloat(scratchGPR, argFPR);
                    else
                        jit.convertInt32ToDouble(scratchGPR, argFPR);

                    done.link(&jit);
                    ++numFPRs;
                }
                break;
            default:
                break;
            }

            jsOffset += sizeof(EncodedJSValue);
        }
    }

    // At this point, we're committed to doing a fast call.

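    // Save the currently active wasm instance into its dedicated frame slot, then
    // install this function's instance as the current context (via fast TLS when
    // available, otherwise through the VM's wasmContext and the pinned instance register).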
    if (Wasm::Context::useFastTLS())
        jit.loadWasmContextInstance(scratchGPR);
    else
        jit.loadPtr(vm.wasmContext.pointerToInstance(), scratchGPR);
    ptrdiff_t previousInstanceOffset = this->previousInstanceOffset();
    jit.storePtr(scratchGPR, CCallHelpers::Address(GPRInfo::callFrameRegister, previousInstanceOffset));

    jit.move(CCallHelpers::TrustedImmPtr(&instance()->instance()), scratchGPR);
    if (Wasm::Context::useFastTLS())
        jit.storeWasmContextInstance(scratchGPR);
    else {
        jit.move(scratchGPR, pinnedRegs.wasmContextInstancePointer);
        jit.storePtr(scratchGPR, vm.wasmContext.pointerToInstance());
    }
    // scratch2GPR still holds the soft stack limit loaded earlier; cache it on the
    // instance we just installed.
    jit.storePtr(scratch2GPR, CCallHelpers::Address(scratchGPR, Wasm::Instance::offsetOfCachedStackLimit()));

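    // If the module uses memory, load the pinned base/size registers from the instance
    // and conditionally cage the base pointer for the primitive Gigacage.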
    if (!!moduleInformation.memory) {
        GPRReg baseMemory = pinnedRegs.baseMemoryPointer;
        GPRReg scratchOrSize = scratch2GPR;
        auto mode = instance()->memoryMode();

        if (isARM64E()) {
            if (mode != Wasm::MemoryMode::Signaling)
                scratchOrSize = pinnedRegs.sizeRegister;
            jit.loadPtr(CCallHelpers::Address(scratchGPR, Wasm::Instance::offsetOfCachedMemorySize()), scratchOrSize);
        } else {
            if (mode != Wasm::MemoryMode::Signaling)
                jit.loadPtr(CCallHelpers::Address(scratchGPR, Wasm::Instance::offsetOfCachedMemorySize()), pinnedRegs.sizeRegister);
        }

        jit.loadPtr(CCallHelpers::Address(scratchGPR, Wasm::Instance::offsetOfCachedMemory()), baseMemory);
        jit.cageConditionally(Gigacage::Primitive, baseMemory, scratchOrSize, scratchOrSize);
    }

    // We use this callee to indicate how to unwind past these types of frames:
    // 1. We need to know where to get callee saves.
    // 2. We need to know to restore the previous wasm context.
    if (!m_jsToWasmICCallee)
        m_jsToWasmICCallee.set(vm, this, JSToWasmICCallee::create(vm, globalObject(), this));
    jit.storePtr(CCallHelpers::TrustedImmPtr(m_jsToWasmICCallee.get()), CCallHelpers::addressFor(CallFrameSlot::callee));

    {
        // FIXME: Currently we just do an indirect jump. But we should teach the Module
        // how to repatch us:
        // https://bugs.webkit.org/show_bug.cgi?id=196570
        jit.loadPtr(entrypointLoadLocation(), scratchGPR);
        jit.call(scratchGPR, WasmEntryPtrTag);
    }

    ASSERT(!RegisterSet::runtimeTagRegisters().contains(GPRInfo::nonPreservedNonReturnGPR));
    jit.loadPtr(CCallHelpers::Address(GPRInfo::callFrameRegister, previousInstanceOffset), GPRInfo::nonPreservedNonReturnGPR);
    if (Wasm::Context::useFastTLS())
        jit.storeWasmContextInstance(GPRInfo::nonPreservedNonReturnGPR);
    else
        jit.storePtr(GPRInfo::nonPreservedNonReturnGPR, vm.wasmContext.pointerToInstance());

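    // Box the wasm return value into the JSValue return register.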
    switch (signature.returnType()) {
    case Wasm::Void:
        jit.moveTrustedValue(jsUndefined(), JSValueRegs { GPRInfo::returnValueGPR });
        break;
    case Wasm::I32:
        jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.boxInt32(GPRInfo::returnValueGPR, JSValueRegs { GPRInfo::returnValueGPR });
        break;
    case Wasm::F32:
        jit.convertFloatToDouble(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
        FALLTHROUGH;
    case Wasm::F64: {
        jit.moveTrustedValue(jsNumber(pureNaN()), JSValueRegs { GPRInfo::returnValueGPR });
        auto isNaN = jit.branchIfNaN(FPRInfo::returnValueFPR);
        jit.boxDouble(FPRInfo::returnValueFPR, JSValueRegs { GPRInfo::returnValueGPR });
        isNaN.link(&jit);
        break;
    }
    case Wasm::Funcref:
    case Wasm::Anyref:
        break;
    case Wasm::I64:
    case Wasm::Func:
        return nullptr;
    default:
        break;
    }

    auto emitRestoreCalleeSaves = [&] {
        for (const RegisterAtOffset& regAtOffset : registersToSpill) {
            GPRReg reg = regAtOffset.reg().gpr();
            ASSERT(reg != GPRInfo::returnValueGPR);
            ptrdiff_t offset = regAtOffset.offset();
            jit.loadPtr(CCallHelpers::Address(GPRInfo::callFrameRegister, offset), reg);
        }
    };

    emitRestoreCalleeSaves();

    jit.emitFunctionEpilogue();
    jit.ret();

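    // Slow path: restore callee saves, pass this WebAssemblyFunction in regT0, and
    // tail-call the arity-checking host-call thunk so the generic path takes over.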
    slowPath.link(&jit);
    emitRestoreCalleeSaves();
    jit.move(CCallHelpers::TrustedImmPtr(this), GPRInfo::regT0);
    jit.emitFunctionEpilogue();
#if CPU(ARM64E)
    jit.untagReturnAddress();
#endif
    auto jumpToHostCallThunk = jit.jump();

    LinkBuffer linkBuffer(jit, nullptr, JITCompilationCanFail);
    if (UNLIKELY(linkBuffer.didFailToAllocate()))
        return nullptr;

    linkBuffer.link(jumpToHostCallThunk, CodeLocationLabel<JSEntryPtrTag>(executable()->entrypointFor(CodeForCall, MustCheckArity).executableAddress()));
    m_jsCallEntrypoint = FINALIZE_CODE(linkBuffer, WasmEntryPtrTag, "JS->Wasm IC");
    return m_jsCallEntrypoint.code();
}

WebAssemblyFunction* WebAssemblyFunction::create(VM& vm, JSGlobalObject* globalObject, Structure* structure, unsigned length, const String& name, JSWebAssemblyInstance* instance, Wasm::Callee& jsEntrypoint, Wasm::WasmToWasmImportableFunction::LoadLocation wasmToWasmEntrypointLoadLocation, Wasm::SignatureIndex signatureIndex)
{
    NativeExecutable* executable = vm.getHostFunction(callWebAssemblyFunction, NoIntrinsic, callHostFunctionAsConstructor, nullptr, name);
    WebAssemblyFunction* function = new (NotNull, allocateCell<WebAssemblyFunction>(vm.heap)) WebAssemblyFunction(vm, globalObject, structure, jsEntrypoint, wasmToWasmEntrypointLoadLocation, signatureIndex);
    function->finishCreation(vm, executable, length, name, instance);
    ASSERT_WITH_MESSAGE(!function->isLargeAllocation(), "WebAssemblyFunction should not be allocated as a large allocation because it is a JSCallee.");
    return function;
}

Structure* WebAssemblyFunction::createStructure(VM& vm, JSGlobalObject* globalObject, JSValue prototype)
{
    ASSERT(globalObject);
    return Structure::create(vm, globalObject, prototype, TypeInfo(JSFunctionType, StructureFlags), info());
}

WebAssemblyFunction::WebAssemblyFunction(VM& vm, JSGlobalObject* globalObject, Structure* structure, Wasm::Callee& jsEntrypoint, Wasm::WasmToWasmImportableFunction::LoadLocation wasmToWasmEntrypointLoadLocation, Wasm::SignatureIndex signatureIndex)
    : Base { vm, globalObject, structure }
    , m_jsEntrypoint { jsEntrypoint.entrypoint() }
    , m_importableFunction { signatureIndex, wasmToWasmEntrypointLoadLocation }
{ }

void WebAssemblyFunction::visitChildren(JSCell* cell, SlotVisitor& visitor)
{
    WebAssemblyFunction* thisObject = jsCast<WebAssemblyFunction*>(cell);
    ASSERT_GC_OBJECT_INHERITS(thisObject, info());

    Base::visitChildren(thisObject, visitor);
    visitor.append(thisObject->m_jsToWasmICCallee);
}

void WebAssemblyFunction::destroy(JSCell* cell)
{
    static_cast<WebAssemblyFunction*>(cell)->WebAssemblyFunction::~WebAssemblyFunction();
}

} // namespace JSC

#endif // ENABLE(WEBASSEMBLY)