/*
 * Copyright (C) 2016-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "WasmToJS.h"

#if ENABLE(WEBASSEMBLY)

#include "CCallHelpers.h"
#include "FrameTracers.h"
#include "IteratorOperations.h"
#include "JITExceptions.h"
#include "JSCInlines.h"
#include "JSWebAssemblyHelpers.h"
#include "JSWebAssemblyInstance.h"
#include "JSWebAssemblyRuntimeError.h"
#include "LinkBuffer.h"
#include "NativeErrorConstructor.h"
#include "ThunkGenerators.h"
#include "WasmCallingConvention.h"
#include "WasmContextInlines.h"
#include "WasmExceptionType.h"
#include "WasmInstance.h"
#include "WasmOperations.h"
#include "WasmSignatureInlines.h"

#include <wtf/FunctionTraits.h>

namespace JSC { namespace Wasm {

using JIT = CCallHelpers;

static void materializeImportJSCell(JIT& jit, unsigned importIndex, GPRReg result)
{
    // We're calling out of the current WebAssembly.Instance. That Instance has a list of all its import functions.
    jit.loadWasmContextInstance(result);
    jit.loadPtr(JIT::Address(result, Instance::offsetOfImportFunction(importIndex)), result);
}

static Expected<MacroAssemblerCodeRef<WasmEntryPtrTag>, BindingFailure> handleBadI64Use(VM& vm, JIT& jit, unsigned importIndex)
{
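    // i64 has no JSValue representation in this embedding (this predates BigInt integration),
    // so an import whose signature mentions i64 simply throws instead of calling out to JS.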
    jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm.topEntryFrame);
    jit.loadWasmContextInstance(GPRInfo::argumentGPR0);

    // Store Callee.
    jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, Instance::offsetOfOwner()), GPRInfo::argumentGPR0);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR1);
    jit.prepareCallOperation(vm);
    jit.storePtr(GPRInfo::argumentGPR1, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

    auto call = jit.call(OperationPtrTag);
    jit.jumpToExceptionHandler(vm);

    LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID, JITCompilationCanFail);
    if (UNLIKELY(linkBuffer.didFailToAllocate()))
        return makeUnexpected(BindingFailure::OutOfMemory);

    linkBuffer.link(call, FunctionPtr<OperationPtrTag>(operationWasmThrowBadI64));
    return FINALIZE_WASM_CODE(linkBuffer, WasmEntryPtrTag, "WebAssembly->JavaScript invalid i64 use in import[%i]", importIndex);
}

Expected<MacroAssemblerCodeRef<WasmEntryPtrTag>, BindingFailure> wasmToJS(VM& vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
{
    // FIXME: This function doesn't properly abstract away the calling convention.
    // It'd be super easy to do so: https://bugs.webkit.org/show_bug.cgi?id=169401
    const auto& wasmCC = wasmCallingConvention();
    const auto& jsCC = jsCallingConvention();
    const Signature& signature = SignatureInformation::get(signatureIndex);
    unsigned argCount = signature.argumentCount();
    JIT jit;

    CallInformation wasmCallInfo = wasmCC.callInformationFor(signature, CallRole::Callee);
    RegisterAtOffsetList savedResultRegisters = wasmCallInfo.computeResultsOffsetList();

    // Note: WasmB3IRGenerator assumes that this stub treats SP as a callee save.
    // If we ever change this, we will also need to change WasmB3IRGenerator.

    // Below, we assume that the JS calling convention is always on the stack.
    ASSERT(!jsCC.gprArgs.size());
    ASSERT(!jsCC.fprArgs.size());

    jit.emitFunctionPrologue();
    jit.store64(JIT::TrustedImm32(0), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register)))); // FIXME Stop using 0 as codeBlocks. https://bugs.webkit.org/show_bug.cgi?id=165321

    if (wasmCallInfo.argumentsIncludeI64 || wasmCallInfo.resultsIncludeI64)
        return handleBadI64Use(vm, jit, importIndex);

    // Here we assume that the JS calling convention saves at least all the wasm callee-saved registers. We therefore don't need to save and restore more registers since the wasm callee already took care of this.
    RegisterSet missingCalleeSaves = wasmCC.calleeSaveRegisters;
    missingCalleeSaves.exclude(jsCC.calleeSaveRegisters);
    ASSERT(missingCalleeSaves.isEmpty());

    // Note: We don't need to perform a stack check here since WasmB3IRGenerator
    // will do the stack check for us. Whenever it detects that it might make
    // a call to this thunk, it'll make sure its stack check includes space
    // for us here.

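    // Reserve stack space for whichever is larger: the outgoing JS call frame, or the buffer used
    // below to spill the register results when the signature returns multiple values. calleeFrame
    // addresses the outgoing frame relative to SP, offset by -sizeof(CallerFrameAndPC) because the
    // call and the callee's prologue will populate those two slots.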
    const unsigned numberOfParameters = argCount + 1; // There is a "this" argument.
    const unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
    const unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
    const unsigned stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), std::max<unsigned>(numberOfBytesForCall, savedResultRegisters.size() * sizeof(CPURegister)));
    jit.subPtr(MacroAssembler::TrustedImm32(stackOffset), MacroAssembler::stackPointerRegister);
    JIT::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

    // FIXME make these loops which switch on Signature if there are many arguments on the stack. It'll otherwise be huge for huge signatures. https://bugs.webkit.org/show_bug.cgi?id=165547

    // First go through the integer parameters, freeing up their register for use afterwards.
    {
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case Anyref:
            case Funcref:
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.gprArgs.size())
                    gprReg = wasmCC.gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                ++marshalledGPRs;
                if (argType == I32) {
                    jit.zeroExtend32ToPtr(gprReg, gprReg); // Clear non-int32 and non-tag bits.
                    jit.boxInt32(gprReg, JSValueRegs(gprReg), DoNotHaveTagRegisters);
                }
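                // Anyref/Funcref arguments are already in JSValue form, so they (like the
                // now-boxed int32s) are stored without further conversion.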
                jit.store64(gprReg, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                break;
            }
            case F32:
            case F64:
                // Skipped: handled below.
                if (marshalledFPRs >= wasmCC.fprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledFPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            }
        }
    }

    {
        // Integer registers have already been spilled, these are now available.
        GPRReg doubleEncodeOffsetGPRReg = GPRInfo::argumentGPR0;
        GPRReg scratch = GPRInfo::argumentGPR1;
        bool hasMaterializedDoubleEncodeOffset = false;
        auto materializeDoubleEncodeOffset = [&hasMaterializedDoubleEncodeOffset, &jit] (GPRReg dest) {
            if (!hasMaterializedDoubleEncodeOffset) {
#if CPU(ARM64)
                jit.move(JIT::TrustedImm64(JSValue::DoubleEncodeOffset), dest);
#else
                jit.move(JIT::TrustedImm32(1), dest);
                jit.lshift64(JIT::TrustedImm32(JSValue::DoubleEncodeOffsetBit), dest);
#endif
                hasMaterializedDoubleEncodeOffset = true;
            }
        };

        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));

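        // Box an FP argument as a JSValue double: purify NaNs so the payload cannot be mistaken
        // for a tagged value, then add the double encode offset to the raw bits.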
        auto marshallFPR = [&] (FPRReg fprReg) {
            jit.purifyNaN(fprReg);
            jit.moveDoubleTo64(fprReg, scratch);
            materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
            jit.add64(doubleEncodeOffsetGPRReg, scratch);
            jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
            calleeFrameOffset += sizeof(Register);
            ++marshalledFPRs;
        };

        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case Anyref:
            case Funcref:
            case I32:
                // Skipped: handled above.
                if (marshalledGPRs >= wasmCC.gprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledGPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.fprArgs.size())
                    fprReg = wasmCC.fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                marshallFPR(fprReg);
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.fprArgs.size())
                    fprReg = wasmCC.fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                marshallFPR(fprReg);
                break;
            }
            }
        }
    }

    jit.loadWasmContextInstance(GPRInfo::argumentGPR0);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, Instance::offsetOfOwner()), GPRInfo::argumentGPR0);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR0);
    jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

    GPRReg importJSCellGPRReg = GPRInfo::regT0; // Callee needs to be in regT0 for slow path below.

    ASSERT(!wasmCC.calleeSaveRegisters.get(importJSCellGPRReg));
    materializeImportJSCell(jit, importIndex, importJSCellGPRReg);

    jit.store64(importJSCellGPRReg, calleeFrame.withOffset(CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
    jit.store32(JIT::TrustedImm32(numberOfParameters), calleeFrame.withOffset(CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset));
    jit.store64(JIT::TrustedImm64(JSValue::ValueUndefined), calleeFrame.withOffset(CallFrameSlot::thisArgument * static_cast<int>(sizeof(Register))));

    // FIXME Tail call if the wasm return type is void and no registers were spilled. https://bugs.webkit.org/show_bug.cgi?id=165488

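    // Emit a patchable call to the import. The fast path is taken when the callee matches the cell
    // cached in the CallLinkInfo; otherwise we fall through to the slow path, which goes through the
    // generic call link thunk with the callee in regT0 and the CallLinkInfo in regT2.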
    CallLinkInfo* callLinkInfo = callLinkInfos.add();
    callLinkInfo->setUpCall(CallLinkInfo::Call, CodeOrigin(), importJSCellGPRReg);
    JIT::DataLabelPtr targetToCheck;
    JIT::TrustedImmPtr initialRightValue(nullptr);
    JIT::Jump slowPath = jit.branchPtrWithPatch(MacroAssembler::NotEqual, importJSCellGPRReg, targetToCheck, initialRightValue);
    JIT::Call fastCall = jit.nearCall();
    JIT::Jump done = jit.jump();
    slowPath.link(&jit);
    // Callee needs to be in regT0 here.
    jit.move(MacroAssembler::TrustedImmPtr(callLinkInfo), GPRInfo::regT2); // Link info needs to be in regT2.
    jit.loadWasmContextInstance(GPRInfo::regT3);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::regT3, Instance::offsetOfOwner()), GPRInfo::regT3);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::regT3, JSWebAssemblyInstance::offsetOfGlobalObject()), GPRInfo::regT3);
    JIT::Call slowCall = jit.nearCall();
    done.link(&jit);

    CCallHelpers::JumpList exceptionChecks;

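    // The import returned a JSValue in returnValueGPR. Convert it back to the wasm return type(s)
    // the caller expects, calling out to the runtime for anything that is not already in the right
    // representation.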
    if (signature.returnCount() == 1) {
        switch (signature.returnType(0)) {
        case Void:
        case Func:
            // For the JavaScript embedding, imports with these types in their signature return are a WebAssembly.Module validation error.
            RELEASE_ASSERT_NOT_REACHED();
            break;
        case I64: {
            RELEASE_ASSERT_NOT_REACHED(); // Handled above.
        }
        case I32: {
            CCallHelpers::JumpList done;
            CCallHelpers::JumpList slowPath;
            GPRReg dest = wasmCallInfo.results[0].gpr();

            slowPath.append(jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters));
            slowPath.append(jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters));
            jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, dest);
            done.append(jit.jump());

            slowPath.link(&jit);
            jit.setupArguments<decltype(operationConvertToI32)>(GPRInfo::returnValueGPR);
            auto call = jit.call(OperationPtrTag);
            exceptionChecks.append(jit.emitJumpIfException(vm));
            jit.move(GPRInfo::returnValueGPR, dest);

            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                linkBuffer.link(call, FunctionPtr<OperationPtrTag>(operationConvertToI32));
            });

            done.link(&jit);
            break;
        }
        case Funcref:
        case Anyref:
            jit.move(GPRInfo::returnValueGPR, wasmCallInfo.results[0].gpr());
            break;
        case F32: {
            CCallHelpers::JumpList done;
            FPRReg dest = wasmCallInfo.results[0].fpr();

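            // Three cases: an unboxed int32 is converted directly, a boxed double is unboxed and
            // narrowed to float, and anything else goes to the slow-path conversion operation.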
            auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
            auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
            // We're an int32
            jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
            jit.convertInt64ToFloat(GPRInfo::returnValueGPR, dest);
            done.append(jit.jump());

            isDouble.link(&jit);
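            // The returned JSValue boxes a double: adding NumberTag undoes the double encode offset
            // that was applied when the value was boxed, recovering the raw IEEE 754 bits.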
            jit.move(JIT::TrustedImm64(JSValue::NumberTag), GPRInfo::returnValueGPR2);
            jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
            jit.move64ToDouble(GPRInfo::returnValueGPR, dest);
            jit.convertDoubleToFloat(dest, dest);
            done.append(jit.jump());

            notANumber.link(&jit);
            jit.setupArguments<decltype(operationConvertToF32)>(GPRInfo::returnValueGPR);
            auto call = jit.call(OperationPtrTag);
            exceptionChecks.append(jit.emitJumpIfException(vm));
            jit.moveDouble(FPRInfo::returnValueFPR, dest);

            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                linkBuffer.link(call, FunctionPtr<OperationPtrTag>(operationConvertToF32));
            });

            done.link(&jit);
            break;
        }
        case F64: {
            CCallHelpers::JumpList done;
            FPRReg dest = wasmCallInfo.results[0].fpr();

            auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
            auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
            // We're an int32
            jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
            jit.convertInt64ToDouble(GPRInfo::returnValueGPR, dest);
            done.append(jit.jump());

            isDouble.link(&jit);
            jit.move(JIT::TrustedImm64(JSValue::NumberTag), GPRInfo::returnValueGPR2);
            jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
            jit.move64ToDouble(GPRInfo::returnValueGPR, dest);
            done.append(jit.jump());

            notANumber.link(&jit);
            jit.setupArguments<decltype(operationConvertToF64)>(GPRInfo::returnValueGPR);
            auto call = jit.call(OperationPtrTag);
            exceptionChecks.append(jit.emitJumpIfException(vm));
            jit.moveDouble(FPRInfo::returnValueFPR, dest);

            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                linkBuffer.link(call, FunctionPtr<OperationPtrTag>(operationConvertToF64));
            });

            done.link(&jit);
            break;
        }
        }
    } else if (signature.returnCount() > 1) {
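        // Multiple return values come back from JS as an iterable. operationIterateResults walks it
        // and converts each element, writing the register results into the buffer at the stack
        // pointer (reloaded below) and the remaining results into their stack slots.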
        GPRReg wasmContextInstanceGPR = PinnedRegisterInfo::get().wasmContextInstancePointer;
        if (Context::useFastTLS()) {
            wasmContextInstanceGPR = GPRInfo::argumentGPR1;
            static_assert(std::is_same_v<Wasm::Instance*, typename FunctionTraits<decltype(operationIterateResults)>::ArgumentType<1>>, "Instance should be the second parameter.");
            jit.loadWasmContextInstance(wasmContextInstanceGPR);
        }

        jit.setupArguments<decltype(operationIterateResults)>(wasmContextInstanceGPR, &signature, GPRInfo::returnValueGPR, CCallHelpers::stackPointerRegister, CCallHelpers::framePointerRegister);
        jit.callOperation(FunctionPtr<OperationPtrTag>(operationIterateResults));
        exceptionChecks.append(jit.emitJumpIfException(vm));

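        // Reload the register results that operationIterateResults spilled into the buffer at SP.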
        for (RegisterAtOffset location : savedResultRegisters)
            jit.load64ToReg(CCallHelpers::Address(CCallHelpers::stackPointerRegister, location.offset()), location.reg());
    }

    jit.emitFunctionEpilogue();
    jit.ret();

    if (!exceptionChecks.empty()) {
        exceptionChecks.link(&jit);
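        // One of the conversion operations above threw. Spill this stub's callee saves into the
        // entry frame buffer so the unwinder can restore them, then call operationWasmUnwind and
        // jump to the handler it establishes.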
        jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm.topEntryFrame);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        auto call = jit.call(OperationPtrTag);
        jit.jumpToExceptionHandler(vm);

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, FunctionPtr<OperationPtrTag>(operationWasmUnwind));
        });
    }

    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID, JITCompilationCanFail);
    if (UNLIKELY(patchBuffer.didFailToAllocate()))
        return makeUnexpected(BindingFailure::OutOfMemory);

    patchBuffer.link(slowCall, FunctionPtr<JITThunkPtrTag>(vm.getCTIStub(linkCallThunkGenerator).code()));
    CodeLocationLabel<JSInternalPtrTag> callReturnLocation(patchBuffer.locationOfNearCall<JSInternalPtrTag>(slowCall));
    CodeLocationLabel<JSInternalPtrTag> hotPathBegin(patchBuffer.locationOf<JSInternalPtrTag>(targetToCheck));
    CodeLocationNearCall<JSInternalPtrTag> hotPathOther = patchBuffer.locationOfNearCall<JSInternalPtrTag>(fastCall);
    callLinkInfo->setCallLocations(callReturnLocation, hotPathBegin, hotPathOther);

    return FINALIZE_WASM_CODE(patchBuffer, WasmEntryPtrTag, "WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data());
}

void emitThrowWasmToJSException(CCallHelpers& jit, GPRReg wasmInstance, Wasm::ExceptionType type)
{
    ASSERT(wasmInstance != GPRInfo::argumentGPR0);
    jit.loadPtr(CCallHelpers::Address(wasmInstance, Wasm::Instance::offsetOfPointerToTopEntryFrame()), GPRInfo::argumentGPR0);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0), GPRInfo::argumentGPR0);
    jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(GPRInfo::argumentGPR0);
    jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
    jit.move(CCallHelpers::TrustedImm32(static_cast<int32_t>(type)), GPRInfo::argumentGPR1);

    CCallHelpers::Call call = jit.call(OperationPtrTag);

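    // The operation records the exception and returns the address of the handler to run; jump
    // straight to it.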
    jit.farJump(GPRInfo::returnValueGPR, ExceptionHandlerPtrTag);
    jit.breakpoint(); // We should not reach this.

    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
        linkBuffer.link(call, FunctionPtr<OperationPtrTag>(Wasm::operationWasmToJSException));
    });
}

} } // namespace JSC::Wasm

#endif // ENABLE(WEBASSEMBLY)