1 | /* |
2 | * Copyright (C) 2015-2019 Apple Inc. All rights reserved. |
3 | * |
4 | * Redistribution and use in source and binary forms, with or without |
5 | * modification, are permitted provided that the following conditions |
6 | * are met: |
7 | * 1. Redistributions of source code must retain the above copyright |
8 | * notice, this list of conditions and the following disclaimer. |
9 | * 2. Redistributions in binary form must reproduce the above copyright |
10 | * notice, this list of conditions and the following disclaimer in the |
11 | * documentation and/or other materials provided with the distribution. |
12 | * |
13 | * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY |
14 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
15 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
16 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR |
17 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, |
18 | * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
19 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR |
20 | * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY |
21 | * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
22 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
23 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
24 | */ |
25 | |
26 | #pragma once |
27 | |
28 | #if ENABLE(B3_JIT) |
29 | |
30 | #include "AirTmp.h" |
31 | #include "B3Bank.h" |
32 | #include "B3Common.h" |
33 | #include "B3Type.h" |
34 | #include "B3Value.h" |
35 | #include "B3Width.h" |
36 | #include <wtf/Optional.h> |
37 | |
38 | #if ASSERT_DISABLED |
39 | IGNORE_RETURN_TYPE_WARNINGS_BEGIN |
40 | #endif |
41 | |
42 | namespace JSC { namespace B3 { |
43 | |
44 | class Value; |
45 | |
46 | namespace Air { |
47 | |
48 | class Special; |
49 | class StackSlot; |
50 | |
51 | // This class name is also intentionally terse because we will say it a lot. You'll see code like |
52 | // Inst(..., Arg::imm(5), Arg::addr(thing, blah), ...) |
53 | class Arg { |
54 | public: |
55 | // These enum members are intentionally terse because we have to mention them a lot. |
    // These enum members are intentionally terse because we have to mention them a lot.
    enum Kind : int8_t {
        Invalid,

        // This is either an unassigned temporary or a register. All unassigned temporaries
        // eventually become registers.
        Tmp,

        // This is an immediate that the instruction will materialize. Imm is the immediate that can be
        // inlined into most instructions, while BigImm indicates a constant materialization and is
        // usually only usable with Move. Specials may also admit it, for example for stackmaps used for
        // OSR exit and tail calls.
        // BitImm is an immediate for Bitwise operation (And, Xor, etc).
        Imm,
        BigImm,
        BitImm,
        BitImm64,

        // These are the addresses. Instructions may load from (Use), store to (Def), or evaluate
        // (UseAddr) addresses.
        SimpleAddr,
        Addr,
        ExtendedOffsetAddr,
        Stack,
        CallArg,
        Index,

        // Immediate operands that customize the behavior of an operation. You can think of them as
        // secondary opcodes. They are always "Use"'d.
        RelCond,
        ResCond,
        DoubleCond,
        StatusCond,
        Special,
        WidthArg
    };

    // How much a use cares about being in a register. A Cold use is one where replacing the
    // use with something on the stack is free (see ColdUse below); everything else is Warm.
    enum Temperature : int8_t {
        Cold,
        Warm
    };

    // The two points in an Inst's execution at which an operand can be read or written:
    // Early is before the Inst does its work, Late is after (see the Role comments below).
    enum Phase : int8_t {
        Early,
        Late
    };

    // At which phases a Role is active: just Early, just Late, or both.
    enum Timing : int8_t {
        OnlyEarly,
        OnlyLate,
        EarlyAndLate
    };

    enum Role : int8_t {
        // Use means that the Inst will read from this value before doing anything else.
        //
        // For Tmp: The Inst will read this Tmp.
        // For Arg::addr and friends: The Inst will load from this address.
        // For Arg::imm and friends: The Inst will materialize and use this immediate.
        // For RelCond/ResCond/Special: This is the only valid role for these kinds.
        //
        // Note that Use of an address does not mean escape. It only means that the instruction will
        // load from the address before doing anything else. This is a bit tricky; for example
        // Specials could theoretically squirrel away the address and effectively escape it. However,
        // this is not legal. On the other hand, any address other than Stack is presumed to be
        // always escaping, and Stack is presumed to be always escaping if it's Locked.
        Use,

        // Exactly like Use, except that it also implies that the use is cold: that is, replacing the
        // use with something on the stack is free.
        ColdUse,

        // LateUse means that the Inst will read from this value after doing its Def's. Note that LateUse
        // on an Addr or Index still means Use on the internal temporaries. Note that specifying the
        // same Tmp once as Def and once as LateUse has undefined behavior: the use may happen before
        // the def, or it may happen after it.
        LateUse,

        // Combination of LateUse and ColdUse.
        LateColdUse,

        // Def means that the Inst will write to this value after doing everything else.
        //
        // For Tmp: The Inst will write to this Tmp.
        // For Arg::addr and friends: The Inst will store to this address.
        // This isn't valid for any other kinds.
        //
        // Like Use of address, Def of address does not mean escape.
        Def,

        // This is a special variant of Def that implies that the upper bits of the target register are
        // zero-filled. Specifically, if the Width of a ZDef is less than the largest possible width of
        // the argument (for example, we're on a 64-bit machine and we have a Width32 ZDef of a GPR) then
        // this has different implications for the upper bits (i.e. the top 32 bits in our example)
        // depending on the kind of the argument:
        //
        // For register: the upper bits are zero-filled.
        // For anonymous stack slot: the upper bits are zero-filled.
        // For address: the upper bits are not touched (i.e. we do a 32-bit store in our example).
        // For tmp: either the upper bits are not touched or they are zero-filled, and we won't know
        // which until we lower the tmp to either a StackSlot or a Reg.
        //
        // The behavior of ZDef is consistent with what happens when you perform 32-bit operations on a
        // 64-bit GPR. It's not consistent with what happens with 8-bit or 16-bit Defs on x86 GPRs, or
        // what happens with float Defs in ARM NEON or X86 SSE. Hence why we have both Def and ZDef.
        ZDef,

        // This is a combined Use and Def. It means that both things happen.
        UseDef,

        // This is a combined Use and ZDef. It means that both things happen.
        UseZDef,

        // This is like Def, but implies that the assignment occurs before the start of the Inst's
        // execution rather than after. Note that specifying the same Tmp once as EarlyDef and once
        // as Use has undefined behavior: the use may happen before the def, or it may happen after
        // it.
        EarlyDef,

        EarlyZDef,

        // Some instructions need a scratch register. We model this by saying that the temporary is
        // defined early and used late. This role implies that.
        Scratch,

        // This is a special kind of use that is only valid for addresses. It means that the
        // instruction will evaluate the address expression and consume the effective address, but it
        // will neither load nor store. This is an escaping use, because now the address may be
        // passed along to who-knows-where. Note that this isn't really a Use of the Arg, but it does
        // imply that we're Use'ing any registers that the Arg contains.
        UseAddr
    };

    // Whether an immediate should be interpreted as signed or unsigned when checking
    // representability or narrowing it to a given Width.
    enum Signedness : int8_t {
        Signed,
        Unsigned
    };
192 | |
193 | // Returns true if the Role implies that the Inst will Use the Arg. It's deliberately false for |
194 | // UseAddr, since isAnyUse() for an Arg::addr means that we are loading from the address. |
195 | static bool isAnyUse(Role role) |
196 | { |
197 | switch (role) { |
198 | case Use: |
199 | case ColdUse: |
200 | case UseDef: |
201 | case UseZDef: |
202 | case LateUse: |
203 | case LateColdUse: |
204 | case Scratch: |
205 | return true; |
206 | case Def: |
207 | case ZDef: |
208 | case UseAddr: |
209 | case EarlyDef: |
210 | case EarlyZDef: |
211 | return false; |
212 | } |
213 | ASSERT_NOT_REACHED(); |
214 | } |
215 | |
216 | static bool isColdUse(Role role) |
217 | { |
218 | switch (role) { |
219 | case ColdUse: |
220 | case LateColdUse: |
221 | return true; |
222 | case Use: |
223 | case UseDef: |
224 | case UseZDef: |
225 | case LateUse: |
226 | case Def: |
227 | case ZDef: |
228 | case UseAddr: |
229 | case Scratch: |
230 | case EarlyDef: |
231 | case EarlyZDef: |
232 | return false; |
233 | } |
234 | ASSERT_NOT_REACHED(); |
235 | } |
236 | |
237 | static bool isWarmUse(Role role) |
238 | { |
239 | return isAnyUse(role) && !isColdUse(role); |
240 | } |
241 | |
    // Returns the cold variant of the given role: Use becomes ColdUse and LateUse becomes
    // LateColdUse. Roles that are already cold, or that aren't warm uses, are returned unchanged.
    static Role cooled(Role role)
    {
        switch (role) {
        case ColdUse:
        case LateColdUse:
        case UseDef:
        case UseZDef:
        case Def:
        case ZDef:
        case UseAddr:
        case Scratch:
        case EarlyDef:
        case EarlyZDef:
            return role;
        case Use:
            return ColdUse;
        case LateUse:
            return LateColdUse;
        }
        ASSERT_NOT_REACHED();
    }
263 | |
264 | static Temperature temperature(Role role) |
265 | { |
266 | return isColdUse(role) ? Cold : Warm; |
267 | } |
268 | |
    // Returns true if an operand with this Role is read or written at the given Phase.
    // Early-only roles (uses, early defs, UseAddr) are active at Early; late-only roles
    // (late uses, defs) at Late; combined roles (UseDef, UseZDef, Scratch) at both.
    static bool activeAt(Role role, Phase phase)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case EarlyDef:
        case EarlyZDef:
        case UseAddr:
            return phase == Early;
        case LateUse:
        case LateColdUse:
        case Def:
        case ZDef:
            return phase == Late;
        case UseDef:
        case UseZDef:
        case Scratch:
            return true;
        }
        ASSERT_NOT_REACHED();
    }
290 | |
291 | static bool activeAt(Timing timing, Phase phase) |
292 | { |
293 | switch (timing) { |
294 | case OnlyEarly: |
295 | return phase == Early; |
296 | case OnlyLate: |
297 | return phase == Late; |
298 | case EarlyAndLate: |
299 | return true; |
300 | } |
301 | ASSERT_NOT_REACHED(); |
302 | } |
303 | |
    // Collapses a Role to its Timing, i.e. the set of phases at which it is active.
    // This agrees with activeAt(Role, Phase) above.
    static Timing timing(Role role)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case EarlyDef:
        case EarlyZDef:
        case UseAddr:
            return OnlyEarly;
        case LateUse:
        case LateColdUse:
        case Def:
        case ZDef:
            return OnlyLate;
        case UseDef:
        case UseZDef:
        case Scratch:
            return EarlyAndLate;
        }
        ASSERT_NOT_REACHED();
    }
325 | |
    // Invokes func(Early) and/or func(Late), for each phase at which the given Timing is active.
    template<typename Func>
    static void forEachPhase(Timing timing, const Func& func)
    {
        if (activeAt(timing, Early))
            func(Early);
        if (activeAt(timing, Late))
            func(Late);
    }

    // Invokes func(Early) and/or func(Late), for each phase at which the given Role is active.
    template<typename Func>
    static void forEachPhase(Role role, const Func& func)
    {
        if (activeAt(role, Early))
            func(Early);
        if (activeAt(role, Late))
            func(Late);
    }
343 | |
344 | // Returns true if the Role implies that the Inst will Use the Arg before doing anything else. |
345 | static bool isEarlyUse(Role role) |
346 | { |
347 | switch (role) { |
348 | case Use: |
349 | case ColdUse: |
350 | case UseDef: |
351 | case UseZDef: |
352 | return true; |
353 | case Def: |
354 | case ZDef: |
355 | case UseAddr: |
356 | case LateUse: |
357 | case LateColdUse: |
358 | case Scratch: |
359 | case EarlyDef: |
360 | case EarlyZDef: |
361 | return false; |
362 | } |
363 | ASSERT_NOT_REACHED(); |
364 | } |
365 | |
    // Returns true if the Role implies that the Inst will Use the Arg after doing everything else.
    // Scratch counts as a late use: a scratch temporary is defined early and used late.
    static bool isLateUse(Role role)
    {
        switch (role) {
        case LateUse:
        case LateColdUse:
        case Scratch:
            return true;
        case ColdUse:
        case Use:
        case UseDef:
        case UseZDef:
        case Def:
        case ZDef:
        case UseAddr:
        case EarlyDef:
        case EarlyZDef:
            return false;
        }
        ASSERT_NOT_REACHED();
    }
387 | |
388 | // Returns true if the Role implies that the Inst will Def the Arg. |
389 | static bool isAnyDef(Role role) |
390 | { |
391 | switch (role) { |
392 | case Use: |
393 | case ColdUse: |
394 | case UseAddr: |
395 | case LateUse: |
396 | case LateColdUse: |
397 | return false; |
398 | case Def: |
399 | case UseDef: |
400 | case ZDef: |
401 | case UseZDef: |
402 | case EarlyDef: |
403 | case EarlyZDef: |
404 | case Scratch: |
405 | return true; |
406 | } |
407 | ASSERT_NOT_REACHED(); |
408 | } |
409 | |
    // Returns true if the Role implies that the Inst will Def the Arg before start of execution.
    // Scratch counts, since a scratch temporary is defined early.
    static bool isEarlyDef(Role role)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case UseAddr:
        case LateUse:
        case Def:
        case UseDef:
        case ZDef:
        case UseZDef:
        case LateColdUse:
            return false;
        case EarlyDef:
        case EarlyZDef:
        case Scratch:
            return true;
        }
        ASSERT_NOT_REACHED();
    }
431 | |
    // Returns true if the Role implies that the Inst will Def the Arg after the end of execution.
    // These are exactly the non-early defs: Def, ZDef and their combined-with-Use variants.
    static bool isLateDef(Role role)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case UseAddr:
        case LateUse:
        case EarlyDef:
        case EarlyZDef:
        case Scratch:
        case LateColdUse:
            return false;
        case Def:
        case UseDef:
        case ZDef:
        case UseZDef:
            return true;
        }
        ASSERT_NOT_REACHED();
    }
453 | |
    // Returns true if the Role implies that the Inst will ZDef the Arg, i.e. a def that
    // zero-fills the upper bits (see the ZDef comment in the Role enum).
    static bool isZDef(Role role)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case UseAddr:
        case LateUse:
        case Def:
        case UseDef:
        case EarlyDef:
        case Scratch:
        case LateColdUse:
            return false;
        case ZDef:
        case UseZDef:
        case EarlyZDef:
            return true;
        }
        ASSERT_NOT_REACHED();
    }
475 | |
    // Default-constructs an invalid Arg. An Invalid Arg is falsy (see operator bool below).
    Arg()
        : m_kind(Invalid)
    {
    }

    // Constructs a Tmp-kind Arg from a temporary (or register wrapped in a Tmp).
    Arg(Air::Tmp tmp)
        : m_kind(Tmp)
        , m_base(tmp)
    {
    }

    // Constructs a Tmp-kind Arg directly from a register, by delegating to the Tmp constructor.
    Arg(Reg reg)
        : Arg(Air::Tmp(reg))
    {
    }
491 | |
    // Creates an Imm: an immediate that can be inlined into most instructions.
    static Arg imm(int64_t value)
    {
        Arg result;
        result.m_kind = Imm;
        result.m_offset = value;
        return result;
    }

    // Creates a BigImm: a constant materialization, usually only usable with Move.
    static Arg bigImm(int64_t value)
    {
        Arg result;
        result.m_kind = BigImm;
        result.m_offset = value;
        return result;
    }

    // Creates a BitImm: an immediate for bitwise operations (And, Xor, etc).
    static Arg bitImm(int64_t value)
    {
        Arg result;
        result.m_kind = BitImm;
        result.m_offset = value;
        return result;
    }

    // Creates a BitImm64: the 64-bit variant of BitImm.
    static Arg bitImm64(int64_t value)
    {
        Arg result;
        result.m_kind = BitImm64;
        result.m_offset = value;
        return result;
    }

    // Creates a BigImm holding the bits of a pointer.
    static Arg immPtr(const void* address)
    {
        return bigImm(bitwise_cast<intptr_t>(address));
    }
528 | |
    // Creates a SimpleAddr: an address that is just a GP base pointer with no offset.
    static Arg simpleAddr(Air::Tmp ptr)
    {
        ASSERT(ptr.isGP());
        Arg result;
        result.m_kind = SimpleAddr;
        result.m_base = ptr;
        return result;
    }

    // Creates an Addr: a GP base plus an immediate offset.
    template<typename Int, typename = Value::IsLegalOffset<Int>>
    static Arg addr(Air::Tmp base, Int offset)
    {
        ASSERT(base.isGP());
        Arg result;
        result.m_kind = Addr;
        result.m_base = base;
        result.m_offset = offset;
        return result;
    }

    // Creates an ExtendedOffsetAddr: an address based off the frame pointer. The base is
    // always FP; only the offset is supplied.
    template<typename Int, typename = Value::IsLegalOffset<Int>>
    static Arg extendedOffsetAddr(Int offsetFromFP)
    {
        Arg result;
        result.m_kind = ExtendedOffsetAddr;
        result.m_base = Air::Tmp(MacroAssembler::framePointerRegister);
        result.m_offset = offsetFromFP;
        return result;
    }

    // Convenience overload: Addr with a zero offset.
    static Arg addr(Air::Tmp base)
    {
        return addr(base, 0);
    }
563 | |
    // Creates a Stack reference: a StackSlot plus an offset within it. Note the field reuse:
    // m_offset holds the StackSlot pointer, so the offset has to live in m_scale instead
    // (offset() below undoes this).
    template<typename Int, typename = Value::IsLegalOffset<Int>>
    static Arg stack(StackSlot* value, Int offset)
    {
        Arg result;
        result.m_kind = Stack;
        result.m_offset = bitwise_cast<intptr_t>(value);
        result.m_scale = offset; // I know, yuck: m_offset is taken by the slot pointer.
        return result;
    }

    // Convenience overload: Stack reference with a zero offset.
    static Arg stack(StackSlot* value)
    {
        return stack(value, 0);
    }

    // Creates a CallArg: an offset-only stack reference. NOTE(review): presumably relative to
    // the outgoing call-frame area — confirm against the users of CallArg.
    template<typename Int, typename = Value::IsLegalOffset<Int>>
    static Arg callArg(Int offset)
    {
        Arg result;
        result.m_kind = CallArg;
        result.m_offset = offset;
        return result;
    }
587 | |
588 | // If you don't pass a Width, this optimistically assumes that you're using the right width. |
589 | static bool isValidScale(unsigned scale, Optional<Width> width = WTF::nullopt) |
590 | { |
591 | switch (scale) { |
592 | case 1: |
593 | if (isX86() || isARM64()) |
594 | return true; |
595 | return false; |
596 | case 2: |
597 | case 4: |
598 | case 8: |
599 | if (isX86()) |
600 | return true; |
601 | if (isARM64()) { |
602 | if (!width) |
603 | return true; |
604 | return scale == 1 || scale == bytes(*width); |
605 | } |
606 | return false; |
607 | default: |
608 | return false; |
609 | } |
610 | } |
611 | |
612 | static unsigned logScale(unsigned scale) |
613 | { |
614 | switch (scale) { |
615 | case 1: |
616 | return 0; |
617 | case 2: |
618 | return 1; |
619 | case 4: |
620 | return 2; |
621 | case 8: |
622 | return 3; |
623 | default: |
624 | ASSERT_NOT_REACHED(); |
625 | return 0; |
626 | } |
627 | } |
628 | |
    // Creates an Index address: base + index * scale + offset. Both base and index must be GP,
    // and the scale must be valid for the target (checked optimistically, without a Width).
    template<typename Int, typename = Value::IsLegalOffset<Int>>
    static Arg index(Air::Tmp base, Air::Tmp index, unsigned scale, Int offset)
    {
        ASSERT(base.isGP());
        ASSERT(index.isGP());
        ASSERT(isValidScale(scale));
        Arg result;
        result.m_kind = Index;
        result.m_base = base;
        result.m_index = index;
        result.m_scale = static_cast<int32_t>(scale);
        result.m_offset = offset;
        return result;
    }

    // Convenience overload: Index with a zero offset (and scale 1 by default).
    static Arg index(Air::Tmp base, Air::Tmp index, unsigned scale = 1)
    {
        return Arg::index(base, index, scale, 0);
    }
648 | |
    // The condition factories below wrap a MacroAssembler condition code as an Arg,
    // storing the condition value in m_offset.

    // Creates a RelCond from a relational (compare-two-values) condition.
    static Arg relCond(MacroAssembler::RelationalCondition condition)
    {
        Arg result;
        result.m_kind = RelCond;
        result.m_offset = condition;
        return result;
    }

    // Creates a ResCond from a result (test-the-result) condition.
    static Arg resCond(MacroAssembler::ResultCondition condition)
    {
        Arg result;
        result.m_kind = ResCond;
        result.m_offset = condition;
        return result;
    }

    // Creates a DoubleCond from a floating-point comparison condition.
    static Arg doubleCond(MacroAssembler::DoubleCondition condition)
    {
        Arg result;
        result.m_kind = DoubleCond;
        result.m_offset = condition;
        return result;
    }

    // Creates a StatusCond from a status-flag condition.
    static Arg statusCond(MacroAssembler::StatusCondition condition)
    {
        Arg result;
        result.m_kind = StatusCond;
        result.m_offset = condition;
        return result;
    }

    // Creates a Special arg; the Special pointer is stored in m_offset.
    static Arg special(Air::Special* special)
    {
        Arg result;
        result.m_kind = Special;
        result.m_offset = special ? bitwise_cast<intptr_t>(special) : bitwise_cast<intptr_t>(special);
        return result;
    }

    // Creates a WidthArg: a Width carried as an immediate operand.
    static Arg widthArg(Width width)
    {
        Arg result;
        result.m_kind = WidthArg;
        result.m_offset = width;
        return result;
    }
696 | |
    // Structural equality: compares every field, regardless of kind.
    // NOTE(review): this assumes fields not used by a given kind hold their default values
    // (the factories above never touch them) — confirm against the member initializers.
    bool operator==(const Arg& other) const
    {
        return m_offset == other.m_offset
            && m_kind == other.m_kind
            && m_base == other.m_base
            && m_index == other.m_index
            && m_scale == other.m_scale;
    }

    bool operator!=(const Arg& other) const
    {
        return !(*this == other);
    }

    // An Arg is truthy iff it is not the default-constructed (Invalid) Arg.
    explicit operator bool() const { return *this != Arg(); }
712 | |
    Kind kind() const
    {
        return m_kind;
    }

    // Kind predicates: one per Kind, plus a few grouping predicates below.

    bool isTmp() const
    {
        return kind() == Tmp;
    }

    bool isImm() const
    {
        return kind() == Imm;
    }

    bool isBigImm() const
    {
        return kind() == BigImm;
    }

    bool isBitImm() const
    {
        return kind() == BitImm;
    }

    bool isBitImm64() const
    {
        return kind() == BitImm64;
    }

    // True for any of the immediate kinds (Imm, BigImm, BitImm, BitImm64).
    bool isSomeImm() const
    {
        switch (kind()) {
        case Imm:
        case BigImm:
        case BitImm:
        case BitImm64:
            return true;
        default:
            return false;
        }
    }

    bool isSimpleAddr() const
    {
        return kind() == SimpleAddr;
    }

    bool isAddr() const
    {
        return kind() == Addr;
    }

    bool isExtendedOffsetAddr() const
    {
        return kind() == ExtendedOffsetAddr;
    }

    bool isStack() const
    {
        return kind() == Stack;
    }

    bool isCallArg() const
    {
        return kind() == CallArg;
    }

    bool isIndex() const
    {
        return kind() == Index;
    }

    // True for any of the address kinds (the ones instructions can load from / store to).
    bool isMemory() const
    {
        switch (kind()) {
        case SimpleAddr:
        case Addr:
        case ExtendedOffsetAddr:
        case Stack:
        case CallArg:
        case Index:
            return true;
        default:
            return false;
        }
    }
800 | |
    // Returns true if this is an idiomatic stack reference. It may return false for some kinds of
    // stack references. The following idioms are recognized:
    // - the Stack kind
    // - the CallArg kind
    // - the ExtendedOffsetAddr kind
    // - the Addr kind with the base being either SP or FP
    // Callers of this function are allowed to expect that if it returns true, then it must be one of
    // these easy-to-recognize kinds. So, making this function recognize more kinds could break things.
    bool isStackMemory() const;

    bool isRelCond() const
    {
        return kind() == RelCond;
    }

    bool isResCond() const
    {
        return kind() == ResCond;
    }

    bool isDoubleCond() const
    {
        return kind() == DoubleCond;
    }

    bool isStatusCond() const
    {
        return kind() == StatusCond;
    }

    // True for any of the condition kinds.
    bool isCondition() const
    {
        switch (kind()) {
        case RelCond:
        case ResCond:
        case DoubleCond:
        case StatusCond:
            return true;
        default:
            return false;
        }
    }

    bool isSpecial() const
    {
        return kind() == Special;
    }

    bool isWidthArg() const
    {
        return kind() == WidthArg;
    }

    // True for the kinds that participate in liveness: temporaries and stack slots.
    bool isAlive() const
    {
        return isTmp() || isStack();
    }
858 | |
    // The temporary held by a Tmp-kind Arg. Asserts the kind.
    Air::Tmp tmp() const
    {
        ASSERT(kind() == Tmp);
        return m_base;
    }

    // The raw value of any immediate kind (Imm, BigImm, BitImm, BitImm64).
    int64_t value() const
    {
        ASSERT(isSomeImm());
        return m_offset;
    }

    // True if this immediate's value fits in T without loss.
    template<typename T>
    bool isRepresentableAs() const
    {
        return B3::isRepresentableAs<T>(value());
    }

    // True if the given value fits in the integer type selected by (width, signedness).
    static bool isRepresentableAs(Width width, Signedness signedness, int64_t value)
    {
        switch (signedness) {
        case Signed:
            switch (width) {
            case Width8:
                return B3::isRepresentableAs<int8_t>(value);
            case Width16:
                return B3::isRepresentableAs<int16_t>(value);
            case Width32:
                return B3::isRepresentableAs<int32_t>(value);
            case Width64:
                return B3::isRepresentableAs<int64_t>(value);
            }
            // Unreachable: the inner switch returns in every case. The assert keeps the
            // compiler happy about falling out of the Signed case.
            RELEASE_ASSERT_NOT_REACHED();
        case Unsigned:
            switch (width) {
            case Width8:
                return B3::isRepresentableAs<uint8_t>(value);
            case Width16:
                return B3::isRepresentableAs<uint16_t>(value);
            case Width32:
                return B3::isRepresentableAs<uint32_t>(value);
            case Width64:
                return B3::isRepresentableAs<uint64_t>(value);
            }
        }
        RELEASE_ASSERT_NOT_REACHED();
    }
906 | |
    // Instance variant of the static check above, applied to this immediate's value.
    bool isRepresentableAs(Width, Signedness) const;

    // Narrows value to the integer type selected by (width, signedness) and returns the
    // result widened back to int64_t (so unsigned narrowing zero-extends, signed narrowing
    // sign-extends).
    static int64_t castToType(Width width, Signedness signedness, int64_t value)
    {
        switch (signedness) {
        case Signed:
            switch (width) {
            case Width8:
                return static_cast<int8_t>(value);
            case Width16:
                return static_cast<int16_t>(value);
            case Width32:
                return static_cast<int32_t>(value);
            case Width64:
                return static_cast<int64_t>(value);
            }
            // Unreachable: the inner switch returns in every case.
            RELEASE_ASSERT_NOT_REACHED();
        case Unsigned:
            switch (width) {
            case Width8:
                return static_cast<uint8_t>(value);
            case Width16:
                return static_cast<uint16_t>(value);
            case Width32:
                return static_cast<uint32_t>(value);
            case Width64:
                return static_cast<uint64_t>(value);
            }
        }
        RELEASE_ASSERT_NOT_REACHED();
    }
938 | |
    // This immediate's value converted to T (plain static_cast; may narrow).
    template<typename T>
    T asNumber() const
    {
        return static_cast<T>(value());
    }

    // The pointer encoded in a BigImm (the inverse of immPtr()).
    void* pointerValue() const
    {
        ASSERT(kind() == BigImm);
        return bitwise_cast<void*>(static_cast<intptr_t>(m_offset));
    }

    // The base pointer of a SimpleAddr.
    Air::Tmp ptr() const
    {
        ASSERT(kind() == SimpleAddr);
        return m_base;
    }

    // The base register/temporary of any base-carrying address kind.
    Air::Tmp base() const
    {
        ASSERT(kind() == SimpleAddr || kind() == Addr || kind() == ExtendedOffsetAddr || kind() == Index);
        return m_base;
    }

    // All memory kinds carry an offset (SimpleAddr's is implicitly zero via offset() callers).
    bool hasOffset() const { return isMemory(); }

    // The offset of an address kind. For Stack the offset lives in m_scale (see stack()),
    // because m_offset holds the StackSlot pointer; all other kinds keep it in m_offset.
    Value::OffsetType offset() const
    {
        if (kind() == Stack)
            return static_cast<Value::OffsetType>(m_scale);
        ASSERT(kind() == Addr || kind() == ExtendedOffsetAddr || kind() == CallArg || kind() == Index);
        return static_cast<Value::OffsetType>(m_offset);
    }

    // The StackSlot of a Stack arg, decoded from m_offset (see stack()).
    StackSlot* stackSlot() const
    {
        ASSERT(kind() == Stack);
        return bitwise_cast<StackSlot*>(static_cast<uintptr_t>(m_offset));
    }

    // The index temporary of an Index address.
    Air::Tmp index() const
    {
        ASSERT(kind() == Index);
        return m_index;
    }

    // The scale of an Index address (1, 2, 4 or 8 — see isValidScale()).
    unsigned scale() const
    {
        ASSERT(kind() == Index);
        return m_scale;
    }

    // log2 of this Index's scale.
    unsigned logScale() const
    {
        return logScale(scale());
    }

    // The Special of a Special arg, decoded from m_offset (see special()).
    Air::Special* special() const
    {
        ASSERT(kind() == Special);
        return bitwise_cast<Air::Special*>(static_cast<uintptr_t>(m_offset));
    }

    // The Width carried by a WidthArg.
    Width width() const
    {
        ASSERT(kind() == WidthArg);
        return static_cast<Width>(m_offset);
    }
1007 | |
    bool isGPTmp() const
    {
        return isTmp() && tmp().isGP();
    }

    bool isFPTmp() const
    {
        return isTmp() && tmp().isFP();
    }

    // Tells us if this Arg can be used in a position that requires a GP value.
    // Every non-Tmp, non-Invalid kind qualifies; a Tmp must itself be GP.
    bool isGP() const
    {
        switch (kind()) {
        case Imm:
        case BigImm:
        case BitImm:
        case BitImm64:
        case SimpleAddr:
        case Addr:
        case ExtendedOffsetAddr:
        case Index:
        case Stack:
        case CallArg:
        case RelCond:
        case ResCond:
        case DoubleCond:
        case StatusCond:
        case Special:
        case WidthArg:
            return true;
        case Tmp:
            return isGPTmp();
        case Invalid:
            return false;
        }
        ASSERT_NOT_REACHED();
    }

    // Tells us if this Arg can be used in a position that requires a FP value.
    bool isFP() const
    {
        switch (kind()) {
        case Imm:
        case BitImm:
        case BitImm64:
        case RelCond:
        case ResCond:
        case DoubleCond:
        case StatusCond:
        case Special:
        case WidthArg:
        case Invalid:
            return false;
        case SimpleAddr:
        case Addr:
        case ExtendedOffsetAddr:
        case Index:
        case Stack:
        case CallArg:
        case BigImm: // Yes, we allow BigImm as a double immediate. We use this for implementing stackmaps.
            return true;
        case Tmp:
            return isFPTmp();
        }
        ASSERT_NOT_REACHED();
    }

    // True if bank() is unambiguous for this kind. Note that BigImm and the address kinds are
    // deliberately excluded: they are valid in both GP and FP positions (see isGP()/isFP()).
    bool hasBank() const
    {
        switch (kind()) {
        case Imm:
        case BitImm:
        case BitImm64:
        case Special:
        case Tmp:
            return true;
        default:
            return false;
        }
    }

    // The type is ambiguous for some arg kinds. Call with care: for ambiguous kinds,
    // GP wins (isGP() is checked first).
    Bank bank() const
    {
        return isGP() ? GP : FP;
    }

    // Dispatches to isGP()/isFP() for the given bank.
    bool isBank(Bank bank) const
    {
        switch (bank) {
        case GP:
            return isGP();
        case FP:
            return isFP();
        }
        ASSERT_NOT_REACHED();
    }
1106 | |
    // Defined out of line; tell us whether this Arg can hold a value of the given B3 type
    // (respectively, the type of the given Value).
    bool canRepresent(Type) const;
    bool canRepresent(Value* value) const;

    // Defined out of line; tells us whether this Arg and other can legally appear in the
    // same bank-constrained position.
    bool isCompatibleBank(const Arg& other) const;

    bool isGPR() const
    {
        return isTmp() && tmp().isGPR();
    }

    // The GPR of a Tmp-kind Arg. tmp() asserts the kind; the Tmp itself is expected to be a GPR.
    GPRReg gpr() const
    {
        return tmp().gpr();
    }

    bool isFPR() const
    {
        return isTmp() && tmp().isFPR();
    }

    // The FPR of a Tmp-kind Arg; see gpr().
    FPRReg fpr() const
    {
        return tmp().fpr();
    }

    bool isReg() const
    {
        return isTmp() && tmp().isReg();
    }

    // The assigned register of a Tmp-kind Arg; see gpr().
    Reg reg() const
    {
        return tmp().reg();
    }

    // Index accessors delegating to Tmp; valid only for Tmp-kind Args (tmp() asserts).
    unsigned gpTmpIndex() const
    {
        return tmp().gpTmpIndex();
    }

    unsigned fpTmpIndex() const
    {
        return tmp().fpTmpIndex();
    }

    unsigned tmpIndex() const
    {
        return tmp().tmpIndex();
    }
1156 | |
    // True if the value can be used as an inline Imm on the current target:
    // x86 takes any int32; ARM64 takes an unsigned 12-bit immediate.
    static bool isValidImmForm(int64_t value)
    {
        if (isX86())
            return B3::isRepresentableAs<int32_t>(value);
        if (isARM64())
            return isUInt12(value);
        return false;
    }

    // True if the value can be used as a BitImm (bitwise-op immediate):
    // x86 takes any int32; ARM64 requires a valid 32-bit logical immediate encoding.
    static bool isValidBitImmForm(int64_t value)
    {
        if (isX86())
            return B3::isRepresentableAs<int32_t>(value);
        if (isARM64())
            return ARM64LogicalImmediate::create32(value).isValid();
        return false;
    }

    // Like isValidBitImmForm, but for the 64-bit logical immediate encoding.
    static bool isValidBitImm64Form(int64_t value)
    {
        if (isX86())
            return B3::isRepresentableAs<int32_t>(value);
        if (isARM64())
            return ARM64LogicalImmediate::create64(value).isValid();
        return false;
    }
1183 | |
1184 | template<typename Int, typename = Value::IsLegalOffset<Int>> |
1185 | static bool isValidAddrForm(Int offset, Optional<Width> width = WTF::nullopt) |
1186 | { |
1187 | if (isX86()) |
1188 | return true; |
1189 | if (isARM64()) { |
1190 | if (!width) |
1191 | return true; |
1192 | |
1193 | if (isValidSignedImm9(offset)) |
1194 | return true; |
1195 | |
1196 | switch (*width) { |
1197 | case Width8: |
1198 | return isValidScaledUImm12<8>(offset); |
1199 | case Width16: |
1200 | return isValidScaledUImm12<16>(offset); |
1201 | case Width32: |
1202 | return isValidScaledUImm12<32>(offset); |
1203 | case Width64: |
1204 | return isValidScaledUImm12<64>(offset); |
1205 | } |
1206 | } |
1207 | return false; |
1208 | } |
1209 | |
1210 | template<typename Int, typename = Value::IsLegalOffset<Int>> |
1211 | static bool isValidIndexForm(unsigned scale, Int offset, Optional<Width> width = WTF::nullopt) |
1212 | { |
1213 | if (!isValidScale(scale, width)) |
1214 | return false; |
1215 | if (isX86()) |
1216 | return true; |
1217 | if (isARM64()) |
1218 | return !offset; |
1219 | return false; |
1220 | } |
1221 | |
// If you don't pass a width then this optimistically assumes that you're using the right width. But
// the width is relevant to validity, so passing a null width is only useful for assertions. Don't
// pass null widths when cascading through Args in the instruction selector!
bool isValidForm(Optional<Width> width = WTF::nullopt) const
{
    switch (kind()) {
    case Invalid:
        return false;
    case Tmp:
        return true;
    case Imm:
        // Small immediates are target-dependent; see isValidImmForm().
        return isValidImmForm(value());
    case BigImm:
        return true;
    case BitImm:
        return isValidBitImmForm(value());
    case BitImm64:
        return isValidBitImm64Form(value());
    case SimpleAddr:
    case ExtendedOffsetAddr:
        // These kinds carry no encodable-offset constraint of their own.
        return true;
    case Addr:
    case Stack:
    case CallArg:
        // All three are validated as base+offset addressing.
        return isValidAddrForm(offset(), width);
    case Index:
        return isValidIndexForm(scale(), offset(), width);
    case RelCond:
    case ResCond:
    case DoubleCond:
    case StatusCond:
    case Special:
    case WidthArg:
        return true;
    }
    // Unreachable for a well-formed kind; return-type warning is suppressed above when
    // ASSERT is disabled.
    ASSERT_NOT_REACHED();
}
1259 | |
1260 | template<typename Functor> |
1261 | void forEachTmpFast(const Functor& functor) |
1262 | { |
1263 | switch (m_kind) { |
1264 | case Tmp: |
1265 | case SimpleAddr: |
1266 | case Addr: |
1267 | case ExtendedOffsetAddr: |
1268 | functor(m_base); |
1269 | break; |
1270 | case Index: |
1271 | functor(m_base); |
1272 | functor(m_index); |
1273 | break; |
1274 | default: |
1275 | break; |
1276 | } |
1277 | } |
1278 | |
// Whether this Arg mentions the given tmp (as the tmp itself, a base, or an index).
// Defined out-of-line.
bool usesTmp(Air::Tmp tmp) const;

// Type-driven accessors used by the generic forEach machinery. Defined out-of-line.
template<typename Thing>
bool is() const;

template<typename Thing>
Thing as() const;

// Fast iteration over things of the given type, without role/bank/width information.
template<typename Thing, typename Functor>
void forEachFast(const Functor&);

// Iteration over things of the given type, with role/bank/width supplied to the functor.
template<typename Thing, typename Functor>
void forEach(Role, Bank, Width, const Functor&);
1292 | |
1293 | // This is smart enough to know that an address arg in a Def or UseDef rule will use its |
1294 | // tmps and never def them. For example, this: |
1295 | // |
1296 | // mov %rax, (%rcx) |
1297 | // |
1298 | // This defs (%rcx) but uses %rcx. |
1299 | template<typename Functor> |
1300 | void forEachTmp(Role argRole, Bank argBank, Width argWidth, const Functor& functor) |
1301 | { |
1302 | switch (m_kind) { |
1303 | case Tmp: |
1304 | ASSERT(isAnyUse(argRole) || isAnyDef(argRole)); |
1305 | functor(m_base, argRole, argBank, argWidth); |
1306 | break; |
1307 | case SimpleAddr: |
1308 | case Addr: |
1309 | case ExtendedOffsetAddr: |
1310 | functor(m_base, Use, GP, argRole == UseAddr ? argWidth : pointerWidth()); |
1311 | break; |
1312 | case Index: |
1313 | functor(m_base, Use, GP, argRole == UseAddr ? argWidth : pointerWidth()); |
1314 | functor(m_index, Use, GP, argRole == UseAddr ? argWidth : pointerWidth()); |
1315 | break; |
1316 | default: |
1317 | break; |
1318 | } |
1319 | } |
1320 | |
1321 | MacroAssembler::TrustedImm32 asTrustedImm32() const |
1322 | { |
1323 | ASSERT(isImm() || isBitImm()); |
1324 | return MacroAssembler::TrustedImm32(static_cast<Value::OffsetType>(m_offset)); |
1325 | } |
1326 | |
#if USE(JSVALUE64)
// Materializes this BigImm/BitImm64 as a 64-bit trusted immediate (64-bit targets only).
MacroAssembler::TrustedImm64 asTrustedImm64() const
{
    ASSERT(isBigImm() || isBitImm64());
    return MacroAssembler::TrustedImm64(value());
}
#endif
1334 | |
1335 | MacroAssembler::TrustedImmPtr asTrustedImmPtr() const |
1336 | { |
1337 | if (is64Bit()) |
1338 | ASSERT(isBigImm()); |
1339 | else |
1340 | ASSERT(isImm()); |
1341 | return MacroAssembler::TrustedImmPtr(pointerValue()); |
1342 | } |
1343 | |
1344 | MacroAssembler::Address asAddress() const |
1345 | { |
1346 | if (isSimpleAddr()) |
1347 | return MacroAssembler::Address(m_base.gpr()); |
1348 | ASSERT(isAddr() || isExtendedOffsetAddr()); |
1349 | return MacroAssembler::Address(m_base.gpr(), static_cast<Value::OffsetType>(m_offset)); |
1350 | } |
1351 | |
1352 | MacroAssembler::BaseIndex asBaseIndex() const |
1353 | { |
1354 | ASSERT(isIndex()); |
1355 | return MacroAssembler::BaseIndex( |
1356 | m_base.gpr(), m_index.gpr(), static_cast<MacroAssembler::Scale>(logScale()), |
1357 | static_cast<Value::OffsetType>(m_offset)); |
1358 | } |
1359 | |
// Decodes the condition payload (stored in m_offset) for a RelCond arg.
MacroAssembler::RelationalCondition asRelationalCondition() const
{
    ASSERT(isRelCond());
    return static_cast<MacroAssembler::RelationalCondition>(m_offset);
}
1365 | |
// Decodes the condition payload (stored in m_offset) for a ResCond arg.
MacroAssembler::ResultCondition asResultCondition() const
{
    ASSERT(isResCond());
    return static_cast<MacroAssembler::ResultCondition>(m_offset);
}
1371 | |
// Decodes the condition payload (stored in m_offset) for a DoubleCond arg.
MacroAssembler::DoubleCondition asDoubleCondition() const
{
    ASSERT(isDoubleCond());
    return static_cast<MacroAssembler::DoubleCondition>(m_offset);
}
1377 | |
// Decodes the condition payload (stored in m_offset) for a StatusCond arg.
MacroAssembler::StatusCondition asStatusCondition() const
{
    ASSERT(isStatusCond());
    return static_cast<MacroAssembler::StatusCondition>(m_offset);
}
1383 | |
1384 | // Tells you if the Arg is invertible. Only condition arguments are invertible, and even for those, there |
1385 | // are a few exceptions - notably Overflow and Signed. |
1386 | bool isInvertible() const |
1387 | { |
1388 | switch (kind()) { |
1389 | case RelCond: |
1390 | case DoubleCond: |
1391 | case StatusCond: |
1392 | return true; |
1393 | case ResCond: |
1394 | return MacroAssembler::isInvertible(asResultCondition()); |
1395 | default: |
1396 | return false; |
1397 | } |
1398 | } |
1399 | |
1400 | // This is valid for condition arguments. It will invert them. |
1401 | Arg inverted(bool inverted = true) const |
1402 | { |
1403 | if (!inverted) |
1404 | return *this; |
1405 | switch (kind()) { |
1406 | case RelCond: |
1407 | return relCond(MacroAssembler::invert(asRelationalCondition())); |
1408 | case ResCond: |
1409 | return resCond(MacroAssembler::invert(asResultCondition())); |
1410 | case DoubleCond: |
1411 | return doubleCond(MacroAssembler::invert(asDoubleCondition())); |
1412 | case StatusCond: |
1413 | return statusCond(MacroAssembler::invert(asStatusCondition())); |
1414 | default: |
1415 | RELEASE_ASSERT_NOT_REACHED(); |
1416 | return Arg(); |
1417 | } |
1418 | } |
1419 | |
1420 | Arg flipped(bool flipped = true) const |
1421 | { |
1422 | if (!flipped) |
1423 | return Arg(); |
1424 | return relCond(MacroAssembler::flip(asRelationalCondition())); |
1425 | } |
1426 | |
1427 | bool isSignedCond() const |
1428 | { |
1429 | return isRelCond() && MacroAssembler::isSigned(asRelationalCondition()); |
1430 | } |
1431 | |
1432 | bool isUnsignedCond() const |
1433 | { |
1434 | return isRelCond() && MacroAssembler::isUnsigned(asRelationalCondition()); |
1435 | } |
1436 | |
// This computes a hash for comparing this to JSAir's Arg. Defined out-of-line.
unsigned jsHash() const;

// Prints a human-readable representation of this Arg. Defined out-of-line.
void dump(PrintStream&) const;
1441 | |
// Constructs the hash-table "deleted" sentinel, distinguished by a deleted base tmp.
Arg(WTF::HashTableDeletedValueType)
    : m_base(WTF::HashTableDeletedValue)
{
}
1446 | |
1447 | bool isHashTableDeletedValue() const |
1448 | { |
1449 | return *this == Arg(WTF::HashTableDeletedValue); |
1450 | } |
1451 | |
1452 | unsigned hash() const |
1453 | { |
1454 | // This really doesn't have to be that great. |
1455 | return WTF::IntHash<int64_t>::hash(m_offset) + m_kind + m_scale + m_base.hash() + |
1456 | m_index.hash(); |
1457 | } |
1458 | |
private:
// Overloaded payload: address offset for address kinds, the immediate for Imm/BigImm/
// BitImm/BitImm64, and the condition code for the condition kinds.
int64_t m_offset { 0 };
Kind m_kind { Invalid };
// Scale for Index kind. Note this is 1 (not 0) in the empty value, which is why the
// HashTraits specialization below sets emptyValueIsZero to false.
int32_t m_scale { 1 };
Air::Tmp m_base;
Air::Tmp m_index;
1465 | }; |
1466 | |
// Hash functor so Arg can key WTF hash tables; forwards to Arg::hash()/operator==.
struct ArgHash {
    static unsigned hash(const Arg& key) { return key.hash(); }
    static bool equal(const Arg& a, const Arg& b) { return a == b; }
    static constexpr bool safeToCompareToEmptyOrDeleted = true;
};
1472 | |
1473 | } } } // namespace JSC::B3::Air |
1474 | |
1475 | namespace WTF { |
1476 | |
1477 | JS_EXPORT_PRIVATE void printInternal(PrintStream&, JSC::B3::Air::Arg::Kind); |
1478 | JS_EXPORT_PRIVATE void printInternal(PrintStream&, JSC::B3::Air::Arg::Temperature); |
1479 | JS_EXPORT_PRIVATE void printInternal(PrintStream&, JSC::B3::Air::Arg::Phase); |
1480 | JS_EXPORT_PRIVATE void printInternal(PrintStream&, JSC::B3::Air::Arg::Timing); |
1481 | JS_EXPORT_PRIVATE void printInternal(PrintStream&, JSC::B3::Air::Arg::Role); |
1482 | JS_EXPORT_PRIVATE void printInternal(PrintStream&, JSC::B3::Air::Arg::Signedness); |
1483 | |
// Makes ArgHash the default hash for Arg in WTF hash tables.
template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::B3::Air::Arg> {
    typedef JSC::B3::Air::ArgHash Hash;
};
1488 | |
// Hash traits for Arg. The empty value is not all-zero bytes, so tables must not
// memset-construct empty slots.
template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::B3::Air::Arg> : SimpleClassHashTraits<JSC::B3::Air::Arg> {
    // Because m_scale is 1 in the empty value.
    static constexpr bool emptyValueIsZero = false;
};
1494 | |
1495 | } // namespace WTF |
1496 | |
1497 | #if ASSERT_DISABLED |
1498 | IGNORE_RETURN_TYPE_WARNINGS_END |
1499 | #endif |
1500 | |
1501 | #endif // ENABLE(B3_JIT) |
1502 | |