1// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/globals.h" // NOLINT
6#if defined(TARGET_ARCH_IA32)
7
8#define SHOULD_NOT_INCLUDE_RUNTIME
9
10#include "vm/class_id.h"
11#include "vm/compiler/assembler/assembler.h"
12#include "vm/cpu.h"
13#include "vm/instructions.h"
14
15namespace dart {
16
17DECLARE_FLAG(bool, inline_alloc);
18DECLARE_FLAG(bool, use_slow_path);
19
20namespace compiler {
21
// Fixup applied when the buffer is finalized: converts the absolute target
// address stored at 'position' into the rel32 displacement that a direct
// x86 call expects.
class DirectCallRelocation : public AssemblerFixup {
 public:
  void Process(const MemoryRegion& region, intptr_t position) {
    // Direct calls are relative to the following instruction on x86.
    int32_t pointer = region.Load<int32_t>(position);
    // Address of the instruction following the 4-byte displacement.
    int32_t delta = region.start() + position + sizeof(int32_t);
    region.Store<int32_t>(position, pointer - delta);
  }

  // The patched value is a relative displacement, not an absolute pointer,
  // so it must not be treated as a pointer offset by the fixup machinery.
  virtual bool IsPointerOffset() const { return false; }
};
33
34int32_t Assembler::jit_cookie() {
35 if (jit_cookie_ == 0) {
36 jit_cookie_ = CreateJitCookie();
37 }
38 return jit_cookie_;
39}
40
// call reg — encoding FF /2.
void Assembler::call(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitRegisterOperand(2, reg);
}

// call [address] — encoding FF /2 with a memory operand.
void Assembler::call(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(2, address);
}

// call rel32 to a (possibly unbound) label — encoding E8 cd.
void Assembler::call(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xE8);
  // Total instruction size (opcode + rel32) needed to fix up the label.
  static const int kSize = 5;
  EmitLabel(label, kSize);
}

// call rel32 to an external address — E8 cd. The absolute address is
// emitted now; DirectCallRelocation rewrites it into a relative
// displacement when the buffer's final address is known.
void Assembler::call(const ExternalLabel* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  intptr_t call_start = buffer_.GetPosition();
  EmitUint8(0xE8);
  EmitFixup(new DirectCallRelocation());
  EmitInt32(label->address());
  ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
}
68
// push reg — single-byte encoding 50+rd.
void Assembler::pushl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x50 + reg);
}

// push [address] — encoding FF /6.
void Assembler::pushl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(6, address);
}

// push imm — uses the short imm8 form (6A) when the value fits in a
// signed byte, otherwise the imm32 form (68).
void Assembler::pushl(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (imm.is_int8()) {
    EmitUint8(0x6A);
    EmitUint8(imm.value() & 0xFF);
  } else {
    EmitUint8(0x68);
    EmitImmediate(imm);
  }
}

// pop reg — single-byte encoding 58+rd.
void Assembler::popl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x58 + reg);
}

// pop [address] — encoding 8F /0.
void Assembler::popl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8F);
  EmitOperand(0, address);
}

// pusha — push all general-purpose registers (IA-32 only), opcode 60.
void Assembler::pushal() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x60);
}

// popa — pop all general-purpose registers (IA-32 only), opcode 61.
void Assembler::popal() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x61);
}
111
// setcc dst — encoding 0F 90+cc with a register-direct ModRM byte;
// sets the byte register to 1 if 'condition' holds, else 0.
void Assembler::setcc(Condition condition, ByteRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x90 + condition);
  EmitUint8(0xC0 + dst);
}
118
// mov reg, imm32 — encoding B8+rd id.
void Assembler::movl(Register dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xB8 + dst);
  EmitImmediate(imm);
}

// mov reg, reg — uses the store form 89 /r (src in the reg field).
void Assembler::movl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x89);
  EmitRegisterOperand(src, dst);
}

// mov reg, [mem] — load form 8B /r.
void Assembler::movl(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8B);
  EmitOperand(dst, src);
}

// mov [mem], reg — store form 89 /r.
void Assembler::movl(const Address& dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x89);
  EmitOperand(src, dst);
}

// mov [mem], imm32 — encoding C7 /0 id.
void Assembler::movl(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitImmediate(imm);
}
149
// movzx r32, r8 — zero-extending byte load, encoding 0F B6 /r.
void Assembler::movzxb(Register dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitRegisterOperand(dst, src);
}

// movzx r32, m8 — zero-extending byte load from memory, 0F B6 /r.
void Assembler::movzxb(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitOperand(dst, src);
}

// movsx r32, r8 — sign-extending byte load, encoding 0F BE /r.
void Assembler::movsxb(Register dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitRegisterOperand(dst, src);
}

// movsx r32, m8 — sign-extending byte load from memory, 0F BE /r.
void Assembler::movsxb(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitOperand(dst, src);
}

// Deliberately unimplemented: a plain byte load into a 32-bit register
// would leave the upper bits in an unspecified state.
void Assembler::movb(Register dst, const Address& src) {
  // This would leave 24 bits above the 1 byte value undefined.
  // If we ever want to purposefully have those undefined, remove this.
  // TODO(dartbug.com/40210): Allow this.
  FATAL("Use movzxb or movsxb instead.");
}

// mov m8, r8 — byte store, encoding 88 /r.
void Assembler::movb(const Address& dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x88);
  EmitOperand(src, dst);
}

// mov m8, imm8 — encoding C6 /0 ib. EAX (value 0) supplies the /0
// opcode-extension field of the ModRM byte; it is not an operand.
void Assembler::movb(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC6);
  EmitOperand(EAX, dst);
  ASSERT(imm.is_int8());
  EmitUint8(imm.value() & 0xFF);
}
198
// movzx r32, r16 — zero-extending word load, encoding 0F B7 /r.
void Assembler::movzxw(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitRegisterOperand(dst, src);
}

// movzx r32, m16 — zero-extending word load from memory, 0F B7 /r.
void Assembler::movzxw(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitOperand(dst, src);
}

// movsx r32, r16 — sign-extending word load, encoding 0F BF /r.
void Assembler::movsxw(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitRegisterOperand(dst, src);
}

// movsx r32, m16 — sign-extending word load from memory, 0F BF /r.
void Assembler::movsxw(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitOperand(dst, src);
}

// Deliberately unimplemented: a plain word load into a 32-bit register
// would leave the upper bits in an unspecified state.
void Assembler::movw(Register dst, const Address& src) {
  // This would leave 16 bits above the 2 byte value undefined.
  // If we ever want to purposefully have those undefined, remove this.
  // TODO(dartbug.com/40210): Allow this.
  FATAL("Use movzxw or movsxw instead.");
}

// mov m16, r16 — word store: operand-size prefix (66) then 89 /r.
void Assembler::movw(const Address& dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0x89);
  EmitOperand(src, dst);
}

// mov m16, imm16 — 66 C7 /0 iw; the 16-bit immediate is emitted
// little-endian, one byte at a time.
void Assembler::movw(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
}
249
// lea r32, m — load effective address, encoding 8D /r.
void Assembler::leal(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8D);
  EmitOperand(dst, src);
}
255
// Move if not overflow — cmovno, encoding 0F 41 /r.
void Assembler::cmovno(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x41);
  EmitRegisterOperand(dst, src);
}

// Move if equal (ZF set) — cmove, encoding 0F 44 /r.
void Assembler::cmove(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x44);
  EmitRegisterOperand(dst, src);
}

// Move if not equal (ZF clear) — cmovne, encoding 0F 45 /r.
void Assembler::cmovne(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x45);
  EmitRegisterOperand(dst, src);
}

// Move if sign (SF set) — cmovs, encoding 0F 48 /r.
void Assembler::cmovs(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x48);
  EmitRegisterOperand(dst, src);
}

// Move if not sign (SF clear) — cmovns, encoding 0F 49 /r.
void Assembler::cmovns(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x49);
  EmitRegisterOperand(dst, src);
}

// Move if greater or equal (signed) — cmovge, encoding 0F 4D /r.
void Assembler::cmovgel(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x4D);
  EmitRegisterOperand(dst, src);
}

// Move if less (signed) — cmovl, encoding 0F 4C /r.
void Assembler::cmovlessl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x4C);
  EmitRegisterOperand(dst, src);
}
305
// rep movsb — copy ECX bytes from [ESI] to [EDI]; F3 A4.
void Assembler::rep_movsb() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0xA4);
}

// rep movsw — copy ECX 16-bit words; F3 66 A5 (rep prefix, then the
// operand-size override that shrinks movsd to movsw).
void Assembler::rep_movsw() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x66);
  EmitUint8(0xA5);
}

// rep movsd — copy ECX 32-bit words; F3 A5.
void Assembler::rep_movsl() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0xA5);
}
324
// movss xmm, m32 — scalar single load, encoding F3 0F 10 /r.
void Assembler::movss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}

// movss m32, xmm — scalar single store, encoding F3 0F 11 /r.
void Assembler::movss(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}

// movss xmm, xmm — uses the store form (F3 0F 11) with the operands
// swapped in the ModRM byte, which moves src's low lane into dst.
void Assembler::movss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src, dst);
}

// movd xmm, r32 — encoding 66 0F 6E /r.
void Assembler::movd(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x6E);
  EmitOperand(dst, Operand(src));
}

// movd r32, xmm — encoding 66 0F 7E /r.
void Assembler::movd(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x7E);
  EmitOperand(src, Operand(dst));
}

// movq m64, xmm — 64-bit store of the low quadword, 66 0F D6 /r.
void Assembler::movq(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xD6);
  EmitOperand(src, Operand(dst));
}

// movq xmm, m64 — 64-bit load into the low quadword, F3 0F 7E /r.
void Assembler::movq(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x7E);
  EmitOperand(dst, Operand(src));
}
380
// addss xmm, xmm — scalar single add, F3 0F 58 /r.
void Assembler::addss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}

// addss xmm, m32 — scalar single add from memory.
void Assembler::addss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}

// subss xmm, xmm — scalar single subtract, F3 0F 5C /r.
void Assembler::subss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}

// subss xmm, m32 — scalar single subtract from memory.
void Assembler::subss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}

// mulss xmm, xmm — scalar single multiply, F3 0F 59 /r.
void Assembler::mulss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}

// mulss xmm, m32 — scalar single multiply from memory.
void Assembler::mulss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}

// divss xmm, xmm — scalar single divide, F3 0F 5E /r.
void Assembler::divss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}

// divss xmm, m32 — scalar single divide by a memory operand.
void Assembler::divss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}
444
// fld m32 — push a single-precision float onto the x87 stack, D9 /0.
void Assembler::flds(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(0, src);
}

// fstp m32 — store ST(0) as single precision and pop, D9 /3.
void Assembler::fstps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(3, dst);
}
456
// movsd xmm, m64 — scalar double load, encoding F2 0F 10 /r.
void Assembler::movsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}

// movsd m64, xmm — scalar double store, encoding F2 0F 11 /r.
void Assembler::movsd(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}

// movsd xmm, xmm — uses the store form (F2 0F 11) with the operands
// swapped in the ModRM byte, which moves src's low lane into dst.
void Assembler::movsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src, dst);
}

// movaps xmm, xmm — aligned packed-single register move, 0F 28 /r.
void Assembler::movaps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x28);
  EmitXmmRegisterOperand(dst, src);
}

// movups xmm, m128 — unaligned packed-single load, 0F 10 /r.
void Assembler::movups(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}

// movups m128, xmm — unaligned packed-single store, 0F 11 /r.
void Assembler::movups(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}
501
// addsd xmm, xmm — scalar double add, F2 0F 58 /r.
void Assembler::addsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}

// addsd xmm, m64 — scalar double add from memory.
void Assembler::addsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}

// paddd xmm, xmm — packed 32-bit integer add, 66 0F FE /r.
void Assembler::addpl(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xFE);
  EmitXmmRegisterOperand(dst, src);
}

// psubd xmm, xmm — packed 32-bit integer subtract, 66 0F FA /r.
void Assembler::subpl(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xFA);
  EmitXmmRegisterOperand(dst, src);
}
533
// addps xmm, xmm — packed single add, 0F 58 /r.
void Assembler::addps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}

// subps xmm, xmm — packed single subtract, 0F 5C /r.
void Assembler::subps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}

// divps xmm, xmm — packed single divide, 0F 5E /r.
void Assembler::divps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}

// mulps xmm, xmm — packed single multiply, 0F 59 /r.
void Assembler::mulps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}

// minps xmm, xmm — packed single minimum, 0F 5D /r.
void Assembler::minps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5D);
  EmitXmmRegisterOperand(dst, src);
}

// maxps xmm, xmm — packed single maximum, 0F 5F /r.
void Assembler::maxps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5F);
  EmitXmmRegisterOperand(dst, src);
}

// andps xmm, xmm — packed single bitwise AND, 0F 54 /r.
void Assembler::andps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitXmmRegisterOperand(dst, src);
}

// andps xmm, m128 — packed single bitwise AND with a memory operand.
void Assembler::andps(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst, src);
}

// orps xmm, xmm — packed single bitwise OR, 0F 56 /r.
void Assembler::orps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x56);
  EmitXmmRegisterOperand(dst, src);
}
596
// Bitwise NOT of all four lanes: xor with an all-ones 16-byte constant.
// The constant lives in static storage so it can be addressed absolutely.
void Assembler::notps(XmmRegister dst) {
  static const struct ALIGN16 {
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
  } float_not_constant = {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF};
  xorps(dst, Address::Absolute(reinterpret_cast<uword>(&float_not_constant)));
}

// Negate all four lanes: xor with a constant that flips each sign bit.
void Assembler::negateps(XmmRegister dst) {
  static const struct ALIGN16 {
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
  } float_negate_constant = {0x80000000, 0x80000000, 0x80000000, 0x80000000};
  xorps(dst,
        Address::Absolute(reinterpret_cast<uword>(&float_negate_constant)));
}

// Absolute value of all four lanes: and with a constant that clears
// each sign bit.
void Assembler::absps(XmmRegister dst) {
  static const struct ALIGN16 {
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
  } float_absolute_constant = {0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF};
  andps(dst,
        Address::Absolute(reinterpret_cast<uword>(&float_absolute_constant)));
}

// Zero the highest (w) lane, keeping the other three: and with a mask
// whose top 32 bits are clear.
void Assembler::zerowps(XmmRegister dst) {
  static const struct ALIGN16 {
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
  } float_zerow_constant = {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000};
  andps(dst, Address::Absolute(reinterpret_cast<uword>(&float_zerow_constant)));
}
638
// cmpps dst, src, 0 — packed single compare-equal (CMPPS with
// predicate imm8 = 0, EQ).
void Assembler::cmppseq(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(0x0);
}

// cmpps dst, src, 4 — compare-not-equal (predicate NEQ).
void Assembler::cmppsneq(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(0x4);
}

// cmpps dst, src, 1 — compare-less-than (predicate LT).
void Assembler::cmppslt(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(0x1);
}

// cmpps dst, src, 2 — compare-less-or-equal (predicate LE).
void Assembler::cmppsle(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(0x2);
}

// cmpps dst, src, 5 — compare-not-less-than (predicate NLT).
void Assembler::cmppsnlt(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(0x5);
}

// cmpps dst, src, 6 — compare-not-less-or-equal (predicate NLE).
void Assembler::cmppsnle(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(0x6);
}
686
// sqrtps dst — in-place packed single square root (dst = sqrt(dst)),
// encoding 0F 51 /r with dst as both operands.
void Assembler::sqrtps(XmmRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, dst);
}

// rsqrtps dst — in-place approximate reciprocal square root, 0F 52.
void Assembler::rsqrtps(XmmRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x52);
  EmitXmmRegisterOperand(dst, dst);
}

// rcpps dst — in-place approximate reciprocal, 0F 53.
void Assembler::reciprocalps(XmmRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x53);
  EmitXmmRegisterOperand(dst, dst);
}

// movhlps dst, src — move src's high quadword into dst's low, 0F 12 /r.
void Assembler::movhlps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x12);
  EmitXmmRegisterOperand(dst, src);
}

// movlhps dst, src — move src's low quadword into dst's high, 0F 16 /r.
void Assembler::movlhps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x16);
  EmitXmmRegisterOperand(dst, src);
}

// unpcklps dst, src — interleave the low single-precision lanes, 0F 14.
void Assembler::unpcklps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x14);
  EmitXmmRegisterOperand(dst, src);
}

// unpckhps dst, src — interleave the high single-precision lanes, 0F 15.
void Assembler::unpckhps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x15);
  EmitXmmRegisterOperand(dst, src);
}

// unpcklpd dst, src — interleave the low double-precision lanes,
// 66 0F 14 /r.
void Assembler::unpcklpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x14);
  EmitXmmRegisterOperand(dst, src);
}

// unpckhpd dst, src — interleave the high double-precision lanes,
// 66 0F 15 /r.
void Assembler::unpckhpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x15);
  EmitXmmRegisterOperand(dst, src);
}
751
// Broadcasts a 32-bit immediate into all four lanes of dst, using tmp1
// as a scratch general-purpose register.
void Assembler::set1ps(XmmRegister dst, Register tmp1, const Immediate& imm) {
  // Load 32-bit immediate value into tmp1.
  movl(tmp1, imm);
  // Move value from tmp1 into dst.
  movd(dst, tmp1);
  // Broadcast low lane into other three lanes.
  shufps(dst, dst, Immediate(0x0));
}

// shufps dst, src, imm8 — packed single shuffle, 0F C6 /r ib; the
// immediate selects two lanes from dst and two from src.
void Assembler::shufps(XmmRegister dst, XmmRegister src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC6);
  EmitXmmRegisterOperand(dst, src);
  ASSERT(imm.is_uint8());
  EmitUint8(imm.value());
}
769
// addpd xmm, xmm — packed double add, 66 0F 58 /r.
void Assembler::addpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}

// Negate both double lanes: xor with a constant that flips each sign bit.
void Assembler::negatepd(XmmRegister dst) {
  static const struct ALIGN16 {
    uint64_t a;
    uint64_t b;
  } double_negate_constant = {0x8000000000000000LLU, 0x8000000000000000LLU};
  xorpd(dst,
        Address::Absolute(reinterpret_cast<uword>(&double_negate_constant)));
}

// subpd xmm, xmm — packed double subtract, 66 0F 5C /r.
void Assembler::subpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}

// mulpd xmm, xmm — packed double multiply, 66 0F 59 /r.
void Assembler::mulpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}

// divpd xmm, xmm — packed double divide, 66 0F 5E /r.
void Assembler::divpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}

// Absolute value of both double lanes: and with a constant that clears
// each sign bit.
void Assembler::abspd(XmmRegister dst) {
  static const struct ALIGN16 {
    uint64_t a;
    uint64_t b;
  } double_absolute_constant = {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL};
  andpd(dst,
        Address::Absolute(reinterpret_cast<uword>(&double_absolute_constant)));
}
819
// minpd xmm, xmm — packed double minimum, 66 0F 5D /r.
void Assembler::minpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5D);
  EmitXmmRegisterOperand(dst, src);
}

// maxpd xmm, xmm — packed double maximum, 66 0F 5F /r.
void Assembler::maxpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5F);
  EmitXmmRegisterOperand(dst, src);
}

// sqrtpd dst — in-place packed double square root, 66 0F 51 with dst
// as both operands.
void Assembler::sqrtpd(XmmRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, dst);
}

// cvtps2pd xmm, xmm — convert low two singles to doubles, 0F 5A /r.
void Assembler::cvtps2pd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}

// cvtpd2ps xmm, xmm — convert two doubles to singles, 66 0F 5A /r.
void Assembler::cvtpd2ps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}

// shufpd dst, src, imm8 — packed double shuffle, 66 0F C6 /r ib.
void Assembler::shufpd(XmmRegister dst, XmmRegister src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xC6);
  EmitXmmRegisterOperand(dst, src);
  ASSERT(imm.is_uint8());
  EmitUint8(imm.value());
}
868
// subsd xmm, xmm — scalar double subtract, F2 0F 5C /r.
void Assembler::subsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}

// subsd xmm, m64 — scalar double subtract from memory.
void Assembler::subsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}

// mulsd xmm, xmm — scalar double multiply, F2 0F 59 /r.
void Assembler::mulsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}

// mulsd xmm, m64 — scalar double multiply from memory.
void Assembler::mulsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}

// divsd xmm, xmm — scalar double divide, F2 0F 5E /r.
void Assembler::divsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}

// divsd xmm, m64 — scalar double divide by a memory operand.
void Assembler::divsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}
916
// cvtsi2ss xmm, r32 — int32 to scalar single, F3 0F 2A /r.
void Assembler::cvtsi2ss(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst, Operand(src));
}

// cvtsi2sd xmm, r32 — int32 to scalar double, F2 0F 2A /r.
void Assembler::cvtsi2sd(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst, Operand(src));
}

// cvtss2si r32, xmm — scalar single to int32 (rounded), F3 0F 2D /r.
void Assembler::cvtss2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst, src);
}

// cvtss2sd xmm, xmm — scalar single to double, F3 0F 5A /r.
void Assembler::cvtss2sd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}

// cvtsd2si r32, xmm — scalar double to int32 (rounded), F2 0F 2D /r.
void Assembler::cvtsd2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst, src);
}

// cvttss2si r32, xmm — scalar single to int32 with truncation, F3 0F 2C.
void Assembler::cvttss2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst, src);
}

// cvttsd2si r32, xmm — scalar double to int32 with truncation, F2 0F 2C.
void Assembler::cvttsd2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst, src);
}

// cvtsd2ss xmm, xmm — scalar double to single, F2 0F 5A /r.
void Assembler::cvtsd2ss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}

// cvtdq2pd xmm, xmm — low two packed int32s to doubles, F3 0F E6 /r.
void Assembler::cvtdq2pd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0xE6);
  EmitXmmRegisterOperand(dst, src);
}
988
// comiss a, b — ordered scalar single compare, sets EFLAGS; 0F 2F /r.
void Assembler::comiss(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a, b);
}

// comisd a, b — ordered scalar double compare, sets EFLAGS; 66 0F 2F /r.
void Assembler::comisd(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a, b);
}

// movmskpd r32, xmm — extract the two double sign bits, 66 0F 50 /r.
void Assembler::movmskpd(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x50);
  EmitXmmRegisterOperand(dst, src);
}

// movmskps r32, xmm — extract the four single sign bits, 0F 50 /r.
void Assembler::movmskps(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x50);
  EmitXmmRegisterOperand(dst, src);
}

// pmovmskb r32, xmm — extract the 16 byte sign bits, 66 0F D7 /r.
void Assembler::pmovmskb(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xD7);
  EmitXmmRegisterOperand(dst, src);
}
1026
// sqrtsd xmm, xmm — scalar double square root, F2 0F 51 /r.
void Assembler::sqrtsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, src);
}

// sqrtss xmm, xmm — scalar single square root, F3 0F 51 /r.
void Assembler::sqrtss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, src);
}

// xorpd xmm, m128 — packed double bitwise XOR from memory, 66 0F 57 /r.
void Assembler::xorpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst, src);
}

// xorpd xmm, xmm — packed double bitwise XOR, 66 0F 57 /r.
void Assembler::xorpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst, src);
}

// orpd xmm, xmm — packed double bitwise OR, 66 0F 56 /r.
void Assembler::orpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x56);
  EmitXmmRegisterOperand(dst, src);
}

// xorps xmm, m128 — packed single bitwise XOR from memory, 0F 57 /r.
void Assembler::xorps(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst, src);
}

// xorps xmm, xmm — packed single bitwise XOR, 0F 57 /r.
void Assembler::xorps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst, src);
}

// andpd xmm, m128 — packed double bitwise AND from memory, 66 0F 54 /r.
void Assembler::andpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst, src);
}

// andpd xmm, xmm — packed double bitwise AND, 66 0F 54 /r.
void Assembler::andpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitXmmRegisterOperand(dst, src);
}
1096
1097void Assembler::pextrd(Register dst, XmmRegister src, const Immediate& imm) {
1098 ASSERT(TargetCPUFeatures::sse4_1_supported());
1099 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1100 EmitUint8(0x66);
1101 EmitUint8(0x0F);
1102 EmitUint8(0x3A);
1103 EmitUint8(0x16);
1104 EmitOperand(src, Operand(dst));
1105 ASSERT(imm.is_uint8());
1106 EmitUint8(imm.value());
1107}
1108
// pmovsxdq xmm, xmm: sign-extend two low dwords to qwords
// (SSE4.1: 66 0F 38 25 /r).
void Assembler::pmovsxdq(XmmRegister dst, XmmRegister src) {
  ASSERT(TargetCPUFeatures::sse4_1_supported());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x38);
  EmitUint8(0x25);
  EmitXmmRegisterOperand(dst, src);
}
1118
// pcmpeqq xmm, xmm: compare packed qwords for equality
// (SSE4.1: 66 0F 38 29 /r).
void Assembler::pcmpeqq(XmmRegister dst, XmmRegister src) {
  ASSERT(TargetCPUFeatures::sse4_1_supported());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x38);
  EmitUint8(0x29);
  EmitXmmRegisterOperand(dst, src);
}
1128
// pxor xmm, xmm: bitwise XOR of the full 128-bit registers (66 0F EF /r).
void Assembler::pxor(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xEF);
  EmitXmmRegisterOperand(dst, src);
}
1136
// roundsd xmm, xmm, imm8: round scalar double using 'mode'
// (SSE4.1: 66 0F 3A 0B /r ib).
void Assembler::roundsd(XmmRegister dst, XmmRegister src, RoundingMode mode) {
  ASSERT(TargetCPUFeatures::sse4_1_supported());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x3A);
  EmitUint8(0x0B);
  EmitXmmRegisterOperand(dst, src);
  // Bit 3 of the immediate masks (suppresses) the precision exception.
  EmitUint8(static_cast<uint8_t>(mode) | 0x8);
}
1148
// fld m64fp: push a double onto the x87 stack (DD /0).
void Assembler::fldl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(0, src);
}
1154
// fstp m64fp: store ST(0) as a double and pop (DD /3).
void Assembler::fstpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(3, dst);
}
1160
// fnstcw m16: store the x87 control word without checking exceptions (D9 /7).
void Assembler::fnstcw(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(7, dst);
}
1166
// fldcw m16: load the x87 control word (D9 /5).
void Assembler::fldcw(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(5, src);
}
1172
// fistp m64int: store ST(0) as a 64-bit integer and pop (DF /7).
void Assembler::fistpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(7, dst);
}
1178
// fistp m32int: store ST(0) as a 32-bit integer and pop (DB /3).
void Assembler::fistps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(3, dst);
}
1184
// fild m64int: push a 64-bit integer onto the x87 stack (DF /5).
void Assembler::fildl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(5, src);
}
1190
// fild m32int: push a 32-bit integer onto the x87 stack (DB /0).
void Assembler::filds(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(0, src);
}
1196
// fincstp: increment the x87 stack-top pointer (D9 F7).
void Assembler::fincstp() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF7);
}
1202
1203void Assembler::ffree(intptr_t value) {
1204 ASSERT(value < 7);
1205 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1206 EmitUint8(0xDD);
1207 EmitUint8(0xC0 + value);
1208}
1209
// fsin: replace ST(0) with its sine (D9 FE).
void Assembler::fsin() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFE);
}
1215
// fcos: replace ST(0) with its cosine (D9 FF).
void Assembler::fcos() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFF);
}
1221
// fsincos: compute sine and cosine of ST(0); sine replaces ST(0), cosine is
// pushed (D9 FB).
void Assembler::fsincos() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFB);
}
1227
// fptan: partial tangent of ST(0); result replaces ST(0) and 1.0 is pushed
// (D9 F2).
void Assembler::fptan() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF2);
}
1233
// xchg r32, r32: atomically swap two registers (87 /r).
void Assembler::xchgl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x87);
  EmitRegisterOperand(dst, src);
}
1239
// cmp r/m16, imm16 (66 81 /7 iw): compare a 16-bit memory operand against the
// low 16 bits of 'imm', emitted little-endian.
void Assembler::cmpw(const Address& address, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0x81);
  EmitOperand(7, address);
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
}
1248
// cmp r/m8, imm8 (80 /7 ib): compare a byte memory operand against 'imm'.
void Assembler::cmpb(const Address& address, const Immediate& imm) {
  ASSERT(imm.is_int8());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x80);
  EmitOperand(7, address);
  EmitUint8(imm.value() & 0xFF);
}
1256
// test r/m32, r32: AND the operands and set flags, discarding the result
// (85 /r).
void Assembler::testl(Register reg1, Register reg2) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x85);
  EmitRegisterOperand(reg1, reg2);
}
1262
// test reg, imm: choose the shortest encoding — byte-register test (A8 ib for
// AL, F6 /0 ib otherwise), the EAX short form (A9 id), or the generic
// F7 /0 id form.
void Assembler::testl(Register reg, const Immediate& immediate) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // For registers that have a byte variant (EAX, EBX, ECX, and EDX)
  // we only test the byte register to keep the encoding short.
  if (immediate.is_uint8() && reg < 4) {
    // Use zero-extended 8-bit immediate.
    if (reg == EAX) {
      EmitUint8(0xA8);
    } else {
      EmitUint8(0xF6);
      EmitUint8(0xC0 + reg);
    }
    EmitUint8(immediate.value() & 0xFF);
  } else if (reg == EAX) {
    // Use short form if the destination is EAX.
    EmitUint8(0xA9);
    EmitImmediate(immediate);
  } else {
    EmitUint8(0xF7);
    EmitOperand(0, Operand(reg));
    EmitImmediate(immediate);
  }
}
1286
// test r/m8, imm8 (F6 /0 ib): flag-setting AND of a memory byte with 'imm'.
void Assembler::testb(const Address& address, const Immediate& imm) {
  ASSERT(imm.is_int8());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF6);
  EmitOperand(0, address);
  EmitUint8(imm.value() & 0xFF);
}
1294
// Generic ALU op, register-to-register form (opcode /r with reg as dest).
// 'bytes' selects 16- vs 32-bit operand size; opcodes ending in binary 011
// are the "r32, r/m32" direction.
void Assembler::Alu(int bytes, uint8_t opcode, Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (bytes == 2) {
    EmitOperandSizeOverride();
  }
  ASSERT((opcode & 7) == 3);
  EmitUint8(opcode);
  EmitOperand(dst, Operand(src));
}
1304
// Generic ALU op with an immediate against a register; EmitComplex picks the
// short (imm8) or long (imm32) encoding.
void Assembler::Alu(uint8_t modrm_opcode, Register dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(modrm_opcode, Operand(dst), imm);
}
1309
// Generic ALU op, memory-to-register form (reg is the destination).
void Assembler::Alu(int bytes,
                    uint8_t opcode,
                    Register dst,
                    const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (bytes == 2) {
    EmitOperandSizeOverride();
  }
  ASSERT((opcode & 7) == 3);
  EmitUint8(opcode);
  EmitOperand(dst, src);
}
1322
// Generic ALU op, register-to-memory form; opcodes ending in binary 001 are
// the "r/m32, r32" direction.
void Assembler::Alu(int bytes,
                    uint8_t opcode,
                    const Address& dst,
                    Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (bytes == 2) {
    EmitOperandSizeOverride();
  }
  ASSERT((opcode & 7) == 1);
  EmitUint8(opcode);
  EmitOperand(src, dst);
}
1335
// Generic ALU op with an immediate against a memory operand.
void Assembler::Alu(uint8_t modrm_opcode,
                    const Address& dst,
                    const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(modrm_opcode, dst, imm);
}
1342
// cdq: sign-extend EAX into EDX:EAX (99).
void Assembler::cdq() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x99);
}
1347
// idiv r/m32: signed divide EDX:EAX by 'reg' (F7 /7).
void Assembler::idivl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(7, Operand(reg));
}
1353
// div r/m32: unsigned divide EDX:EAX by 'reg' (F7 /6).
void Assembler::divl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(6, Operand(reg));
}
1359
// imul r32, r/m32: two-operand signed multiply (0F AF /r).
void Assembler::imull(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(dst, Operand(src));
}
1366
// imul r32, r/m32, imm32 (69 /r id) with src == dst: reg = reg * imm.
void Assembler::imull(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x69);
  EmitOperand(reg, Operand(reg));
  EmitImmediate(imm);
}
1373
// imul r32, m32: two-operand signed multiply from memory (0F AF /r).
void Assembler::imull(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(reg, address);
}
1380
// imul r/m32: one-operand signed multiply, EDX:EAX = EAX * reg (F7 /5).
void Assembler::imull(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(5, Operand(reg));
}
1386
// imul m32: one-operand signed multiply, EDX:EAX = EAX * [address] (F7 /5).
void Assembler::imull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(5, address);
}
1392
// mul r/m32: unsigned multiply, EDX:EAX = EAX * reg (F7 /4).
void Assembler::mull(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(4, Operand(reg));
}
1398
// mul m32: unsigned multiply, EDX:EAX = EAX * [address] (F7 /4).
void Assembler::mull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(4, address);
}
1404
// inc r32: single-byte register increment (40+rd).
void Assembler::incl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x40 + reg);
}
1409
// inc m32: increment a memory operand (FF /0).
void Assembler::incl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(0, address);
}
1415
// dec r32: single-byte register decrement (48+rd).
void Assembler::decl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x48 + reg);
}
1420
// dec m32: decrement a memory operand (FF /1).
void Assembler::decl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(1, address);
}
1426
// shl reg, imm (ModRM opcode extension /4).
void Assembler::shll(Register reg, const Immediate& imm) {
  EmitGenericShift(4, reg, imm);
}
1430
// shl reg, cl (shift count must be in ECX; checked in EmitGenericShift).
void Assembler::shll(Register operand, Register shifter) {
  EmitGenericShift(4, Operand(operand), shifter);
}
1434
// shl m32, cl.
void Assembler::shll(const Address& operand, Register shifter) {
  EmitGenericShift(4, Operand(operand), shifter);
}
1438
// shr reg, imm (ModRM opcode extension /5, logical shift right).
void Assembler::shrl(Register reg, const Immediate& imm) {
  EmitGenericShift(5, reg, imm);
}
1442
// shr reg, cl.
void Assembler::shrl(Register operand, Register shifter) {
  EmitGenericShift(5, Operand(operand), shifter);
}
1446
// sar reg, imm (ModRM opcode extension /7, arithmetic shift right).
void Assembler::sarl(Register reg, const Immediate& imm) {
  EmitGenericShift(7, reg, imm);
}
1450
// sar reg, cl.
void Assembler::sarl(Register operand, Register shifter) {
  EmitGenericShift(7, Operand(operand), shifter);
}
1454
// sar m32, cl.
void Assembler::sarl(const Address& address, Register shifter) {
  EmitGenericShift(7, Operand(address), shifter);
}
1458
// shld r/m32, r32, cl: double-precision shift left (0F A5 /r); shift count
// must be in ECX.
void Assembler::shldl(Register dst, Register src, Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(shifter == ECX);
  EmitUint8(0x0F);
  EmitUint8(0xA5);
  EmitRegisterOperand(src, dst);
}
1466
// shld r/m32, r32, imm8: double-precision shift left (0F A4 /r ib).
void Assembler::shldl(Register dst, Register src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(imm.is_int8());
  EmitUint8(0x0F);
  EmitUint8(0xA4);
  EmitRegisterOperand(src, dst);
  EmitUint8(imm.value() & 0xFF);
}
1475
// shld m32, r32, cl: double-precision shift left into memory (0F A5 /r).
void Assembler::shldl(const Address& operand, Register src, Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(shifter == ECX);
  EmitUint8(0x0F);
  EmitUint8(0xA5);
  EmitOperand(src, Operand(operand));
}
1483
// shrd r/m32, r32, cl: double-precision shift right (0F AD /r); shift count
// must be in ECX.
void Assembler::shrdl(Register dst, Register src, Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(shifter == ECX);
  EmitUint8(0x0F);
  EmitUint8(0xAD);
  EmitRegisterOperand(src, dst);
}
1491
// shrd r/m32, r32, imm8: double-precision shift right (0F AC /r ib).
void Assembler::shrdl(Register dst, Register src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(imm.is_int8());
  EmitUint8(0x0F);
  EmitUint8(0xAC);
  EmitRegisterOperand(src, dst);
  EmitUint8(imm.value() & 0xFF);
}
1500
// shrd m32, r32, cl: double-precision shift right into memory (0F AD /r).
void Assembler::shrdl(const Address& dst, Register src, Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(shifter == ECX);
  EmitUint8(0x0F);
  EmitUint8(0xAD);
  EmitOperand(src, Operand(dst));
}
1508
// neg r/m32: two's-complement negate (F7 /3).
void Assembler::negl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(3, Operand(reg));
}
1514
// not r/m32: bitwise complement (F7 /2); 0xD0|reg is the register-direct
// ModRM byte (mod=11, reg field /2).
void Assembler::notl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitUint8(0xD0 | reg);
}
1520
// bsf r32, r/m32: bit-scan forward, index of lowest set bit (0F BC /r).
void Assembler::bsfl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBC);
  EmitRegisterOperand(dst, src);
}
1527
// bsr r32, r/m32: bit-scan reverse, index of highest set bit (0F BD /r).
void Assembler::bsrl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBD);
  EmitRegisterOperand(dst, src);
}
1534
// popcnt r32, r/m32: count set bits (F3 0F B8 /r; requires POPCNT).
void Assembler::popcntl(Register dst, Register src) {
  ASSERT(TargetCPUFeatures::popcnt_supported());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0xB8);
  EmitRegisterOperand(dst, src);
}
1543
// lzcnt r32, r/m32: count leading zeros (F3 0F BD /r; requires ABM/LZCNT —
// without it the encoding silently decodes as bsr).
void Assembler::lzcntl(Register dst, Register src) {
  ASSERT(TargetCPUFeatures::abm_supported());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0xBD);
  EmitRegisterOperand(dst, src);
}
1552
// bt r/m32, r32: copy bit 'offset' of 'base' into CF (0F A3 /r).
void Assembler::bt(Register base, Register offset) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xA3);
  EmitRegisterOperand(offset, base);
}
1559
// bt r/m32, imm8: copy bit 'bit' of 'base' into CF (0F BA /4 ib).
void Assembler::bt(Register base, int bit) {
  ASSERT(bit >= 0 && bit < 32);
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBA);
  EmitRegisterOperand(4, base);
  EmitUint8(bit);
}
1568
// enter imm16, 0: create a stack frame with 'imm' bytes of locals and nesting
// level 0 (C8 iw ib).
void Assembler::enter(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC8);
  ASSERT(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
  EmitUint8(0x00);
}
1577
// leave: tear down the current stack frame (C9).
void Assembler::leave() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC9);
}
1582
// ret: near return (C3).
void Assembler::ret() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC3);
}
1587
// ret imm16: near return, popping 'imm' extra bytes of arguments (C2 iw).
void Assembler::ret(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC2);
  ASSERT(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
}
1595
// Emit a single multi-byte NOP of exactly 'size' bytes (1..8), using the
// recommended 0F 1F multi-byte NOP encodings.
void Assembler::nop(int size) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // There are nops up to size 15, but for now just provide up to size 8.
  ASSERT(0 < size && size <= MAX_NOP_SIZE);
  switch (size) {
    case 1:
      EmitUint8(0x90);
      break;
    case 2:
      EmitUint8(0x66);
      EmitUint8(0x90);
      break;
    case 3:
      EmitUint8(0x0F);
      EmitUint8(0x1F);
      EmitUint8(0x00);
      break;
    case 4:
      EmitUint8(0x0F);
      EmitUint8(0x1F);
      EmitUint8(0x40);
      EmitUint8(0x00);
      break;
    case 5:
      EmitUint8(0x0F);
      EmitUint8(0x1F);
      EmitUint8(0x44);
      EmitUint8(0x00);
      EmitUint8(0x00);
      break;
    case 6:
      EmitUint8(0x66);
      EmitUint8(0x0F);
      EmitUint8(0x1F);
      EmitUint8(0x44);
      EmitUint8(0x00);
      EmitUint8(0x00);
      break;
    case 7:
      EmitUint8(0x0F);
      EmitUint8(0x1F);
      EmitUint8(0x80);
      EmitUint8(0x00);
      EmitUint8(0x00);
      EmitUint8(0x00);
      EmitUint8(0x00);
      break;
    case 8:
      EmitUint8(0x0F);
      EmitUint8(0x1F);
      EmitUint8(0x84);
      EmitUint8(0x00);
      EmitUint8(0x00);
      EmitUint8(0x00);
      EmitUint8(0x00);
      EmitUint8(0x00);
      break;
    default:
      UNIMPLEMENTED();
  }
}
1657
// int3: breakpoint trap (CC).
void Assembler::int3() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xCC);
}
1662
// hlt: halt the processor (F4); faults in user mode.
void Assembler::hlt() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF4);
}
1667
// Conditional jump to 'label'. Bound labels get the shortest encoding that
// fits the (backward) displacement; unbound labels are linked for later
// patching, using the 1-byte-displacement form when 'near' is requested.
void Assembler::j(Condition condition, Label* label, bool near) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;  // Jcc rel8: 70+cc cb.
    static const int kLongSize = 6;   // Jcc rel32: 0F 80+cc cd.
    intptr_t offset = label->Position() - buffer_.Size();
    ASSERT(offset <= 0);  // Bound labels are always behind us.
    if (Utils::IsInt(8, offset - kShortSize)) {
      EmitUint8(0x70 + condition);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0x0F);
      EmitUint8(0x80 + condition);
      EmitInt32(offset - kLongSize);
    }
  } else if (near) {
    EmitUint8(0x70 + condition);
    EmitNearLabelLink(label);
  } else {
    EmitUint8(0x0F);
    EmitUint8(0x80 + condition);
    EmitLabelLink(label);
  }
}
1692
// Conditional jump to an absolute external address; the fixup converts the
// absolute target into a rel32 displacement at finalization.
void Assembler::j(Condition condition, const ExternalLabel* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x80 + condition);
  EmitFixup(new DirectCallRelocation());
  EmitInt32(label->address());
}
1700
// jmp r32: indirect jump through a register (FF /4).
void Assembler::jmp(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitRegisterOperand(4, reg);
}
1706
// jmp m32: indirect jump through memory (FF /4).
void Assembler::jmp(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(4, address);
}
1712
// Unconditional jump to 'label'; same short/long selection and link-for-later
// patching scheme as the conditional form above.
void Assembler::jmp(Label* label, bool near) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;  // jmp rel8: EB cb.
    static const int kLongSize = 5;   // jmp rel32: E9 cd.
    intptr_t offset = label->Position() - buffer_.Size();
    ASSERT(offset <= 0);  // Bound labels are always behind us.
    if (Utils::IsInt(8, offset - kShortSize)) {
      EmitUint8(0xEB);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0xE9);
      EmitInt32(offset - kLongSize);
    }
  } else if (near) {
    EmitUint8(0xEB);
    EmitNearLabelLink(label);
  } else {
    EmitUint8(0xE9);
    EmitLabelLink(label);
  }
}
1735
// Unconditional jump to an absolute external address (E9 cd), relocated to a
// rel32 displacement by the fixup.
void Assembler::jmp(const ExternalLabel* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xE9);
  EmitFixup(new DirectCallRelocation());
  EmitInt32(label->address());
}
1742
// lock prefix (F0) for the following instruction.
void Assembler::lock() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF0);
}
1747
// cmpxchg r/m32, r32 (0F B1 /r): compare EAX with the memory operand; on
// match store 'reg', else load the memory value into EAX.
void Assembler::cmpxchgl(const Address& address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB1);
  EmitOperand(reg, address);
}
1754
// cpuid: query processor identification/features (0F A2).
void Assembler::cpuid() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xA2);
}
1760
// Architecture-neutral register compare; maps to cmpl on IA32.
void Assembler::CompareRegisters(Register a, Register b) {
  cmpl(a, b);
}
1764
// Register-to-register move, eliding the no-op case.
void Assembler::MoveRegister(Register to, Register from) {
  if (to != from) {
    movl(to, from);
  }
}
1770
// Architecture-neutral push; maps to pushl on IA32.
void Assembler::PushRegister(Register r) {
  pushl(r);
}
1774
// Architecture-neutral pop; maps to popl on IA32.
void Assembler::PopRegister(Register r) {
  popl(r);
}
1778
// Add 'imm' to 'reg', choosing the cheapest encoding: nothing for 0, incl for
// 1, addl otherwise. Negative values are delegated to SubImmediate; kMinInt32
// is handled here directly because -kMinInt32 overflows.
void Assembler::AddImmediate(Register reg, const Immediate& imm) {
  const intptr_t value = imm.value();
  if (value == 0) {
    return;
  }
  if ((value > 0) || (value == kMinInt32)) {
    if (value == 1) {
      incl(reg);
    } else {
      addl(reg, imm);
    }
  } else {
    SubImmediate(reg, Immediate(-value));
  }
}
1794
// Subtract 'imm' from 'reg'; mirror of AddImmediate (decl for 1, subl
// otherwise, negatives delegated back to AddImmediate, kMinInt32 kept here
// since it cannot be negated).
void Assembler::SubImmediate(Register reg, const Immediate& imm) {
  const intptr_t value = imm.value();
  if (value == 0) {
    return;
  }
  if ((value > 0) || (value == kMinInt32)) {
    if (value == 1) {
      decl(reg);
    } else {
      subl(reg, imm);
    }
  } else {
    AddImmediate(reg, Immediate(-value));
  }
}
1810
// Pop 'stack_elements' words off the stack by bumping ESP.
void Assembler::Drop(intptr_t stack_elements) {
  ASSERT(stack_elements >= 0);
  if (stack_elements > 0) {
    addl(ESP, Immediate(stack_elements * target::kWordSize));
  }
}
1817
// Load the current isolate pointer from the thread object into 'dst'.
void Assembler::LoadIsolate(Register dst) {
  movl(dst, Address(THR, target::Thread::isolate_offset()));
}
1821
// Load a VM object reference into 'dst'. Immovable objects are embedded as
// raw pointers; otherwise emit mov r32, imm32 (B8+rd) with the object
// recorded in the object pool so the GC/patcher can track it.
void Assembler::LoadObject(Register dst,
                           const Object& object,
                           bool movable_referent) {
  ASSERT(IsOriginalObject(object));

  // movable_referent: some references to VM heap objects may be patched with
  // references to isolate-local objects (e.g., optimized static calls).
  // We need to track such references since the latter may move during
  // compaction.
  if (target::CanEmbedAsRawPointerInGeneratedCode(object) &&
      !movable_referent) {
    movl(dst, Immediate(target::ToRawPointer(object)));
  } else {
    ASSERT(IsNotTemporaryScopedHandle(object));
    ASSERT(IsInOldSpace(object));
    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
    EmitUint8(0xB8 + dst);  // mov r32, imm32.
    buffer_.EmitObject(object);
  }
}
1842
// Like LoadObject, but obfuscates unsafe Smi immediates with the JIT cookie
// (load xor-ed value, then xor with the cookie) so attacker-chosen constants
// never appear verbatim in the code stream.
void Assembler::LoadObjectSafely(Register dst, const Object& object) {
  ASSERT(IsOriginalObject(object));
  if (target::IsSmi(object) && !IsSafeSmi(object)) {
    const int32_t cookie = jit_cookie();
    movl(dst, Immediate(target::ToRawSmi(object) ^ cookie));
    xorl(dst, Immediate(cookie));
  } else {
    LoadObject(dst, object);
  }
}
1853
// Push a VM object reference: raw pointer immediate when immovable, otherwise
// push imm32 (68) with the object tracked via the buffer.
void Assembler::PushObject(const Object& object) {
  ASSERT(IsOriginalObject(object));
  if (target::CanEmbedAsRawPointerInGeneratedCode(object)) {
    pushl(Immediate(target::ToRawPointer(object)));
  } else {
    ASSERT(IsNotTemporaryScopedHandle(object));
    ASSERT(IsInOldSpace(object));
    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
    EmitUint8(0x68);  // push imm32.
    buffer_.EmitObject(object);
  }
}
1866
// Compare 'reg' against a VM object reference, using the short cmp-EAX form
// (3D id) when possible, otherwise the generic cmp r/m32, imm32 (81 /7 id).
void Assembler::CompareObject(Register reg, const Object& object) {
  ASSERT(IsOriginalObject(object));
  if (target::CanEmbedAsRawPointerInGeneratedCode(object)) {
    cmpl(reg, Immediate(target::ToRawPointer(object)));
  } else {
    ASSERT(IsNotTemporaryScopedHandle(object));
    ASSERT(IsInOldSpace(object));
    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
    if (reg == EAX) {
      EmitUint8(0x05 + (7 << 3));  // 0x3D: cmp eax, imm32.
      buffer_.EmitObject(object);
    } else {
      EmitUint8(0x81);
      EmitOperand(7, Operand(reg));
      buffer_.EmitObject(object);
    }
  }
}
1885
// Destroys the value register.
// Decide whether a write barrier is needed for storing 'value' into 'object'
// and jump to 'label' accordingly: to the no-update path when
// how_to_jump == kJumpToNoUpdate, or to the update path otherwise.
void Assembler::StoreIntoObjectFilter(Register object,
                                      Register value,
                                      Label* label,
                                      CanBeSmi can_be_smi,
                                      BarrierFilterMode how_to_jump) {
  if (can_be_smi == kValueIsNotSmi) {
#if defined(DEBUG)
    Label okay;
    BranchIfNotSmi(value, &okay);
    Stop("Unexpected Smi!");
    Bind(&okay);
#endif
    COMPILE_ASSERT((target::ObjectAlignment::kNewObjectAlignmentOffset ==
                    target::kWordSize) &&
                   (target::ObjectAlignment::kOldObjectAlignmentOffset == 0));
    // Write-barrier triggers if the value is in the new space (has bit set) and
    // the object is in the old space (has bit cleared).
    // To check that we could compute value & ~object and skip the write barrier
    // if the bit is not set. However we can't destroy the object.
    // However to preserve the object we compute negated expression
    // ~value | object instead and skip the write barrier if the bit is set.
    notl(value);
    orl(value, object);
    testl(value, Immediate(target::ObjectAlignment::kNewObjectAlignmentOffset));
  } else {
    ASSERT(target::ObjectAlignment::kNewObjectAlignmentOffset == 4);
    ASSERT(kHeapObjectTag == 1);
    // Detect value being ...101 and object being ...001.
    andl(value, Immediate(7));
    leal(value, Address(value, object, TIMES_2, 9));
    testl(value, Immediate(0xf));
  }
  Condition condition = how_to_jump == kJumpToNoUpdate ? NOT_ZERO : ZERO;
  bool distance = how_to_jump == kJumpToNoUpdate ? kNearJump : kFarJump;
  j(condition, label, distance);
}
1923
// Store 'value' into a field of 'object' with a generational write barrier:
// perform the store, filter out cases that need no barrier, and otherwise
// call the write-barrier stub with the object in EDX.
void Assembler::StoreIntoObject(Register object,
                                const Address& dest,
                                Register value,
                                CanBeSmi can_be_smi) {
  // x.slot = x. Barrier should have been removed at the IL level.
  ASSERT(object != value);

  movl(dest, value);
  Label done;
  StoreIntoObjectFilter(object, value, &done, can_be_smi, kJumpToNoUpdate);
  // A store buffer update is required.
  if (value != EDX) {
    pushl(EDX);  // Preserve EDX.
  }
  if (object != EDX) {
    movl(EDX, object);
  }
  call(Address(THR, target::Thread::write_barrier_entry_point_offset()));
  if (value != EDX) {
    popl(EDX);  // Restore EDX.
  }
  Bind(&done);
}
1947
// Store 'value' into a field of 'object' when the caller guarantees no write
// barrier is required; in DEBUG builds, verify that guarantee.
void Assembler::StoreIntoObjectNoBarrier(Register object,
                                         const Address& dest,
                                         Register value) {
  movl(dest, value);
#if defined(DEBUG)
  Label done;
  pushl(value);
  StoreIntoObjectFilter(object, value, &done, kValueCanBeSmi, kJumpToNoUpdate);

  testb(FieldAddress(object, target::Object::tags_offset()),
        Immediate(1 << target::ObjectLayout::kOldAndNotRememberedBit));
  j(ZERO, &done, Assembler::kNearJump);

  Stop("Store buffer update is required");
  Bind(&done);
  popl(value);
#endif  // defined(DEBUG)
  // No store buffer update.
}
1967
// Destroys the value register.
// Store 'value' into array element at [slot] with a write barrier, calling
// the array write-barrier stub. The shuffling below gets 'object' and 'slot'
// into the stub's fixed registers without clobbering either, covering the
// case where they occupy each other's target registers.
void Assembler::StoreIntoArray(Register object,
                               Register slot,
                               Register value,
                               CanBeSmi can_be_smi) {
  ASSERT(object != value);
  movl(Address(slot, 0), value);

  Label done;
  StoreIntoObjectFilter(object, value, &done, can_be_smi, kJumpToNoUpdate);
  // A store buffer update is required.
  if (value != kWriteBarrierObjectReg) {
    pushl(kWriteBarrierObjectReg);  // Preserve kWriteBarrierObjectReg.
  }
  if (value != kWriteBarrierSlotReg && slot != kWriteBarrierSlotReg) {
    pushl(kWriteBarrierSlotReg);  // Preserve kWriteBarrierSlotReg.
  }
  if (object != kWriteBarrierObjectReg && slot != kWriteBarrierSlotReg) {
    if (slot == kWriteBarrierObjectReg && object == kWriteBarrierSlotReg) {
      xchgl(slot, object);
    } else if (slot == kWriteBarrierObjectReg) {
      movl(kWriteBarrierSlotReg, slot);
      movl(kWriteBarrierObjectReg, object);
    } else {
      movl(kWriteBarrierObjectReg, object);
      movl(kWriteBarrierSlotReg, slot);
    }
  } else if (object != kWriteBarrierObjectReg) {
    movl(kWriteBarrierObjectReg, object);
  } else if (slot != kWriteBarrierSlotReg) {
    movl(kWriteBarrierSlotReg, slot);
  }
  call(Address(THR, target::Thread::array_write_barrier_entry_point_offset()));
  if (value != kWriteBarrierSlotReg && slot != kWriteBarrierSlotReg) {
    popl(kWriteBarrierSlotReg);  // Restore kWriteBarrierSlotReg.
  }
  if (value != kWriteBarrierObjectReg) {
    popl(kWriteBarrierObjectReg);  // Restore kWriteBarrierObjectReg.
  }
  Bind(&done);
}
2009
// Store an object constant into a field: raw-pointer immediate when
// immovable, otherwise mov r/m32, imm32 (C7 /0) with the object tracked.
// No barrier is needed because constants live in old space.
void Assembler::StoreIntoObjectNoBarrier(Register object,
                                         const Address& dest,
                                         const Object& value) {
  ASSERT(IsOriginalObject(value));
  if (target::CanEmbedAsRawPointerInGeneratedCode(value)) {
    Immediate imm_value(target::ToRawPointer(value));
    movl(dest, imm_value);
  } else {
    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
    EmitUint8(0xC7);
    EmitOperand(0, dest);
    buffer_.EmitObject(value);
  }
  // No store buffer update.
}
2025
// Store a non-heap (internal) pointer into 'object'; no barrier needed since
// the GC does not trace it.
void Assembler::StoreInternalPointer(Register object,
                                     const Address& dest,
                                     Register value) {
  movl(dest, value);
}
2031
// Store 'value' into a field known to hold only Smis (no barrier needed);
// DEBUG builds verify the Smi tag.
void Assembler::StoreIntoSmiField(const Address& dest, Register value) {
#if defined(DEBUG)
  Label done;
  testl(value, Immediate(kHeapObjectTag));
  j(ZERO, &done);
  Stop("New value must be Smi.");
  Bind(&done);
#endif  // defined(DEBUG)
  movl(dest, value);
}
2042
// Initialize a Smi field to Smi zero.
void Assembler::ZeroInitSmiField(const Address& dest) {
  Immediate zero(target::ToRawSmi(0));
  movl(dest, zero);
}
2047
// Add 'increment' (as a Smi) to a Smi field in memory.
void Assembler::IncrementSmiField(const Address& dest, int32_t increment) {
  // Note: FlowGraphCompiler::EdgeCounterIncrementSizeInBytes depends on
  // the length of this instruction sequence.
  Immediate inc_imm(target::ToRawSmi(increment));
  addl(dest, inc_imm);
}
2054
// Materialize a double constant in 'dst' by pushing its two 32-bit halves
// onto the stack, loading via movsd, and popping the scratch space.
void Assembler::LoadDoubleConstant(XmmRegister dst, double value) {
  // TODO(5410843): Need to have a code constants table.
  int64_t constant = bit_cast<int64_t, double>(value);
  pushl(Immediate(Utils::High32Bits(constant)));
  pushl(Immediate(Utils::Low32Bits(constant)));
  movsd(dst, Address(ESP, 0));
  addl(ESP, Immediate(2 * target::kWordSize));
}
2063
// Negate the float in the low lane of 'f' by XOR-ing with a sign-bit mask
// held in a 16-byte-aligned static constant.
void Assembler::FloatNegate(XmmRegister f) {
  static const struct ALIGN16 {
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
  } float_negate_constant = {0x80000000, 0x00000000, 0x80000000, 0x00000000};
  xorps(f, Address::Absolute(reinterpret_cast<uword>(&float_negate_constant)));
}
2073
// Negate the double in 'd' by XOR-ing with a sign-bit mask constant.
void Assembler::DoubleNegate(XmmRegister d) {
  static const struct ALIGN16 {
    uint64_t a;
    uint64_t b;
  } double_negate_constant = {0x8000000000000000LLU, 0x8000000000000000LLU};
  xorpd(d, Address::Absolute(reinterpret_cast<uword>(&double_negate_constant)));
}
2081
// Absolute value of the double in 'reg' by AND-ing away the sign bit.
void Assembler::DoubleAbs(XmmRegister reg) {
  static const struct ALIGN16 {
    uint64_t a;
    uint64_t b;
  } double_abs_constant = {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL};
  andpd(reg, Address::Absolute(reinterpret_cast<uword>(&double_abs_constant)));
}
2089
// Emit the standard prologue (push EBP; mov EBP, ESP) plus optional local
// space, recording the prologue offset the first time it is emitted; DEBUG
// builds verify the bytes match the expected prologue pattern.
void Assembler::EnterFrame(intptr_t frame_size) {
  if (prologue_offset_ == -1) {
    Comment("PrologueOffset = %" Pd "", CodeSize());
    prologue_offset_ = CodeSize();
  }
#ifdef DEBUG
  intptr_t check_offset = CodeSize();
#endif
  pushl(EBP);
  movl(EBP, ESP);
#ifdef DEBUG
  ProloguePattern pp(CodeAddress(check_offset));
  ASSERT(pp.IsValid());
#endif
  if (frame_size != 0) {
    Immediate frame_space(frame_size);
    subl(ESP, frame_space);
  }
}
2109
// Standard epilogue: restore ESP from EBP and pop the saved EBP.
void Assembler::LeaveFrame() {
  movl(ESP, EBP);
  popl(EBP);
}
2114
void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) {
  // Reserve space for arguments and align frame before entering
  // the C++ world.
  AddImmediate(ESP, Immediate(-frame_space));
  if (OS::ActivationFrameAlignment() > 1) {
    // Round ESP down to the required alignment boundary.
    andl(ESP, Immediate(~(OS::ActivationFrameAlignment() - 1)));
  }
}
2123
// DEBUG-only check that ESP is exactly at the exit-link slot of the entry
// frame; stops execution on mismatch. No-op in release builds.
void Assembler::EmitEntryFrameVerification() {
#if defined(DEBUG)
  Label ok;
  leal(EAX, Address(EBP, target::frame_layout.exit_link_slot_from_entry_fp *
                             target::kWordSize));
  cmpl(EAX, ESP);
  j(EQUAL, &ok);
  Stop("target::frame_layout.exit_link_slot_from_entry_fp mismatch");
  Bind(&ok);
#endif
}
2135
// EBX receiver, ECX ICData entries array
// Preserve EDX (ARGS_DESC_REG), not required today, but maybe later.
// Emit the JIT monomorphic checked entry: compare the receiver's cid (Smi
// for Smi receivers) against the expected cid in the ICData; on mismatch
// jump to the switchable-call miss handler, on match bump the call count and
// fall through to the unchecked entry. ASSERTs pin the code offsets that the
// rest of the VM relies on.
void Assembler::MonomorphicCheckedEntryJIT() {
  has_monomorphic_entry_ = true;
  intptr_t start = CodeSize();
  Label have_cid, miss;
  Bind(&miss);
  jmp(Address(THR, target::Thread::switchable_call_miss_entry_offset()));

  Comment("MonomorphicCheckedEntry");
  ASSERT(CodeSize() - start ==
         target::Instructions::kMonomorphicEntryOffsetJIT);

  const intptr_t cid_offset = target::Array::element_offset(0);
  const intptr_t count_offset = target::Array::element_offset(1);

  movl(EAX, Immediate(kSmiCid << 1));  // Smi cid, pre-tagged as a Smi.
  testl(EBX, Immediate(kSmiTagMask));
  j(ZERO, &have_cid, kNearJump);
  LoadClassId(EAX, EBX);
  SmiTag(EAX);
  Bind(&have_cid);
  // EAX: cid as Smi

  cmpl(EAX, FieldAddress(ECX, cid_offset));
  j(NOT_EQUAL, &miss, Assembler::kNearJump);
  addl(FieldAddress(ECX, count_offset), Immediate(target::ToRawSmi(1)));
  xorl(EDX, EDX);  // GC-safe for OptimizeInvokedFunction.
  nop(1);

  // Fall through to unchecked entry.
  ASSERT(CodeSize() - start ==
         target::Instructions::kPolymorphicEntryOffsetJIT);
}
2170
// EBX receiver, ECX guarded cid as Smi.
// Preserve EDX (ARGS_DESC_REG), not required today, but maybe later.
// AOT mode is not supported on IA32.
void Assembler::MonomorphicCheckedEntryAOT() {
  UNIMPLEMENTED();
}
2176
// Emit a jump to 'label' occupying the monomorphic entry region, padding
// before and after with int3 so the fixed entry offsets are preserved.
void Assembler::BranchOnMonomorphicCheckedEntryJIT(Label* label) {
  has_monomorphic_entry_ = true;
  while (CodeSize() < target::Instructions::kMonomorphicEntryOffsetJIT) {
    int3();
  }
  jmp(label);
  while (CodeSize() < target::Instructions::kPolymorphicEntryOffsetJIT) {
    int3();
  }
}
2187
// Transition the thread into a safepoint: try a lock cmpxchg on
// Thread::safepoint_state (unacquired -> acquired); if the fast path fails
// (or FLAG_use_slow_path forces it), call the enter-safepoint stub.
// 'scratch' is clobbered; EAX is preserved around the cmpxchg.
void Assembler::EnterSafepoint(Register scratch) {
  // We generate the same number of instructions whether or not the slow-path is
  // forced. This simplifies GenerateJitCallbackTrampolines.

  // Compare and swap the value at Thread::safepoint_state from unacquired to
  // acquired. On success, jump to 'success'; otherwise, fallthrough.
  Label done, slow_path;
  if (FLAG_use_slow_path) {
    jmp(&slow_path);
  }

  pushl(EAX);
  movl(EAX, Immediate(target::Thread::safepoint_state_unacquired()));
  movl(scratch, Immediate(target::Thread::safepoint_state_acquired()));
  LockCmpxchgl(Address(THR, target::Thread::safepoint_state_offset()), scratch);
  movl(scratch, EAX);  // Save cmpxchg's observed value before restoring EAX.
  popl(EAX);
  cmpl(scratch, Immediate(target::Thread::safepoint_state_unacquired()));

  if (!FLAG_use_slow_path) {
    j(EQUAL, &done);
  }

  Bind(&slow_path);
  movl(scratch, Address(THR, target::Thread::enter_safepoint_stub_offset()));
  movl(scratch, FieldAddress(scratch, target::Code::entry_point_offset()));
  call(scratch);

  Bind(&done);
}
2218
// Transitions the thread from executing generated (Dart) code to executing
// native code: records 'new_exit_frame' so the stack walker can cross the
// native frames, stores 'new_exit_through_ffi', tags the thread with
// 'destination_address' and flips the execution state.  Optionally enters a
// safepoint.  Note: 'new_exit_through_ffi' is reused as a scratch register
// after its value has been stored.
void Assembler::TransitionGeneratedToNative(Register destination_address,
                                            Register new_exit_frame,
                                            Register new_exit_through_ffi,
                                            bool enter_safepoint) {
  // Save exit frame information to enable stack walking.
  movl(Address(THR, target::Thread::top_exit_frame_info_offset()),
       new_exit_frame);

  movl(compiler::Address(THR,
                         compiler::target::Thread::exit_through_ffi_offset()),
       new_exit_through_ffi);
  // The stored value is no longer needed, so the register can be reused.
  Register scratch = new_exit_through_ffi;

  // Mark that the thread is executing native code.
  movl(VMTagAddress(), destination_address);
  movl(Address(THR, target::Thread::execution_state_offset()),
       Immediate(target::Thread::native_execution_state()));

  if (enter_safepoint) {
    EnterSafepoint(scratch);
  }
}
2241
// Inverse of EnterSafepoint: atomically switches Thread::safepoint_state from
// "acquired" back to "unacquired", calling the exit-safepoint stub when the
// compare-and-swap fails or FLAG_use_slow_path is set.  Clobbers 'scratch'.
void Assembler::ExitSafepoint(Register scratch) {
  ASSERT(scratch != EAX);
  // We generate the same number of instructions whether or not the slow-path is
  // forced, for consistency with EnterSafepoint.

  // Compare and swap the value at Thread::safepoint_state from acquired to
  // unacquired. On success, jump to 'success'; otherwise, fallthrough.
  Label done, slow_path;
  if (FLAG_use_slow_path) {
    jmp(&slow_path);
  }

  // cmpxchg compares EAX with the memory operand; preserve the caller's EAX
  // around the exchange.
  pushl(EAX);
  movl(EAX, Immediate(target::Thread::safepoint_state_acquired()));
  movl(scratch, Immediate(target::Thread::safepoint_state_unacquired()));
  LockCmpxchgl(Address(THR, target::Thread::safepoint_state_offset()), scratch);
  movl(scratch, EAX);  // Value observed in memory (== expected on success).
  popl(EAX);
  cmpl(scratch, Immediate(target::Thread::safepoint_state_acquired()));

  if (!FLAG_use_slow_path) {
    j(EQUAL, &done);
  }

  // Slow path: call the exit-safepoint stub through the thread.
  Bind(&slow_path);
  movl(scratch, Address(THR, target::Thread::exit_safepoint_stub_offset()));
  movl(scratch, FieldAddress(scratch, target::Code::entry_point_offset()));
  call(scratch);

  Bind(&done);
}
2273
// Transitions the thread from executing native code back to generated (Dart)
// code: optionally exits the safepoint, restores the compiled VM tag and
// generated execution state, and clears the exit-frame bookkeeping.
// Clobbers 'scratch'.
void Assembler::TransitionNativeToGenerated(Register scratch,
                                            bool exit_safepoint) {
  if (exit_safepoint) {
    ExitSafepoint(scratch);
  } else {
#if defined(DEBUG)
    // Ensure we've already left the safepoint.
    movl(scratch, Address(THR, target::Thread::safepoint_state_offset()));
    andl(scratch, Immediate(1 << target::Thread::safepoint_state_inside_bit()));
    Label ok;
    j(ZERO, &ok);
    Breakpoint();
    Bind(&ok);
#endif
  }

  // Mark that the thread is executing Dart code.
  movl(Assembler::VMTagAddress(),
       Immediate(target::Thread::vm_tag_compiled_id()));
  movl(Address(THR, target::Thread::execution_state_offset()),
       Immediate(target::Thread::generated_execution_state()));

  // Reset exit frame information in Isolate's mutator thread structure.
  movl(Address(THR, target::Thread::top_exit_frame_info_offset()),
       Immediate(0));
  movl(compiler::Address(THR,
                         compiler::target::Thread::exit_through_ffi_offset()),
       compiler::Immediate(0));
}
2303
// Caller-saved (volatile) CPU registers of the IA32 calling convention; these
// are the registers Enter/LeaveCallRuntimeFrame save and restore.
static const intptr_t kNumberOfVolatileCpuRegisters = 3;
static const Register volatile_cpu_registers[kNumberOfVolatileCpuRegisters] = {
    EAX, ECX, EDX};

// XMM0 is used only as a scratch register in the optimized code. No need to
// save it.
static const intptr_t kNumberOfVolatileXmmRegisters = kNumberOfXmmRegisters - 1;
2311
2312void Assembler::EnterCallRuntimeFrame(intptr_t frame_space) {
2313 Comment("EnterCallRuntimeFrame");
2314 EnterFrame(0);
2315
2316 // Preserve volatile CPU registers.
2317 for (intptr_t i = 0; i < kNumberOfVolatileCpuRegisters; i++) {
2318 pushl(volatile_cpu_registers[i]);
2319 }
2320
2321 // Preserve all XMM registers except XMM0
2322 subl(ESP, Immediate((kNumberOfXmmRegisters - 1) * kFpuRegisterSize));
2323 // Store XMM registers with the lowest register number at the lowest
2324 // address.
2325 intptr_t offset = 0;
2326 for (intptr_t reg_idx = 1; reg_idx < kNumberOfXmmRegisters; ++reg_idx) {
2327 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx);
2328 movups(Address(ESP, offset), xmm_reg);
2329 offset += kFpuRegisterSize;
2330 }
2331
2332 ReserveAlignedFrameSpace(frame_space);
2333}
2334
// Tears down a frame created by EnterCallRuntimeFrame, restoring the volatile
// XMM and CPU registers saved there, then the caller's frame.
void Assembler::LeaveCallRuntimeFrame() {
  // ESP might have been modified to reserve space for arguments
  // and ensure proper alignment of the stack frame.
  // We need to restore it before restoring registers.
  const intptr_t kPushedRegistersSize =
      kNumberOfVolatileCpuRegisters * target::kWordSize +
      kNumberOfVolatileXmmRegisters * kFpuRegisterSize;
  // Rewind ESP to the start of the register save area below EBP.
  leal(ESP, Address(EBP, -kPushedRegistersSize));

  // Restore all XMM registers except XMM0.
  // XMM registers have the lowest register number at the lowest address.
  intptr_t offset = 0;
  for (intptr_t reg_idx = 1; reg_idx < kNumberOfXmmRegisters; ++reg_idx) {
    XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx);
    movups(xmm_reg, Address(ESP, offset));
    offset += kFpuRegisterSize;
  }
  addl(ESP, Immediate(offset));

  // Restore volatile CPU registers in reverse push order.
  for (intptr_t i = kNumberOfVolatileCpuRegisters - 1; i >= 0; i--) {
    popl(volatile_cpu_registers[i]);
  }

  leave();
}
2361
// Calls the runtime entry 'entry' with 'argument_count' arguments; the entry
// itself knows how to emit the call sequence.
void Assembler::CallRuntime(const RuntimeEntry& entry,
                            intptr_t argument_count) {
  entry.Call(this, argument_count);
}
2366
// Loads 'target' into CODE_REG and calls its entry point of the requested
// 'entry_kind'.  'movable_target' is forwarded to LoadObject.
void Assembler::Call(const Code& target,
                     bool movable_target,
                     CodeEntryKind entry_kind) {
  LoadObject(CODE_REG, ToObject(target), movable_target);
  call(FieldAddress(CODE_REG, target::Code::entry_point_offset(entry_kind)));
}
2373
// Calls the call-to-runtime entry point cached on the current thread.
void Assembler::CallToRuntime() {
  call(Address(THR, target::Thread::call_to_runtime_entry_point_offset()));
}
2377
// Emits a direct jump to the entry point of 'target'.
void Assembler::Jmp(const Code& target) {
  const ExternalLabel label(target::Code::EntryPointOf(target));
  jmp(&label);
}
2382
// Emits a conditional jump to the entry point of 'target'.
void Assembler::J(Condition condition, const Code& target) {
  const ExternalLabel label(target::Code::EntryPointOf(target));
  j(condition, &label);
}
2387
2388void Assembler::Align(intptr_t alignment, intptr_t offset) {
2389 ASSERT(Utils::IsPowerOfTwo(alignment));
2390 intptr_t pos = offset + buffer_.GetPosition();
2391 intptr_t mod = pos & (alignment - 1);
2392 if (mod == 0) {
2393 return;
2394 }
2395 intptr_t bytes_needed = alignment - mod;
2396 while (bytes_needed > MAX_NOP_SIZE) {
2397 nop(MAX_NOP_SIZE);
2398 bytes_needed -= MAX_NOP_SIZE;
2399 }
2400 if (bytes_needed) {
2401 nop(bytes_needed);
2402 }
2403 ASSERT(((offset + buffer_.GetPosition()) & (alignment - 1)) == 0);
2404}
2405
// Binds 'label' to the current buffer position and back-patches every
// previously emitted branch that referenced it.
void Assembler::Bind(Label* label) {
  intptr_t bound = buffer_.Size();
  ASSERT(!label->IsBound());  // Labels can only be bound once.
  // Walk the chain of 32-bit links: each linked site currently stores the
  // position of the next link; replace it with the pc-relative displacement
  // to the bound position (relative to the end of the 4-byte field).
  while (label->IsLinked()) {
    intptr_t position = label->LinkPosition();
    intptr_t next = buffer_.Load<int32_t>(position);
    buffer_.Store<int32_t>(position, bound - (position + 4));
    label->position_ = next;
  }
  // Patch near (8-bit displacement) links; the offset must fit in a byte.
  while (label->HasNear()) {
    intptr_t position = label->NearPosition();
    intptr_t offset = bound - (position + 1);
    ASSERT(Utils::IsInt(8, offset));
    buffer_.Store<int8_t>(position, offset);
  }
  label->BindTo(bound);
}
2423
// x86 has no memory-to-memory mov, so stage the word through 'tmp'
// (clobbered).
void Assembler::MoveMemoryToMemory(Address dst, Address src, Register tmp) {
  movl(tmp, src);
  movl(dst, tmp);
}
2428
2429#ifndef PRODUCT
2430void Assembler::MaybeTraceAllocation(intptr_t cid,
2431 Register temp_reg,
2432 Label* trace,
2433 bool near_jump) {
2434 ASSERT(cid > 0);
2435 Address state_address(kNoRegister, 0);
2436
2437 const intptr_t shared_table_offset =
2438 target::Isolate::shared_class_table_offset();
2439 const intptr_t table_offset =
2440 target::SharedClassTable::class_heap_stats_table_offset();
2441 const intptr_t class_offset = target::ClassTable::ClassOffsetFor(cid);
2442
2443 ASSERT(temp_reg != kNoRegister);
2444 LoadIsolate(temp_reg);
2445 movl(temp_reg, Address(temp_reg, shared_table_offset));
2446 movl(temp_reg, Address(temp_reg, table_offset));
2447 cmpb(Address(temp_reg, class_offset), Immediate(0));
2448 // We are tracing for this class, jump to the trace label which will use
2449 // the allocation stub.
2450 j(NOT_ZERO, trace, near_jump);
2451}
2452#endif // !PRODUCT
2453
// Tries to inline-allocate an instance of 'cls' in new-space.  On success,
// 'instance_reg' holds the tagged instance with its tags word initialized
// (all other fields are uninitialized).  Jumps to 'failure' when inline
// allocation is disabled, the size is not new-space allocatable, the
// allocation is traced, or new-space is exhausted.  Clobbers 'temp_reg'.
void Assembler::TryAllocate(const Class& cls,
                            Label* failure,
                            bool near_jump,
                            Register instance_reg,
                            Register temp_reg) {
  ASSERT(failure != NULL);
  ASSERT(temp_reg != kNoRegister);
  const intptr_t instance_size = target::Class::GetInstanceSize(cls);
  if (FLAG_inline_alloc &&
      target::Heap::IsAllocatableInNewSpace(instance_size)) {
    // If this allocation is traced, program will jump to failure path
    // (i.e. the allocation stub) which will allocate the object and trace the
    // allocation call site.
    const classid_t cid = target::Class::GetId(cls);
    NOT_IN_PRODUCT(MaybeTraceAllocation(cid, temp_reg, failure, near_jump));
    movl(instance_reg, Address(THR, target::Thread::top_offset()));
    addl(instance_reg, Immediate(instance_size));
    // instance_reg: potential next object start.
    cmpl(instance_reg, Address(THR, target::Thread::end_offset()));
    j(ABOVE_EQUAL, failure, near_jump);
    // Successfully allocated the object, now update top to point to
    // next object start and store the class in the class field of object.
    movl(Address(THR, target::Thread::top_offset()), instance_reg);
    // Rewind to the object start and apply the heap-object tag.
    ASSERT(instance_size >= kHeapObjectTag);
    subl(instance_reg, Immediate(instance_size - kHeapObjectTag));
    const uint32_t tags =
        target::MakeTagWordForNewSpaceObject(cid, instance_size);
    movl(FieldAddress(instance_reg, target::Object::tags_offset()),
         Immediate(tags));
  } else {
    // Inline allocation not possible: take the slow path unconditionally.
    jmp(failure);
  }
}
2487
// Tries to inline-allocate an array-like object of 'instance_size' bytes in
// new-space.  On success, 'instance' holds the tagged object (tags word
// initialized, payload uninitialized) and 'end_address' the untagged address
// just past it.  Jumps to 'failure' when inline allocation is not possible.
// Clobbers 'temp_reg'.
void Assembler::TryAllocateArray(intptr_t cid,
                                 intptr_t instance_size,
                                 Label* failure,
                                 bool near_jump,
                                 Register instance,
                                 Register end_address,
                                 Register temp_reg) {
  ASSERT(failure != NULL);
  ASSERT(temp_reg != kNoRegister);
  if (FLAG_inline_alloc &&
      target::Heap::IsAllocatableInNewSpace(instance_size)) {
    // If this allocation is traced, program will jump to failure path
    // (i.e. the allocation stub) which will allocate the object and trace the
    // allocation call site.
    NOT_IN_PRODUCT(MaybeTraceAllocation(cid, temp_reg, failure, near_jump));
    movl(instance, Address(THR, target::Thread::top_offset()));
    movl(end_address, instance);

    addl(end_address, Immediate(instance_size));
    j(CARRY, failure);  // Address computation wrapped around.

    // Check if the allocation fits into the remaining space.
    // instance: potential new object start.
    // end_address: potential next object start.
    cmpl(end_address, Address(THR, target::Thread::end_offset()));
    j(ABOVE_EQUAL, failure);

    // Successfully allocated the object(s), now update top to point to
    // next object start and initialize the object.
    movl(Address(THR, target::Thread::top_offset()), end_address);
    addl(instance, Immediate(kHeapObjectTag));

    // Initialize the tags.
    const uint32_t tags =
        target::MakeTagWordForNewSpaceObject(cid, instance_size);
    movl(FieldAddress(instance, target::Object::tags_offset()),
         Immediate(tags));
  } else {
    jmp(failure);
  }
}
2529
// Pushes the Code object of the function being assembled using the
// 'pushl imm32' (0x68) encoding; the immediate slot is filled by
// buffer_.EmitObject so the reference to 'code_' is recorded.
void Assembler::PushCodeObject() {
  ASSERT(IsNotTemporaryScopedHandle(code_));
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x68);
  buffer_.EmitObject(code_);
}
2536
// Sets up a Dart frame: an EBP frame with the current Code object pushed,
// plus 'frame_size' bytes reserved for locals.
void Assembler::EnterDartFrame(intptr_t frame_size) {
  EnterFrame(0);

  PushCodeObject();

  if (frame_size != 0) {
    subl(ESP, Immediate(frame_size));
  }
}
2546
// On entry to a function compiled for OSR, the caller's frame pointer, the
// stack locals, and any copied parameters are already in place. The frame
// pointer is already set up. There may be extra space for spill slots to
// allocate.
void Assembler::EnterOsrFrame(intptr_t extra_size) {
  Comment("EnterOsrFrame");
  // Record the prologue offset if no prologue has been emitted yet.
  if (prologue_offset_ == -1) {
    Comment("PrologueOffset = %" Pd "", CodeSize());
    prologue_offset_ = CodeSize();
  }

  // Only the extra spill-slot space needs to be reserved.
  if (extra_size != 0) {
    subl(ESP, Immediate(extra_size));
  }
}
2562
// A stub frame is a Dart frame with no space reserved for locals.
void Assembler::EnterStubFrame() {
  EnterDartFrame(0);
}
2566
// Tears down a frame created by EnterStubFrame.
void Assembler::LeaveStubFrame() {
  LeaveFrame();
}
2570
// Sets up a frame for calling C code: an EBP frame plus 'frame_space' bytes
// of properly aligned outgoing-argument space.
void Assembler::EnterCFrame(intptr_t frame_space) {
  EnterFrame(0);
  ReserveAlignedFrameSpace(frame_space);
}
2575
// Tears down a frame created by EnterCFrame.
void Assembler::LeaveCFrame() {
  LeaveFrame();
}
2579
// Emits a pre-encoded operand, merging 'rm' (register number or opcode
// extension) into the reg field (bits 3..5) of the operand's ModRM byte.
void Assembler::EmitOperand(int rm, const Operand& operand) {
  ASSERT(rm >= 0 && rm < 8);
  const intptr_t length = operand.length_;
  ASSERT(length > 0);
  // Emit the ModRM byte updated with the given RM value.
  ASSERT((operand.encoding_[0] & 0x38) == 0);
  EmitUint8(operand.encoding_[0] + (rm << 3));
  // Emit the rest of the encoded operand (SIB byte and/or displacement).
  for (intptr_t i = 1; i < length; i++) {
    EmitUint8(operand.encoding_[i]);
  }
}
2592
// Emits a 32-bit immediate operand.
void Assembler::EmitImmediate(const Immediate& imm) {
  EmitInt32(imm.value());
}
2596
// Emits an ALU instruction (operation selected by the 'rm' opcode extension)
// with an immediate, picking the shortest available encoding:
//  - 0x83 /rm with a sign-extended 8-bit immediate,
//  - the shorter EAX-specific form (0x05 + rm<<3) with a 32-bit immediate, or
//  - the general 0x81 /rm form with a 32-bit immediate.
void Assembler::EmitComplex(int rm,
                            const Operand& operand,
                            const Immediate& immediate) {
  ASSERT(rm >= 0 && rm < 8);
  if (immediate.is_int8()) {
    // Use sign-extended 8-bit immediate.
    EmitUint8(0x83);
    EmitOperand(rm, operand);
    EmitUint8(immediate.value() & 0xFF);
  } else if (operand.IsRegister(EAX)) {
    // Use short form if the destination is eax.
    EmitUint8(0x05 + (rm << 3));
    EmitImmediate(immediate);
  } else {
    EmitUint8(0x81);
    EmitOperand(rm, operand);
    EmitImmediate(immediate);
  }
}
2616
// Emits the 32-bit pc-relative displacement of a branch whose full encoding
// is 'instruction_size' bytes: the real (backward) offset when 'label' is
// already bound, otherwise a link to be patched later by Bind().
void Assembler::EmitLabel(Label* label, intptr_t instruction_size) {
  if (label->IsBound()) {
    intptr_t offset = label->Position() - buffer_.Size();
    ASSERT(offset <= 0);  // Bound labels are always behind us.
    EmitInt32(offset - instruction_size);
  } else {
    EmitLabelLink(label);
  }
}
2626
// Appends this site to the label's chain of unresolved 32-bit references:
// the emitted slot stores the previous link position, and the label now
// points at this site.  Bind() later rewrites the chain with real offsets.
void Assembler::EmitLabelLink(Label* label) {
  ASSERT(!label->IsBound());
  intptr_t position = buffer_.Size();
  EmitInt32(label->position_);
  label->LinkTo(position);
}
2633
// Emits an 8-bit placeholder displacement and records this site on the
// label's near-link list for patching by Bind().
void Assembler::EmitNearLabelLink(Label* label) {
  ASSERT(!label->IsBound());
  intptr_t position = buffer_.Size();
  EmitUint8(0);
  label->NearLinkTo(position);
}
2640
// Emits a shift/rotate of 'reg' by an immediate count; 'rm' selects the
// operation.  Shift-by-one uses the shorter 0xD1 encoding.
void Assembler::EmitGenericShift(int rm, Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(imm.is_int8());
  if (imm.value() == 1) {
    EmitUint8(0xD1);
    EmitOperand(rm, Operand(reg));
  } else {
    EmitUint8(0xC1);
    EmitOperand(rm, Operand(reg));
    EmitUint8(imm.value() & 0xFF);
  }
}
2653
// Emits a shift/rotate of 'operand' by a variable count; 'rm' selects the
// operation.  The x86 encoding (0xD3) only supports CL as the shift count,
// hence the ECX assertion.
void Assembler::EmitGenericShift(int rm,
                                 const Operand& operand,
                                 Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(shifter == ECX);
  EmitUint8(0xD3);
  EmitOperand(rm, Operand(operand));
}
2662
// Loads the class id of the (tagged, non-Smi) 'object' into 'result' by
// reading the 16-bit cid field straight out of the object's tags word.
void Assembler::LoadClassId(Register result, Register object) {
  // The layout assertions guarantee the cid is a byte-aligned 16-bit field.
  ASSERT(target::ObjectLayout::kClassIdTagPos == 16);
  ASSERT(target::ObjectLayout::kClassIdTagSize == 16);
  const intptr_t class_id_offset =
      target::Object::tags_offset() +
      target::ObjectLayout::kClassIdTagPos / kBitsPerByte;
  movzxw(result, FieldAddress(object, class_id_offset));
}
2671
// Loads the Class corresponding to 'class_id' into 'result' from the
// isolate's cached class table ('class_id' is an index, untagged).
void Assembler::LoadClassById(Register result, Register class_id) {
  ASSERT(result != class_id);

  const intptr_t table_offset =
      target::Isolate::cached_class_table_table_offset();
  LoadIsolate(result);
  movl(result, Address(result, table_offset));
  movl(result, Address(result, class_id, TIMES_4, 0));
}
2681
// Compares the cid of (non-Smi) 'object' against 'class_id', leaving the
// flags for the caller.  Clobbers 'scratch'.
void Assembler::CompareClassId(Register object,
                               intptr_t class_id,
                               Register scratch) {
  LoadClassId(scratch, object);
  cmpl(scratch, Immediate(class_id));
}
2688
// Optimistically untags 'object'.  If it was a Smi, jumps to 'is_smi' with
// the untagged value in 'object'; otherwise loads its cid into 'scratch' and
// compares it to 'class_id', leaving the flags for the caller (with 'object'
// still shifted right by one).
void Assembler::SmiUntagOrCheckClass(Register object,
                                     intptr_t class_id,
                                     Register scratch,
                                     Label* is_smi) {
  ASSERT(kSmiTagShift == 1);
  ASSERT(target::ObjectLayout::kClassIdTagPos == 16);
  ASSERT(target::ObjectLayout::kClassIdTagSize == 16);
  const intptr_t class_id_offset =
      target::Object::tags_offset() +
      target::ObjectLayout::kClassIdTagPos / kBitsPerByte;

  // Untag optimistically. Tag bit is shifted into the CARRY.
  SmiUntag(object);
  j(NOT_CARRY, is_smi, kNearJump);
  // Load cid: can't use LoadClassId, object is untagged. Use TIMES_2 scale
  // factor in the addressing mode to compensate for this.
  movzxw(scratch, Address(object, TIMES_2, class_id_offset));
  cmpl(scratch, Immediate(class_id));
}
2708
// Loads the cid of 'object' into 'result', producing kSmiCid when 'object'
// is a Smi (Smis have no heap header to read the cid from).
void Assembler::LoadClassIdMayBeSmi(Register result, Register object) {
  if (result == object) {
    // In-place variant: branch on the tag bit and load either the real cid
    // or the constant kSmiCid.
    Label smi, join;

    testl(object, Immediate(kSmiTagMask));
    j(EQUAL, &smi, Assembler::kNearJump);
    LoadClassId(result, object);
    jmp(&join, Assembler::kNearJump);

    Bind(&smi);
    movl(result, Immediate(kSmiCid));

    Bind(&join);
  } else {
    // Branch-free variant using a statically allocated fake object.
    ASSERT(result != object);
    static const intptr_t kSmiCidSource =
        kSmiCid << target::ObjectLayout::kClassIdTagPos;

    // Make a dummy "Object" whose cid is kSmiCid.
    // (+1 applies the heap-object tag so LoadClassId's FieldAddress works.)
    movl(result, Immediate(reinterpret_cast<int32_t>(&kSmiCidSource) + 1));

    // Check if object (in tmp) is a Smi.
    testl(object, Immediate(kSmiTagMask));

    // If the object is not a Smi, use the original object to load the cid.
    // Otherwise, the dummy object is used, and the result is kSmiCid.
    cmovne(result, object);
    LoadClassId(result, result);
  }
}
2739
// Same as LoadClassIdMayBeSmi, but leaves the cid in 'result' Smi-tagged.
void Assembler::LoadTaggedClassIdMayBeSmi(Register result, Register object) {
  if (result == object) {
    // In-place variant: branch on the tag bit.
    Label smi, join;

    testl(object, Immediate(kSmiTagMask));
    j(EQUAL, &smi, Assembler::kNearJump);
    LoadClassId(result, object);
    SmiTag(result);
    jmp(&join, Assembler::kNearJump);

    Bind(&smi);
    movl(result, Immediate(target::ToRawSmi(kSmiCid)));

    Bind(&join);
  } else {
    LoadClassIdMayBeSmi(result, object);
    SmiTag(result);
  }
}
2759
2760Address Assembler::ElementAddressForIntIndex(bool is_external,
2761 intptr_t cid,
2762 intptr_t index_scale,
2763 Register array,
2764 intptr_t index,
2765 intptr_t extra_disp) {
2766 if (is_external) {
2767 return Address(array, index * index_scale + extra_disp);
2768 } else {
2769 const int64_t disp = static_cast<int64_t>(index) * index_scale +
2770 target::Instance::DataOffsetFor(cid) + extra_disp;
2771 ASSERT(Utils::IsInt(32, disp));
2772 return FieldAddress(array, static_cast<int32_t>(disp));
2773 }
2774}
2775
2776static ScaleFactor ToScaleFactor(intptr_t index_scale, bool index_unboxed) {
2777 if (index_unboxed) {
2778 switch (index_scale) {
2779 case 1:
2780 return TIMES_1;
2781 case 2:
2782 return TIMES_2;
2783 case 4:
2784 return TIMES_4;
2785 case 8:
2786 return TIMES_8;
2787 case 16:
2788 return TIMES_16;
2789 default:
2790 UNREACHABLE();
2791 return TIMES_1;
2792 }
2793 } else {
2794 // Note that index is expected smi-tagged, (i.e, times 2) for all arrays
2795 // with index scale factor > 1. E.g., for Uint8Array and OneByteString the
2796 // index is expected to be untagged before accessing.
2797 ASSERT(kSmiTagShift == 1);
2798 switch (index_scale) {
2799 case 1:
2800 return TIMES_1;
2801 case 2:
2802 return TIMES_1;
2803 case 4:
2804 return TIMES_2;
2805 case 8:
2806 return TIMES_4;
2807 case 16:
2808 return TIMES_8;
2809 default:
2810 UNREACHABLE();
2811 return TIMES_1;
2812 }
2813 }
2814}
2815
2816Address Assembler::ElementAddressForRegIndex(bool is_external,
2817 intptr_t cid,
2818 intptr_t index_scale,
2819 bool index_unboxed,
2820 Register array,
2821 Register index,
2822 intptr_t extra_disp) {
2823 if (is_external) {
2824 return Address(array, index, ToScaleFactor(index_scale, index_unboxed),
2825 extra_disp);
2826 } else {
2827 return FieldAddress(array, index, ToScaleFactor(index_scale, index_unboxed),
2828 target::Instance::DataOffsetFor(cid) + extra_disp);
2829 }
2830}
2831
2832} // namespace compiler
2833} // namespace dart
2834
2835#endif // defined(TARGET_ARCH_IA32)
2836