1 | // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 | // for details. All rights reserved. Use of this source code is governed by a |
3 | // BSD-style license that can be found in the LICENSE file. |
4 | |
5 | #include "vm/globals.h" |
6 | #if defined(TARGET_ARCH_ARM64) |
7 | |
8 | #include "vm/compiler/assembler/assembler.h" |
9 | #include "vm/cpu.h" |
10 | #include "vm/os.h" |
11 | #include "vm/unit_test.h" |
12 | #include "vm/virtual_memory.h" |
13 | |
14 | namespace dart { |
15 | namespace compiler { |
16 | #define __ assembler-> |
17 | |
18 | ASSEMBLER_TEST_GENERATE(Simple, assembler) { |
19 | __ add(R0, ZR, Operand(ZR)); |
20 | __ add(R0, R0, Operand(42)); |
21 | __ ret(); |
22 | } |
23 | |
24 | ASSEMBLER_TEST_RUN(Simple, test) { |
25 | typedef int64_t (*Int64Return)() DART_UNUSED; |
26 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
27 | } |
28 | |
29 | // Move wide immediate tests. |
30 | // movz |
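// movz Rd, imm16, hw writes imm16 << (16 * hw) and zeroes all other bits,
// so each test below places 42 in a different half-word slot.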
31 | ASSEMBLER_TEST_GENERATE(Movz0, assembler) { |
32 | __ movz(R0, Immediate(42), 0); |
33 | __ ret(); |
34 | } |
35 | |
36 | ASSEMBLER_TEST_RUN(Movz0, test) { |
37 | typedef int64_t (*Int64Return)() DART_UNUSED; |
38 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
39 | } |
40 | |
41 | ASSEMBLER_TEST_GENERATE(Movz1, assembler) { |
42 | __ movz(R0, Immediate(42), 0); // Overwritten by next instruction. |
43 | __ movz(R0, Immediate(42), 1); |
44 | __ ret(); |
45 | } |
46 | |
47 | ASSEMBLER_TEST_RUN(Movz1, test) { |
48 | typedef int64_t (*Int64Return)() DART_UNUSED; |
49 | EXPECT_EQ(42LL << 16, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
50 | } |
51 | |
52 | ASSEMBLER_TEST_GENERATE(Movz2, assembler) { |
53 | __ movz(R0, Immediate(42), 2); |
54 | __ ret(); |
55 | } |
56 | |
57 | ASSEMBLER_TEST_RUN(Movz2, test) { |
58 | typedef int64_t (*Int64Return)() DART_UNUSED; |
59 | EXPECT_EQ(42LL << 32, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
60 | } |
61 | |
62 | ASSEMBLER_TEST_GENERATE(Movz3, assembler) { |
63 | __ movz(R0, Immediate(42), 3); |
64 | __ ret(); |
65 | } |
66 | |
67 | ASSEMBLER_TEST_RUN(Movz3, test) { |
68 | typedef int64_t (*Int64Return)() DART_UNUSED; |
69 | EXPECT_EQ(42LL << 48, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
70 | } |
71 | |
72 | // movn |
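// movn Rd, imm16, hw writes ~(imm16 << (16 * hw)), hence the expected
// values below are the bitwise complements of the shifted immediate.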
73 | ASSEMBLER_TEST_GENERATE(Movn0, assembler) { |
74 | __ movn(R0, Immediate(42), 0); |
75 | __ ret(); |
76 | } |
77 | |
78 | ASSEMBLER_TEST_RUN(Movn0, test) { |
79 | typedef int64_t (*Int64Return)() DART_UNUSED; |
80 | EXPECT_EQ(~42LL, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
81 | } |
82 | |
83 | ASSEMBLER_TEST_GENERATE(Movn1, assembler) { |
84 | __ movn(R0, Immediate(42), 1); |
85 | __ ret(); |
86 | } |
87 | |
88 | ASSEMBLER_TEST_RUN(Movn1, test) { |
89 | typedef int64_t (*Int64Return)() DART_UNUSED; |
90 | EXPECT_EQ(~(42LL << 16), EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
91 | } |
92 | |
93 | ASSEMBLER_TEST_GENERATE(Movn2, assembler) { |
94 | __ movn(R0, Immediate(42), 2); |
95 | __ ret(); |
96 | } |
97 | |
98 | ASSEMBLER_TEST_RUN(Movn2, test) { |
99 | typedef int64_t (*Int64Return)() DART_UNUSED; |
100 | EXPECT_EQ(~(42LL << 32), EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
101 | } |
102 | |
103 | ASSEMBLER_TEST_GENERATE(Movn3, assembler) { |
104 | __ movn(R0, Immediate(42), 3); |
105 | __ ret(); |
106 | } |
107 | |
108 | ASSEMBLER_TEST_RUN(Movn3, test) { |
109 | typedef int64_t (*Int64Return)() DART_UNUSED; |
110 | EXPECT_EQ(~(42LL << 48), EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
111 | } |
112 | |
113 | // movk |
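// movk Rd, imm16, hw replaces only the 16-bit field at position 16 * hw and
// keeps the rest of the register, so it composes with a preceding movz/movn.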
114 | ASSEMBLER_TEST_GENERATE(Movk0, assembler) { |
115 | __ movz(R0, Immediate(1), 3); |
116 | __ movk(R0, Immediate(42), 0); |
117 | __ ret(); |
118 | } |
119 | |
120 | ASSEMBLER_TEST_RUN(Movk0, test) { |
121 | typedef int64_t (*Int64Return)() DART_UNUSED; |
122 | EXPECT_EQ(42LL | (1LL << 48), |
123 | EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
124 | } |
125 | |
126 | ASSEMBLER_TEST_GENERATE(Movk1, assembler) { |
127 | __ movz(R0, Immediate(1), 0); |
128 | __ movk(R0, Immediate(42), 1); |
129 | __ ret(); |
130 | } |
131 | |
132 | ASSEMBLER_TEST_RUN(Movk1, test) { |
133 | typedef int64_t (*Int64Return)() DART_UNUSED; |
134 | EXPECT_EQ((42LL << 16) | 1, |
135 | EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
136 | } |
137 | |
138 | ASSEMBLER_TEST_GENERATE(Movk2, assembler) { |
139 | __ movz(R0, Immediate(1), 0); |
140 | __ movk(R0, Immediate(42), 2); |
141 | __ ret(); |
142 | } |
143 | |
144 | ASSEMBLER_TEST_RUN(Movk2, test) { |
145 | typedef int64_t (*Int64Return)() DART_UNUSED; |
146 | EXPECT_EQ((42LL << 32) | 1, |
147 | EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
148 | } |
149 | |
150 | ASSEMBLER_TEST_GENERATE(Movk3, assembler) { |
151 | __ movz(R0, Immediate(1), 0); |
152 | __ movk(R0, Immediate(42), 3); |
153 | __ ret(); |
154 | } |
155 | |
156 | ASSEMBLER_TEST_RUN(Movk3, test) { |
157 | typedef int64_t (*Int64Return)() DART_UNUSED; |
158 | EXPECT_EQ((42LL << 48) | 1, |
159 | EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
160 | } |
161 | |
162 | ASSEMBLER_TEST_GENERATE(MovzBig, assembler) { |
163 | __ movz(R0, Immediate(0x8000), 0); |
164 | __ ret(); |
165 | } |
166 | |
167 | ASSEMBLER_TEST_RUN(MovzBig, test) { |
168 | typedef int64_t (*Int64Return)() DART_UNUSED; |
169 | EXPECT_EQ(0x8000, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
170 | } |
171 | |
172 | // add tests. |
173 | ASSEMBLER_TEST_GENERATE(AddReg, assembler) { |
174 | __ movz(R0, Immediate(20), 0); |
175 | __ movz(R1, Immediate(22), 0); |
176 | __ add(R0, R0, Operand(R1)); |
177 | __ ret(); |
178 | } |
179 | |
180 | ASSEMBLER_TEST_RUN(AddReg, test) { |
181 | typedef int64_t (*Int64Return)() DART_UNUSED; |
182 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
183 | } |
184 | |
185 | ASSEMBLER_TEST_GENERATE(AddLSLReg, assembler) { |
186 | __ movz(R0, Immediate(20), 0); |
187 | __ movz(R1, Immediate(11), 0); |
188 | __ add(R0, R0, Operand(R1, LSL, 1)); |
189 | __ ret(); |
190 | } |
191 | |
192 | ASSEMBLER_TEST_RUN(AddLSLReg, test) { |
193 | typedef int64_t (*Int64Return)() DART_UNUSED; |
194 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
195 | } |
196 | |
197 | ASSEMBLER_TEST_GENERATE(AddLSRReg, assembler) { |
198 | __ movz(R0, Immediate(20), 0); |
199 | __ movz(R1, Immediate(44), 0); |
200 | __ add(R0, R0, Operand(R1, LSR, 1)); |
201 | __ ret(); |
202 | } |
203 | |
204 | ASSEMBLER_TEST_RUN(AddLSRReg, test) { |
205 | typedef int64_t (*Int64Return)() DART_UNUSED; |
206 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
207 | } |
208 | |
209 | ASSEMBLER_TEST_GENERATE(AddASRReg, assembler) { |
210 | __ movz(R0, Immediate(20), 0); |
211 | __ movz(R1, Immediate(44), 0); |
212 | __ add(R0, R0, Operand(R1, ASR, 1)); |
213 | __ ret(); |
214 | } |
215 | |
216 | ASSEMBLER_TEST_RUN(AddASRReg, test) { |
217 | typedef int64_t (*Int64Return)() DART_UNUSED; |
218 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
219 | } |
220 | |
221 | ASSEMBLER_TEST_GENERATE(AddASRNegReg, assembler) { |
222 | __ movz(R0, Immediate(43), 0); |
223 | __ movn(R1, Immediate(0), 0); // R1 <- -1 |
224 | __ add(R1, ZR, Operand(R1, LSL, 3)); // R1 <- -8 |
225 | __ add(R0, R0, Operand(R1, ASR, 3)); // R0 <- 43 + (-8 >> 3) |
226 | __ ret(); |
227 | } |
228 | |
229 | ASSEMBLER_TEST_RUN(AddASRNegReg, test) { |
230 | typedef int64_t (*Int64Return)() DART_UNUSED; |
231 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
232 | } |
233 | |
234 | // TODO(zra): test other sign extension modes. |
235 | ASSEMBLER_TEST_GENERATE(AddExtReg, assembler) { |
236 | __ movz(R0, Immediate(43), 0); |
237 | __ movz(R1, Immediate(0xffff), 0); |
238 | __ movk(R1, Immediate(0xffff), 1); // R1 <- -1 (32-bit) |
239 | __ add(R0, R0, Operand(R1, SXTW, 0)); // R0 <- R0 + (sign extended R1) |
240 | __ ret(); |
241 | } |
242 | |
243 | ASSEMBLER_TEST_RUN(AddExtReg, test) { |
244 | typedef int64_t (*Int64Return)() DART_UNUSED; |
245 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
246 | } |
247 | |
248 | ASSEMBLER_TEST_GENERATE(AddCarryInOut, assembler) { |
249 | __ LoadImmediate(R2, -1); |
250 | __ LoadImmediate(R1, 1); |
251 | __ LoadImmediate(R0, 0); |
252 | __ adds(IP0, R2, Operand(R1)); // c_out = 1. |
253 | __ adcs(IP0, R2, R0); // c_in = 1, c_out = 1. |
254 | __ adc(R0, R0, R0); // c_in = 1. |
255 | __ ret(); |
256 | } |
257 | |
258 | ASSEMBLER_TEST_RUN(AddCarryInOut, test) { |
259 | typedef int64_t (*Int64Return)() DART_UNUSED; |
260 | EXPECT_EQ(1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
261 | } |
262 | |
263 | ASSEMBLER_TEST_GENERATE(SubCarryInOut, assembler) { |
264 | __ LoadImmediate(R1, 1); |
265 | __ LoadImmediate(R0, 0); |
266 | __ subs(IP0, R0, Operand(R1)); // c_out = 1. |
267 | __ sbcs(IP0, R0, R0); // c_in = 1, c_out = 1. |
268 | __ sbc(R0, R0, R0); // c_in = 1. |
269 | __ ret(); |
270 | } |
271 | |
272 | ASSEMBLER_TEST_RUN(SubCarryInOut, test) { |
273 | typedef int64_t (*Int64Return)() DART_UNUSED; |
274 | EXPECT_EQ(-1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
275 | } |
276 | |
277 | ASSEMBLER_TEST_GENERATE(Overflow, assembler) { |
278 | __ LoadImmediate(R0, 0); |
279 | __ LoadImmediate(R1, 1); |
280 | __ LoadImmediate(R2, 0xFFFFFFFFFFFFFFFF); |
281 | __ LoadImmediate(R3, 0x7FFFFFFFFFFFFFFF); |
282 | __ adds(IP0, R2, Operand(R1)); // c_out = 1. |
__ adcs(IP0, R3, R0); // c_in = 1, v = 1.
284 | __ csinc(R0, R0, R0, VS); // R0 = v ? R0 : R0 + 1. |
285 | __ ret(); |
286 | } |
287 | |
288 | ASSEMBLER_TEST_RUN(Overflow, test) { |
289 | typedef int64_t (*Int64Return)() DART_UNUSED; |
290 | EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
291 | } |
292 | |
293 | ASSEMBLER_TEST_GENERATE(WordAddCarryInOut, assembler) { |
294 | __ LoadImmediate(R2, -1); |
295 | __ LoadImmediate(R1, 1); |
296 | __ LoadImmediate(R0, 0); |
297 | __ addsw(IP0, R2, Operand(R1)); // c_out = 1. |
298 | __ adcsw(IP0, R2, R0); // c_in = 1, c_out = 1. |
299 | __ adcw(R0, R0, R0); // c_in = 1. |
300 | __ ret(); |
301 | } |
302 | |
303 | ASSEMBLER_TEST_RUN(WordAddCarryInOut, test) { |
304 | typedef int64_t (*Int64Return)() DART_UNUSED; |
305 | EXPECT_EQ(1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
306 | } |
307 | |
308 | ASSEMBLER_TEST_GENERATE(WordSubCarryInOut, assembler) { |
309 | __ LoadImmediate(R1, 1); |
310 | __ LoadImmediate(R0, 0); |
311 | __ subsw(IP0, R0, Operand(R1)); // c_out = 1. |
312 | __ sbcsw(IP0, R0, R0); // c_in = 1, c_out = 1. |
313 | __ sbcw(R0, R0, R0); // c_in = 1. |
314 | __ ret(); |
315 | } |
316 | |
317 | ASSEMBLER_TEST_RUN(WordSubCarryInOut, test) { |
318 | typedef int64_t (*Int64Return)() DART_UNUSED; |
319 | EXPECT_EQ(0x0FFFFFFFF, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
320 | } |
321 | |
322 | ASSEMBLER_TEST_GENERATE(WordOverflow, assembler) { |
323 | __ LoadImmediate(R0, 0); |
324 | __ LoadImmediate(R1, 1); |
325 | __ LoadImmediate(R2, 0xFFFFFFFF); |
326 | __ LoadImmediate(R3, 0x7FFFFFFF); |
327 | __ addsw(IP0, R2, Operand(R1)); // c_out = 1. |
__ adcsw(IP0, R3, R0); // c_in = 1, v = 1.
329 | __ csinc(R0, R0, R0, VS); // R0 = v ? R0 : R0 + 1. |
330 | __ ret(); |
331 | } |
332 | |
333 | ASSEMBLER_TEST_RUN(WordOverflow, test) { |
334 | typedef int64_t (*Int64Return)() DART_UNUSED; |
335 | EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
336 | } |
337 | |
338 | // Loads and Stores. |
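// In this assembler SP is the Dart stack pointer register and CSP the
// architectural stack pointer; SetupDartSP/RestoreCSP switch between them.
// The tests keep CSP at or below every address they touch (the "Must not
// access beyond CSP" adjustments) since memory below CSP may not be safely
// accessible.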
339 | ASSEMBLER_TEST_GENERATE(SimpleLoadStore, assembler) { |
340 | __ SetupDartSP(); |
341 | |
342 | __ sub(CSP, CSP, |
343 | Operand(2 * target::kWordSize)); // Must not access beyond CSP. |
344 | |
345 | __ movz(R0, Immediate(43), 0); |
346 | __ movz(R1, Immediate(42), 0); |
347 | __ str(R1, Address(SP, -1 * target::kWordSize, Address::PreIndex)); |
348 | __ ldr(R0, Address(SP, 1 * target::kWordSize, Address::PostIndex)); |
349 | __ RestoreCSP(); |
350 | __ ret(); |
351 | } |
352 | |
353 | ASSEMBLER_TEST_RUN(SimpleLoadStore, test) { |
354 | typedef int64_t (*Int64Return)() DART_UNUSED; |
355 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
356 | } |
357 | |
358 | ASSEMBLER_TEST_GENERATE(SimpleLoadStoreHeapTag, assembler) { |
359 | __ SetupDartSP(); |
360 | __ movz(R0, Immediate(43), 0); |
361 | __ movz(R1, Immediate(42), 0); |
362 | __ add(R2, SP, Operand(1)); |
363 | __ str(R1, Address(R2, -1)); |
364 | __ ldr(R0, Address(R2, -1)); |
365 | __ RestoreCSP(); |
366 | __ ret(); |
367 | } |
368 | |
369 | ASSEMBLER_TEST_RUN(SimpleLoadStoreHeapTag, test) { |
370 | typedef int64_t (*Int64Return)() DART_UNUSED; |
371 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
372 | } |
373 | |
374 | ASSEMBLER_TEST_GENERATE(LoadStoreLargeIndex, assembler) { |
375 | __ SetupDartSP(); |
376 | |
377 | __ sub(CSP, CSP, |
378 | Operand(32 * target::kWordSize)); // Must not access beyond CSP. |
379 | |
380 | __ movz(R0, Immediate(43), 0); |
381 | __ movz(R1, Immediate(42), 0); |
382 | // Largest negative offset that can fit in the signed 9-bit immediate field. |
383 | __ str(R1, Address(SP, -32 * target::kWordSize, Address::PreIndex)); |
384 | // Largest positive kWordSize aligned offset that we can fit. |
385 | __ ldr(R0, Address(SP, 31 * target::kWordSize, Address::PostIndex)); |
386 | // Correction. |
387 | __ add(SP, SP, Operand(target::kWordSize)); // Restore SP. |
388 | __ RestoreCSP(); |
389 | __ ret(); |
390 | } |
391 | |
392 | ASSEMBLER_TEST_RUN(LoadStoreLargeIndex, test) { |
393 | typedef int64_t (*Int64Return)() DART_UNUSED; |
394 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
395 | } |
396 | |
397 | ASSEMBLER_TEST_GENERATE(LoadStoreLargeOffset, assembler) { |
398 | __ SetupDartSP(); |
399 | __ movz(R0, Immediate(43), 0); |
400 | __ movz(R1, Immediate(42), 0); |
401 | __ sub(SP, SP, Operand(512 * target::kWordSize)); |
402 | __ andi(CSP, SP, Immediate(~15)); // Must not access beyond CSP. |
403 | __ str(R1, Address(SP, 512 * target::kWordSize, Address::Offset)); |
404 | __ add(SP, SP, Operand(512 * target::kWordSize)); |
405 | __ ldr(R0, Address(SP)); |
406 | __ RestoreCSP(); |
407 | __ ret(); |
408 | } |
409 | |
410 | ASSEMBLER_TEST_RUN(LoadStoreLargeOffset, test) { |
411 | typedef int64_t (*Int64Return)() DART_UNUSED; |
412 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
413 | } |
414 | |
415 | ASSEMBLER_TEST_GENERATE(LoadStoreExtReg, assembler) { |
416 | __ SetupDartSP(); |
417 | __ movz(R0, Immediate(43), 0); |
418 | __ movz(R1, Immediate(42), 0); |
419 | __ movz(R2, Immediate(0xfff8), 0); |
420 | __ movk(R2, Immediate(0xffff), 1); // R2 <- -8 (int32_t). |
// This should sign-extend R2 and add it to SP to form the address,
// i.e. SP - kWordSize.
423 | __ str(R1, Address(SP, R2, SXTW)); |
424 | __ sub(SP, SP, Operand(target::kWordSize)); |
425 | __ andi(CSP, SP, Immediate(~15)); // Must not access beyond CSP. |
426 | __ ldr(R0, Address(SP)); |
427 | __ add(SP, SP, Operand(target::kWordSize)); |
428 | __ RestoreCSP(); |
429 | __ ret(); |
430 | } |
431 | |
432 | ASSEMBLER_TEST_RUN(LoadStoreExtReg, test) { |
433 | typedef int64_t (*Int64Return)() DART_UNUSED; |
434 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
435 | } |
436 | |
437 | ASSEMBLER_TEST_GENERATE(LoadStoreScaledReg, assembler) { |
438 | __ SetupDartSP(); |
439 | __ movz(R0, Immediate(43), 0); |
440 | __ movz(R1, Immediate(42), 0); |
441 | __ movz(R2, Immediate(10), 0); |
442 | __ sub(SP, SP, Operand(10 * target::kWordSize)); |
443 | __ andi(CSP, SP, Immediate(~15)); // Must not access beyond CSP. |
444 | // Store R1 into SP + R2 * kWordSize. |
445 | __ str(R1, Address(SP, R2, UXTX, Address::Scaled)); |
446 | __ ldr(R0, Address(SP, R2, UXTX, Address::Scaled)); |
447 | __ add(SP, SP, Operand(10 * target::kWordSize)); |
448 | __ RestoreCSP(); |
449 | __ ret(); |
450 | } |
451 | |
452 | ASSEMBLER_TEST_RUN(LoadStoreScaledReg, test) { |
453 | typedef int64_t (*Int64Return)() DART_UNUSED; |
454 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
455 | } |
456 | |
457 | ASSEMBLER_TEST_GENERATE(LoadSigned32Bit, assembler) { |
458 | __ SetupDartSP(); |
459 | |
460 | __ sub(CSP, CSP, |
461 | Operand(2 * target::kWordSize)); // Must not access beyond CSP. |
462 | |
463 | __ LoadImmediate(R1, 0xffffffff); |
464 | __ str(R1, Address(SP, -4, Address::PreIndex, kWord), kWord); |
465 | __ ldr(R0, Address(SP), kWord); |
466 | __ ldr(R1, Address(SP, 4, Address::PostIndex, kWord), kWord); |
467 | __ RestoreCSP(); |
468 | __ ret(); |
469 | } |
470 | |
471 | ASSEMBLER_TEST_RUN(LoadSigned32Bit, test) { |
472 | typedef int64_t (*Int64Return)() DART_UNUSED; |
473 | EXPECT_EQ(-1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
474 | } |
475 | |
476 | ASSEMBLER_TEST_GENERATE(SimpleLoadStorePair, assembler) { |
477 | __ SetupDartSP(); |
478 | |
479 | __ sub(CSP, CSP, |
480 | Operand(2 * target::kWordSize)); // Must not access beyond CSP. |
481 | |
482 | __ LoadImmediate(R2, 43); |
483 | __ LoadImmediate(R3, 42); |
484 | __ stp(R2, R3, Address(SP, -2 * target::kWordSize, Address::PairPreIndex)); |
485 | __ ldp(R0, R1, Address(SP, 2 * target::kWordSize, Address::PairPostIndex)); |
486 | __ sub(R0, R0, Operand(R1)); |
487 | __ RestoreCSP(); |
488 | __ ret(); |
489 | } |
490 | |
491 | ASSEMBLER_TEST_RUN(SimpleLoadStorePair, test) { |
492 | typedef int64_t (*Int64Return)() DART_UNUSED; |
493 | EXPECT_EQ(1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
494 | } |
495 | |
496 | ASSEMBLER_TEST_GENERATE(LoadStorePairOffset, assembler) { |
497 | __ SetupDartSP(); |
498 | __ LoadImmediate(R2, 43); |
499 | __ LoadImmediate(R3, 42); |
500 | __ sub(SP, SP, Operand(4 * target::kWordSize)); |
501 | __ andi(CSP, SP, Immediate(~15)); // Must not access beyond CSP. |
502 | __ stp(R2, R3, Address::Pair(SP, 2 * target::kWordSize)); |
503 | __ ldp(R0, R1, Address::Pair(SP, 2 * target::kWordSize)); |
504 | __ add(SP, SP, Operand(4 * target::kWordSize)); |
505 | __ sub(R0, R0, Operand(R1)); |
506 | __ RestoreCSP(); |
507 | __ ret(); |
508 | } |
509 | |
510 | ASSEMBLER_TEST_RUN(LoadStorePairOffset, test) { |
511 | typedef int64_t (*Int64Return)() DART_UNUSED; |
512 | EXPECT_EQ(1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
513 | } |
514 | |
515 | ASSEMBLER_TEST_GENERATE(PushRegisterPair, assembler) { |
516 | __ SetupDartSP(); |
517 | __ LoadImmediate(R2, 12); |
518 | __ LoadImmediate(R3, 21); |
519 | __ PushRegisterPair(R2, R3); |
520 | __ Pop(R0); |
521 | __ Pop(R1); |
522 | __ RestoreCSP(); |
523 | __ ret(); |
524 | } |
525 | |
526 | ASSEMBLER_TEST_RUN(PushRegisterPair, test) { |
527 | EXPECT(test != NULL); |
528 | typedef int (*PushRegisterPair)() DART_UNUSED; |
529 | EXPECT_EQ(12, EXECUTE_TEST_CODE_INT64(PushRegisterPair, test->entry())); |
530 | } |
531 | |
532 | ASSEMBLER_TEST_GENERATE(PushRegisterPairReversed, assembler) { |
533 | __ SetupDartSP(); |
534 | __ LoadImmediate(R3, 12); |
535 | __ LoadImmediate(R2, 21); |
536 | __ PushRegisterPair(R3, R2); |
537 | __ Pop(R0); |
538 | __ Pop(R1); |
539 | __ RestoreCSP(); |
540 | __ ret(); |
541 | } |
542 | |
543 | ASSEMBLER_TEST_RUN(PushRegisterPairReversed, test) { |
544 | EXPECT(test != NULL); |
545 | typedef int (*PushRegisterPairReversed)() DART_UNUSED; |
546 | EXPECT_EQ(12, |
547 | EXECUTE_TEST_CODE_INT64(PushRegisterPairReversed, test->entry())); |
548 | } |
549 | |
550 | ASSEMBLER_TEST_GENERATE(PopRegisterPair, assembler) { |
551 | __ SetupDartSP(); |
552 | __ LoadImmediate(R2, 12); |
553 | __ LoadImmediate(R3, 21); |
554 | __ Push(R3); |
555 | __ Push(R2); |
556 | __ PopRegisterPair(R0, R1); |
557 | __ RestoreCSP(); |
558 | __ ret(); |
559 | } |
560 | |
561 | ASSEMBLER_TEST_RUN(PopRegisterPair, test) { |
562 | EXPECT(test != NULL); |
563 | typedef int (*PopRegisterPair)() DART_UNUSED; |
564 | EXPECT_EQ(12, EXECUTE_TEST_CODE_INT64(PopRegisterPair, test->entry())); |
565 | } |
566 | |
567 | ASSEMBLER_TEST_GENERATE(PopRegisterPairReversed, assembler) { |
568 | __ SetupDartSP(); |
569 | __ LoadImmediate(R3, 12); |
570 | __ LoadImmediate(R2, 21); |
571 | __ Push(R3); |
572 | __ Push(R2); |
573 | __ PopRegisterPair(R1, R0); |
574 | __ RestoreCSP(); |
575 | __ ret(); |
576 | } |
577 | |
578 | ASSEMBLER_TEST_RUN(PopRegisterPairReversed, test) { |
579 | EXPECT(test != NULL); |
580 | typedef int (*PopRegisterPairReversed)() DART_UNUSED; |
581 | EXPECT_EQ(12, |
582 | EXECUTE_TEST_CODE_INT64(PopRegisterPairReversed, test->entry())); |
583 | } |
584 | |
585 | ASSEMBLER_TEST_GENERATE(Semaphore, assembler) { |
586 | __ SetupDartSP(); |
587 | __ movz(R0, Immediate(40), 0); |
588 | __ movz(R1, Immediate(42), 0); |
589 | __ Push(R0); |
590 | Label retry; |
591 | __ Bind(&retry); |
592 | __ ldxr(R0, SP); |
593 | __ stxr(TMP, R1, SP); // IP == 0, success |
594 | __ cmp(TMP, Operand(0)); |
__ b(&retry, NE); // NE if a context switch occurred between ldxr and stxr.
596 | __ Pop(R0); // 42 |
597 | __ RestoreCSP(); |
598 | __ ret(); |
599 | } |
600 | |
601 | ASSEMBLER_TEST_RUN(Semaphore, test) { |
602 | EXPECT(test != NULL); |
603 | typedef intptr_t (*Semaphore)() DART_UNUSED; |
604 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Semaphore, test->entry())); |
605 | } |
606 | |
607 | ASSEMBLER_TEST_GENERATE(FailedSemaphore, assembler) { |
608 | __ SetupDartSP(); |
609 | __ movz(R0, Immediate(40), 0); |
610 | __ movz(R1, Immediate(42), 0); |
611 | __ Push(R0); |
612 | __ ldxr(R0, SP); |
613 | __ clrex(); // Simulate a context switch. |
614 | __ stxr(TMP, R1, SP); // IP == 1, failure |
615 | __ Pop(R0); // 40 |
616 | __ add(R0, R0, Operand(TMP)); |
617 | __ RestoreCSP(); |
618 | __ ret(); |
619 | } |
620 | |
621 | ASSEMBLER_TEST_RUN(FailedSemaphore, test) { |
622 | EXPECT(test != NULL); |
623 | typedef intptr_t (*FailedSemaphore)() DART_UNUSED; |
624 | EXPECT_EQ(41, EXECUTE_TEST_CODE_INT64(FailedSemaphore, test->entry())); |
625 | } |
626 | |
627 | ASSEMBLER_TEST_GENERATE(Semaphore32, assembler) { |
628 | __ SetupDartSP(); |
629 | __ movz(R0, Immediate(40), 0); |
630 | __ add(R0, R0, Operand(R0, LSL, 32)); |
631 | __ Push(R0); |
632 | |
633 | __ movz(R0, Immediate(40), 0); |
634 | __ movz(R1, Immediate(42), 0); |
635 | |
636 | Label retry; |
637 | __ Bind(&retry); |
638 | __ ldxr(R0, SP, kWord); |
639 | // 32 bit operation should ignore the high word of R0 that was pushed on the |
640 | // stack. |
641 | __ stxr(TMP, R1, SP, kWord); // IP == 0, success |
642 | __ cmp(TMP, Operand(0)); |
__ b(&retry, NE); // NE if a context switch occurred between ldxr and stxr.
__ Pop(R0); // 42 + 40 * 2**32
645 | __ RestoreCSP(); |
646 | __ ret(); |
647 | } |
648 | |
649 | ASSEMBLER_TEST_RUN(Semaphore32, test) { |
650 | EXPECT(test != NULL); |
651 | typedef intptr_t (*Semaphore32)() DART_UNUSED; |
// The lower word has been atomically switched from 40 to 42, whereas the
// upper word is unchanged at 40.
654 | EXPECT_EQ(42 + (DART_INT64_C(40) << 32), |
655 | EXECUTE_TEST_CODE_INT64(Semaphore32, test->entry())); |
656 | } |
657 | |
658 | ASSEMBLER_TEST_GENERATE(FailedSemaphore32, assembler) { |
659 | __ SetupDartSP(); |
660 | __ movz(R0, Immediate(40), 0); |
661 | __ add(R0, R0, Operand(R0, LSL, 32)); |
662 | __ Push(R0); |
663 | |
664 | __ movz(R0, Immediate(40), 0); |
665 | __ movz(R1, Immediate(42), 0); |
666 | |
667 | __ ldxr(R0, SP, kWord); |
668 | __ clrex(); // Simulate a context switch. |
669 | __ stxr(TMP, R1, SP, kWord); // IP == 1, failure |
__ Pop(R0); // 40 + 40 * 2**32
671 | __ add(R0, R0, Operand(TMP)); |
672 | __ RestoreCSP(); |
673 | __ ret(); |
674 | } |
675 | |
676 | ASSEMBLER_TEST_RUN(FailedSemaphore32, test) { |
677 | EXPECT(test != NULL); |
678 | typedef intptr_t (*FailedSemaphore32)() DART_UNUSED; |
679 | // Lower word has had the failure code (1) added to it. Upper word is |
680 | // unchanged at 40. |
681 | EXPECT_EQ(41 + (DART_INT64_C(40) << 32), |
682 | EXECUTE_TEST_CODE_INT64(FailedSemaphore32, test->entry())); |
683 | } |
684 | |
685 | ASSEMBLER_TEST_GENERATE(LoadAcquireStoreRelease, assembler) { |
686 | // We cannot really test that ldar/stlr have the barrier behavior, but at |
687 | // least we can test that the load/store behavior is correct. |
688 | Label failed, done; |
689 | |
690 | __ SetupDartSP(); |
691 | __ EnterFrame(0); |
692 | |
// Test 64-bit ldar.
694 | __ PushImmediate(0x1122334455667788); |
695 | __ ldar(R1, SP, kDoubleWord); |
696 | __ CompareImmediate(R1, 0x1122334455667788); |
697 | __ BranchIf(NOT_EQUAL, &failed); |
698 | __ Drop(1); |
699 | |
// Test 32-bit ldar - must zero-extend.
701 | __ PushImmediate(0x1122334455667788); |
702 | __ ldar(R1, SP, kWord); |
703 | __ CompareImmediate(R1, 0x55667788); |
704 | __ BranchIf(NOT_EQUAL, &failed); |
705 | __ Drop(1); |
706 | |
707 | // Test 64-bit stlr. |
708 | __ PushImmediate(0); |
709 | __ LoadImmediate(R1, 0x1122334455667788); |
710 | __ stlr(R1, SP, kDoubleWord); |
711 | __ Pop(R1); |
712 | __ CompareImmediate(R1, 0x1122334455667788); |
713 | __ BranchIf(NOT_EQUAL, &failed); |
714 | |
715 | // Test 32-bit stlr. |
716 | __ PushImmediate(0); |
717 | __ LoadImmediate(R1, 0x1122334455667788); |
718 | __ stlr(R1, SP, kWord); |
719 | __ Pop(R1); |
720 | __ CompareImmediate(R1, 0x55667788); |
721 | __ BranchIf(NOT_EQUAL, &failed); |
722 | |
723 | __ LoadImmediate(R0, 0x42); |
724 | __ b(&done); |
725 | |
726 | __ Bind(&failed); |
727 | __ LoadImmediate(R0, 0x84); |
728 | |
729 | __ Bind(&done); |
730 | __ LeaveFrame(); |
731 | __ RestoreCSP(); |
732 | __ ret(); |
733 | } |
734 | |
735 | ASSEMBLER_TEST_RUN(LoadAcquireStoreRelease, test) { |
736 | typedef intptr_t (*LoadAcquireStoreRelease)() DART_UNUSED; |
737 | EXPECT_EQ(0x42, |
738 | EXECUTE_TEST_CODE_INT64(LoadAcquireStoreRelease, test->entry())); |
739 | } |
740 | |
741 | // Logical register operations. |
742 | ASSEMBLER_TEST_GENERATE(AndRegs, assembler) { |
743 | __ movz(R1, Immediate(43), 0); |
744 | __ movz(R2, Immediate(42), 0); |
745 | __ and_(R0, R1, Operand(R2)); |
746 | __ ret(); |
747 | } |
748 | |
749 | ASSEMBLER_TEST_RUN(AndRegs, test) { |
750 | typedef int64_t (*Int64Return)() DART_UNUSED; |
751 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
752 | } |
753 | |
754 | constexpr uint64_t kU64MinusOne = 0xffffffffffffffffull; |
755 | constexpr uint64_t kU64MinInt32 = 0xffffffff80000000ull; |
756 | constexpr uint64_t kU64MaxInt32 = 0x000000007fffffffull; |
757 | constexpr uint64_t kU64MinInt64 = 0x8000000000000000ull; |
758 | constexpr uint64_t kU64MaxInt64 = 0x7fffffffffffffffull; |
759 | |
760 | #define FOR_EACH_ASR_64_TEST_CONFIG(M) \ |
761 | M(0ull, 0, 0ull) \ |
762 | M(1ull, 0, 1ull) \ |
763 | M(kU64MaxInt32, 0, kU64MaxInt32) \ |
764 | M(kU64MaxInt64, 0, kU64MaxInt64) \ |
765 | M(kU64MinInt32, 0, kU64MinInt32) \ |
766 | M(kU64MinInt64, 0, kU64MinInt64) \ |
767 | M(0ull, 1, 0ull) \ |
768 | M(1ull, 1, 0ull) \ |
769 | M(4ull, 1, 2ull) \ |
770 | M(0xffffull, 1, 0x7fffull) \ |
771 | M(0xffffffffull, 1, 0x7fffffffull) \ |
772 | M(kU64MaxInt32, 1, 0x3fffffffull) \ |
773 | M(kU64MaxInt64, 1, 0x3fffffffffffffffull) \ |
774 | M(kU64MinInt32, 1, 0xffffffffc0000000ull) \ |
775 | M(kU64MinInt64, 1, 0xc000000000000000ull) \ |
776 | M(kU64MinusOne, 1, kU64MinusOne) \ |
777 | M(1ull, 2, 0ull) \ |
778 | M(4ull, 2, 1ull) \ |
779 | M(0xffffull, 2, 0x3fffull) \ |
780 | M(0xffffffffull, 2, 0x3fffffffull) \ |
781 | M(kU64MaxInt32, 2, 0x1fffffffull) \ |
782 | M(kU64MaxInt64, 2, 0x1fffffffffffffffull) \ |
783 | M(kU64MinInt32, 2, 0xffffffffe0000000ull) \ |
784 | M(kU64MinInt64, 2, 0xe000000000000000ull) \ |
785 | M(kU64MinusOne, 2, kU64MinusOne) \ |
786 | M(0ull, 31, 0ull) \ |
787 | M(1ull, 31, 0ull) \ |
788 | M(4ull, 31, 0ull) \ |
789 | M(0xffffull, 31, 0ull) \ |
790 | M(0xffffffffull, 31, 1ull) \ |
791 | M(kU64MaxInt32, 31, 0ull) \ |
792 | M(kU64MaxInt64, 31, 0xffffffffull) \ |
793 | M(kU64MinInt32, 31, kU64MinusOne) \ |
794 | M(kU64MinInt64, 31, 0xffffffff00000000ull) \ |
795 | M(kU64MinusOne, 31, kU64MinusOne) \ |
796 | M(0ull, 32, 0ull) \ |
797 | M(1ull, 32, 0ull) \ |
798 | M(4ull, 32, 0ull) \ |
799 | M(0xffffull, 32, 0ull) \ |
800 | M(0xffffffffull, 32, 0ull) \ |
801 | M(kU64MaxInt64, 32, 0x7fffffffull) \ |
802 | M(kU64MinInt32, 32, kU64MinusOne) \ |
803 | M(kU64MinInt64, 32, 0xffffffff80000000ull) \ |
804 | M(kU64MinusOne, 32, kU64MinusOne) \ |
805 | M(0ull, 62, 0ull) \ |
806 | M(1ull, 62, 0ull) \ |
807 | M(4ull, 62, 0ull) \ |
808 | M(0xffffull, 62, 0ull) \ |
809 | M(0xffffffffull, 62, 0ull) \ |
810 | M(kU64MaxInt64, 62, 1ull) \ |
811 | M(kU64MinInt32, 62, kU64MinusOne) \ |
812 | M(kU64MinInt64, 62, 0xfffffffffffffffeull) \ |
813 | M(kU64MinusOne, 62, kU64MinusOne) \ |
814 | M(0ull, 63, 0ull) \ |
815 | M(1ull, 63, 0ull) \ |
816 | M(4ull, 63, 0ull) \ |
817 | M(0xffffull, 63, 0ull) \ |
818 | M(0xffffffffull, 63, 0ull) \ |
819 | M(kU64MaxInt64, 63, 0ull) \ |
820 | M(kU64MinInt32, 63, kU64MinusOne) \ |
821 | M(kU64MinInt64, 63, kU64MinusOne) \ |
822 | M(kU64MinusOne, 63, kU64MinusOne) |
823 | |
824 | #define FOR_EACH_LSR_64_TEST_CONFIG(M) \ |
825 | M(0ull, 0, 0ull) \ |
826 | M(1ull, 0, 1ull) \ |
827 | M(kU64MaxInt32, 0, kU64MaxInt32) \ |
828 | M(kU64MaxInt64, 0, kU64MaxInt64) \ |
829 | M(kU64MinInt32, 0, kU64MinInt32) \ |
830 | M(kU64MinInt64, 0, kU64MinInt64) \ |
831 | M(0ull, 1, 0ull) \ |
832 | M(1ull, 1, 0ull) \ |
833 | M(4ull, 1, 2ull) \ |
834 | M(0xffffull, 1, 0x7fffull) \ |
835 | M(0xffffffffull, 1, 0x7fffffffull) \ |
836 | M(kU64MaxInt32, 1, 0x3fffffffull) \ |
837 | M(kU64MaxInt64, 1, 0x3fffffffffffffffull) \ |
838 | M(kU64MinInt32, 1, 0x7fffffffc0000000ull) \ |
839 | M(kU64MinInt64, 1, 0x4000000000000000ull) \ |
840 | M(kU64MinusOne, 1, 0x7fffffffffffffffull) \ |
841 | M(1ull, 2, 0ull) \ |
842 | M(4ull, 2, 1ull) \ |
843 | M(0xffffull, 2, 0x3fffull) \ |
844 | M(0xffffffffull, 2, 0x3fffffffull) \ |
845 | M(kU64MaxInt32, 2, 0x1fffffffull) \ |
846 | M(kU64MaxInt64, 2, 0x1fffffffffffffffull) \ |
847 | M(kU64MinInt32, 2, 0x3fffffffe0000000ull) \ |
848 | M(kU64MinInt64, 2, 0x2000000000000000ull) \ |
849 | M(kU64MinusOne, 2, 0x3fffffffffffffffull) \ |
850 | M(0ull, 31, 0ull) \ |
851 | M(1ull, 31, 0ull) \ |
852 | M(4ull, 31, 0ull) \ |
853 | M(0xffffull, 31, 0ull) \ |
854 | M(0xffffffffull, 31, 1ull) \ |
855 | M(kU64MaxInt32, 31, 0ull) \ |
856 | M(kU64MaxInt64, 31, 0xffffffffull) \ |
857 | M(kU64MinInt32, 31, 0x1ffffffffull) \ |
858 | M(kU64MinInt64, 31, 0x100000000ull) \ |
859 | M(kU64MinusOne, 31, 0x1ffffffffull) \ |
860 | M(0ull, 32, 0ull) \ |
861 | M(1ull, 32, 0ull) \ |
862 | M(4ull, 32, 0ull) \ |
863 | M(0xffffull, 32, 0ull) \ |
864 | M(0xffffffffull, 32, 0ull) \ |
865 | M(kU64MaxInt64, 32, 0x7fffffffull) \ |
866 | M(kU64MinInt32, 32, 0xffffffffull) \ |
867 | M(kU64MinInt64, 32, 0x80000000ull) \ |
868 | M(kU64MinusOne, 32, 0xffffffffull) \ |
869 | M(0ull, 62, 0ull) \ |
870 | M(1ull, 62, 0ull) \ |
871 | M(4ull, 62, 0ull) \ |
872 | M(0xffffull, 62, 0ull) \ |
873 | M(0xffffffffull, 62, 0ull) \ |
874 | M(kU64MaxInt64, 62, 1ull) \ |
875 | M(kU64MinInt32, 62, 3ull) \ |
876 | M(kU64MinInt64, 62, 2ull) \ |
877 | M(kU64MinusOne, 62, 3ull) \ |
878 | M(0ull, 63, 0ull) \ |
879 | M(1ull, 63, 0ull) \ |
880 | M(4ull, 63, 0ull) \ |
881 | M(0xffffull, 63, 0ull) \ |
882 | M(0xffffffffull, 63, 0ull) \ |
883 | M(kU64MaxInt64, 63, 0ull) \ |
884 | M(kU64MinInt32, 63, 1ull) \ |
885 | M(kU64MinInt64, 63, 1ull) \ |
886 | M(kU64MinusOne, 63, 1ull) |
887 | |
888 | #define FOR_EACH_LSL_64_TEST_CONFIG(M) \ |
889 | M(0ull, 0, 0ull) \ |
890 | M(1ull, 0, 1ull) \ |
891 | M(kU64MaxInt32, 0, kU64MaxInt32) \ |
892 | M(kU64MaxInt64, 0, kU64MaxInt64) \ |
893 | M(kU64MinInt32, 0, kU64MinInt32) \ |
894 | M(kU64MinInt64, 0, kU64MinInt64) \ |
895 | M(0ull, 1, 0ull) \ |
896 | M(1ull, 1, 2ull) \ |
897 | M(4ull, 1, 8ull) \ |
898 | M(0xffffull, 1, 0x1fffeull) \ |
899 | M(0xffffffffull, 1, 0x1fffffffeull) \ |
900 | M(kU64MaxInt32, 1, 0xfffffffeull) \ |
901 | M(kU64MaxInt64, 1, 0xfffffffffffffffeull) \ |
902 | M(kU64MinInt32, 1, 0xffffffff00000000ull) \ |
903 | M(kU64MinInt64, 1, 0ull) \ |
904 | M(kU64MinusOne, 1, 0xfffffffffffffffeull) \ |
905 | M(1ull, 2, 4ull) \ |
906 | M(4ull, 2, 16ull) \ |
907 | M(0xffffull, 2, 0x3fffcull) \ |
908 | M(0xffffffffull, 2, 0x3fffffffcull) \ |
909 | M(kU64MaxInt32, 2, 0x1fffffffcull) \ |
910 | M(kU64MaxInt64, 2, 0xfffffffffffffffcull) \ |
911 | M(kU64MinInt32, 2, 0xfffffffe00000000ull) \ |
912 | M(kU64MinInt64, 2, 0ull) \ |
913 | M(kU64MinusOne, 2, 0xfffffffffffffffcull) \ |
914 | M(0ull, 31, 0ull) \ |
915 | M(1ull, 31, 0x0000000080000000ull) \ |
916 | M(4ull, 31, 0x0000000200000000ull) \ |
917 | M(0xffffull, 31, 0x00007fff80000000ull) \ |
918 | M(0xffffffffull, 31, 0x7fffffff80000000ull) \ |
919 | M(kU64MaxInt32, 31, 0x3fffffff80000000ull) \ |
920 | M(kU64MaxInt64, 31, 0xffffffff80000000ull) \ |
921 | M(kU64MinInt32, 31, 0xc000000000000000ull) \ |
922 | M(kU64MinInt64, 31, 0ull) \ |
923 | M(kU64MinusOne, 31, 0xffffffff80000000ull) \ |
924 | M(0ull, 32, 0ull) \ |
925 | M(1ull, 32, 0x0000000100000000ull) \ |
926 | M(4ull, 32, 0x0000000400000000ull) \ |
927 | M(0xffffull, 32, 0x0000ffff00000000ull) \ |
928 | M(0xffffffffull, 32, 0xffffffff00000000ull) \ |
929 | M(kU64MaxInt64, 32, 0xffffffff00000000ull) \ |
930 | M(kU64MinInt32, 32, 0x8000000000000000ull) \ |
931 | M(kU64MinInt64, 32, 0ull) \ |
932 | M(kU64MinusOne, 32, 0xffffffff00000000ull) \ |
933 | M(0ull, 62, 0ull) \ |
934 | M(1ull, 62, 0x4000000000000000ull) \ |
935 | M(4ull, 62, 0ull) \ |
936 | M(0xffffull, 62, 0xc000000000000000ull) \ |
937 | M(0xffffffffull, 62, 0xc000000000000000ull) \ |
938 | M(kU64MaxInt64, 62, 0xc000000000000000ull) \ |
939 | M(kU64MinInt32, 62, 0ull) \ |
940 | M(kU64MinInt64, 62, 0ull) \ |
941 | M(kU64MinusOne, 62, 0xc000000000000000ull) \ |
942 | M(0ull, 63, 0ull) \ |
943 | M(1ull, 63, 0x8000000000000000ull) \ |
944 | M(4ull, 63, 0ull) \ |
945 | M(0xffffull, 63, 0x8000000000000000ull) \ |
946 | M(0xffffffffull, 63, 0x8000000000000000ull) \ |
947 | M(kU64MaxInt64, 63, 0x8000000000000000ull) \ |
948 | M(kU64MinInt32, 63, 0ull) \ |
949 | M(kU64MinInt64, 63, 0ull) \ |
950 | M(kU64MinusOne, 63, 0x8000000000000000ull) |
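
// Each M(value, shift, expected) row in the config lists above expands, via
// the macro below, into a generate/run pair that loads the value, applies
// the named shift macro-op with an immediate shift amount, and compares the
// raw 64-bit result bit pattern against the expected value.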
951 | |
952 | #define SHIFT_64_IMMEDIATE_TEST(macro_op, val, shift, expected) \ |
953 | ASSEMBLER_TEST_GENERATE(macro_op##_##val##_##shift, assembler) { \ |
954 | __ LoadImmediate(R1, bit_cast<int64_t>(val)); \ |
955 | __ macro_op(R0, R1, (shift)); \ |
956 | __ ret(); \ |
957 | } \ |
958 | \ |
959 | ASSEMBLER_TEST_RUN(macro_op##_##val##_##shift, test) { \ |
960 | typedef int64_t (*Int64Return)() DART_UNUSED; \ |
961 | EXPECT_EQ((expected), bit_cast<uint64_t>(EXECUTE_TEST_CODE_INT64( \ |
962 | Int64Return, test->entry()))); \ |
963 | } |
964 | |
965 | #define ASR_64_IMMEDIATE_TEST(val, shift, expected) \ |
966 | SHIFT_64_IMMEDIATE_TEST(AsrImmediate, val, shift, expected) |
967 | |
968 | #define LSR_64_IMMEDIATE_TEST(val, shift, expected) \ |
969 | SHIFT_64_IMMEDIATE_TEST(LsrImmediate, val, shift, expected) |
970 | |
971 | #define LSL_64_IMMEDIATE_TEST(val, shift, expected) \ |
972 | SHIFT_64_IMMEDIATE_TEST(LslImmediate, val, shift, expected) |
973 | |
974 | FOR_EACH_ASR_64_TEST_CONFIG(ASR_64_IMMEDIATE_TEST) |
975 | FOR_EACH_LSR_64_TEST_CONFIG(LSR_64_IMMEDIATE_TEST) |
976 | FOR_EACH_LSL_64_TEST_CONFIG(LSL_64_IMMEDIATE_TEST) |
977 | |
978 | #undef LSL_64_IMMEDIATE_TEST |
979 | #undef LSR_64_IMMEDIATE_TEST |
980 | #undef ASR_64_IMMEDIATE_TEST |
981 | #undef SHIFT_64_IMMEDIATE_TEST |
#undef FOR_EACH_LSL_64_TEST_CONFIG
#undef FOR_EACH_LSR_64_TEST_CONFIG
#undef FOR_EACH_ASR_64_TEST_CONFIG
985 | |
986 | constexpr uint32_t kU32MinusOne = 0xffffffffu; |
987 | constexpr uint32_t kU32MinInt32 = 0x80000000u; |
988 | constexpr uint32_t kU32MaxInt32 = 0x7fffffffu; |
989 | |
990 | #define FOR_EACH_LSR_32_TEST_CONFIG(M) \ |
991 | M(0u, 0, 0u) \ |
992 | M(1u, 0, 1u) \ |
993 | M(kU32MaxInt32, 0, kU32MaxInt32) \ |
994 | M(kU32MinInt32, 0, kU32MinInt32) \ |
995 | M(0u, 1, 0u) \ |
996 | M(1u, 1, 0u) \ |
997 | M(4u, 1, 2u) \ |
998 | M(0xffffu, 1, 0x7fffu) \ |
999 | M(0xffffffffu, 1, 0x7fffffffu) \ |
1000 | M(kU32MaxInt32, 1, 0x3fffffffu) \ |
1001 | M(kU32MinInt32, 1, 0x40000000u) \ |
1002 | M(kU32MinusOne, 1, 0x7fffffffu) \ |
1003 | M(1u, 2, 0u) \ |
1004 | M(4u, 2, 1u) \ |
1005 | M(0xffffu, 2, 0x3fffu) \ |
1006 | M(0xffffffffu, 2, 0x3fffffffu) \ |
1007 | M(kU32MaxInt32, 2, 0x1fffffffu) \ |
1008 | M(kU32MinInt32, 2, 0x20000000u) \ |
1009 | M(kU32MinusOne, 2, 0x3fffffffu) \ |
1010 | M(0u, 31, 0u) \ |
1011 | M(1u, 31, 0u) \ |
1012 | M(4u, 31, 0u) \ |
1013 | M(0xffffu, 31, 0u) \ |
1014 | M(0xffffffffu, 31, 1u) \ |
1015 | M(kU32MaxInt32, 31, 0u) \ |
1016 | M(kU32MinInt32, 31, 1u) \ |
1017 | M(kU32MinusOne, 31, 1u) |
1018 | |
1019 | #define FOR_EACH_LSL_32_TEST_CONFIG(M) \ |
1020 | M(0u, 0, 0u) \ |
1021 | M(1u, 0, 1u) \ |
1022 | M(kU32MaxInt32, 0, kU32MaxInt32) \ |
1023 | M(kU32MinInt32, 0, kU32MinInt32) \ |
1024 | M(0u, 1, 0u) \ |
1025 | M(1u, 1, 2u) \ |
1026 | M(4u, 1, 8u) \ |
1027 | M(0xffffu, 1, 0x1fffeu) \ |
1028 | M(0xffffffffu, 1, 0xfffffffeu) \ |
1029 | M(kU32MaxInt32, 1, 0xfffffffeu) \ |
1030 | M(kU32MinInt32, 1, 0x00000000u) \ |
1031 | M(kU32MinusOne, 1, 0xfffffffeu) \ |
1032 | M(1u, 2, 4u) \ |
1033 | M(4u, 2, 16u) \ |
1034 | M(0xffffu, 2, 0x3fffcu) \ |
1035 | M(0xffffffffu, 2, 0xfffffffcu) \ |
1036 | M(kU32MaxInt32, 2, 0xfffffffcu) \ |
1037 | M(kU32MinInt32, 2, 0x00000000u) \ |
1038 | M(kU32MinusOne, 2, 0xfffffffcu) \ |
1039 | M(0u, 31, 0u) \ |
1040 | M(1u, 31, 0x80000000u) \ |
1041 | M(4u, 31, 0x00000000u) \ |
1042 | M(0xffffu, 31, 0x80000000u) \ |
1043 | M(0xffffffffu, 31, 0x80000000u) \ |
1044 | M(kU32MaxInt32, 31, 0x80000000u) \ |
1045 | M(kU32MinInt32, 31, 0x00000000u) \ |
1046 | M(kU32MinusOne, 31, 0x80000000u) |
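
// The 32-bit variants pass kWord so the shift operates on the 32-bit view of
// the register; the extra "a" in the generated test names keeps them distinct
// from the 64-bit tests above, and the result is truncated to 32 bits before
// comparison.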
1047 | |
1048 | #define SHIFT_32_IMMEDIATE_TEST(macro_op, val, shift, expected) \ |
1049 | ASSEMBLER_TEST_GENERATE(macro_op##a_##val##_##shift, assembler) { \ |
1050 | __ LoadImmediate(R1, bit_cast<int32_t>(val)); \ |
1051 | __ macro_op(R0, R1, (shift), kWord); \ |
1052 | __ ret(); \ |
1053 | } \ |
1054 | \ |
1055 | ASSEMBLER_TEST_RUN(macro_op##a_##val##_##shift, test) { \ |
1056 | typedef int32_t (*Int32Return)() DART_UNUSED; \ |
1057 | EXPECT_EQ((expected), bit_cast<uint32_t>((int32_t)EXECUTE_TEST_CODE_INT64( \ |
1058 | Int32Return, test->entry()))); \ |
1059 | } |
1060 | |
1061 | #define LSR_32_IMMEDIATE_TEST(val, shift, expected) \ |
1062 | SHIFT_32_IMMEDIATE_TEST(LsrImmediate, val, shift, expected) |
1063 | |
1064 | #define LSL_32_IMMEDIATE_TEST(val, shift, expected) \ |
1065 | SHIFT_32_IMMEDIATE_TEST(LslImmediate, val, shift, expected) |
1066 | |
1067 | FOR_EACH_LSR_32_TEST_CONFIG(LSR_32_IMMEDIATE_TEST) |
1068 | FOR_EACH_LSL_32_TEST_CONFIG(LSL_32_IMMEDIATE_TEST) |
1069 | |
1070 | #undef LSL_32_IMMEDIATE_TEST |
1071 | #undef LSR_32_IMMEDIATE_TEST |
1072 | #undef SHIFT_32_IMMEDIATE_TEST |
#undef FOR_EACH_LSL_32_TEST_CONFIG
#undef FOR_EACH_LSR_32_TEST_CONFIG
1075 | |
1076 | ASSEMBLER_TEST_GENERATE(AndShiftRegs, assembler) { |
1077 | __ movz(R1, Immediate(42), 0); |
1078 | __ movz(R2, Immediate(21), 0); |
1079 | __ and_(R0, R1, Operand(R2, LSL, 1)); |
1080 | __ ret(); |
1081 | } |
1082 | |
1083 | ASSEMBLER_TEST_RUN(AndShiftRegs, test) { |
1084 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1085 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1086 | } |
1087 | |
1088 | ASSEMBLER_TEST_GENERATE(BicRegs, assembler) { |
1089 | __ movz(R1, Immediate(42), 0); |
1090 | __ movz(R2, Immediate(5), 0); |
1091 | __ bic(R0, R1, Operand(R2)); |
1092 | __ ret(); |
1093 | } |
1094 | |
1095 | ASSEMBLER_TEST_RUN(BicRegs, test) { |
1096 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1097 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1098 | } |
1099 | |
1100 | ASSEMBLER_TEST_GENERATE(OrrRegs, assembler) { |
1101 | __ movz(R1, Immediate(32), 0); |
1102 | __ movz(R2, Immediate(10), 0); |
1103 | __ orr(R0, R1, Operand(R2)); |
1104 | __ ret(); |
1105 | } |
1106 | |
1107 | ASSEMBLER_TEST_RUN(OrrRegs, test) { |
1108 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1109 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1110 | } |
1111 | |
1112 | ASSEMBLER_TEST_GENERATE(OrnRegs, assembler) { |
1113 | __ movz(R1, Immediate(32), 0); |
1114 | __ movn(R2, Immediate(0), 0); // R2 <- 0xffffffffffffffff. |
__ movk(R2, Immediate(0xffd5), 0); // R2 <- 0xffffffffffffffd5.
1116 | __ orn(R0, R1, Operand(R2)); |
1117 | __ ret(); |
1118 | } |
1119 | |
1120 | ASSEMBLER_TEST_RUN(OrnRegs, test) { |
1121 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1122 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1123 | } |
1124 | |
1125 | ASSEMBLER_TEST_GENERATE(EorRegs, assembler) { |
1126 | __ movz(R1, Immediate(0xffd5), 0); |
1127 | __ movz(R2, Immediate(0xffff), 0); |
1128 | __ eor(R0, R1, Operand(R2)); |
1129 | __ ret(); |
1130 | } |
1131 | |
1132 | ASSEMBLER_TEST_RUN(EorRegs, test) { |
1133 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1134 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1135 | } |
1136 | |
1137 | ASSEMBLER_TEST_GENERATE(EonRegs, assembler) { |
1138 | __ movz(R1, Immediate(0xffd5), 0); |
1139 | __ movn(R2, Immediate(0xffff), 0); |
1140 | __ eon(R0, R1, Operand(R2)); |
1141 | __ ret(); |
1142 | } |
1143 | |
1144 | ASSEMBLER_TEST_RUN(EonRegs, test) { |
1145 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1146 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1147 | } |
1148 | |
1149 | // Logical immediate operations. |
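// ARM64 logical-immediate instructions only accept immediates encodable as
// repeating bitmask patterns; the constants used below (0xaaaa...aaaa, ~15,
// 0x0020002000200020, 0x3f3f3f3f3f3f3f3f) all satisfy that constraint.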
1150 | ASSEMBLER_TEST_GENERATE(AndImm, assembler) { |
1151 | __ movz(R1, Immediate(42), 0); |
1152 | __ andi(R0, R1, Immediate(0xaaaaaaaaaaaaaaaaULL)); |
1153 | __ ret(); |
1154 | } |
1155 | |
1156 | ASSEMBLER_TEST_RUN(AndImm, test) { |
1157 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1158 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1159 | } |
1160 | |
1161 | ASSEMBLER_TEST_GENERATE(AndImmCsp, assembler) { |
1162 | // Note we must maintain the ARM64 ABI invariants on CSP here. |
1163 | __ mov(TMP, CSP); |
1164 | __ sub(TMP2, CSP, Operand(31)); |
1165 | __ andi(CSP, TMP2, Immediate(~15)); |
1166 | __ mov(R0, CSP); |
1167 | __ sub(R0, TMP, Operand(R0)); |
1168 | __ mov(CSP, TMP); |
1169 | __ ret(); |
1170 | } |
1171 | |
1172 | ASSEMBLER_TEST_RUN(AndImmCsp, test) { |
1173 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1174 | EXPECT_EQ(32, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1175 | } |
1176 | |
1177 | ASSEMBLER_TEST_GENERATE(AndOneImm, assembler) { |
1178 | __ movz(R1, Immediate(43), 0); |
1179 | __ andi(R0, R1, Immediate(1)); |
1180 | __ ret(); |
1181 | } |
1182 | |
1183 | ASSEMBLER_TEST_RUN(AndOneImm, test) { |
1184 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1185 | EXPECT_EQ(1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1186 | } |
1187 | |
1188 | ASSEMBLER_TEST_GENERATE(OrrImm, assembler) { |
1189 | __ movz(R1, Immediate(0), 0); |
1190 | __ movz(R2, Immediate(0x3f), 0); |
1191 | __ movz(R3, Immediate(0xa), 0); |
1192 | __ orri(R1, R1, Immediate(0x0020002000200020ULL)); |
1193 | __ orr(R1, R1, Operand(R3)); |
1194 | __ and_(R0, R1, Operand(R2)); |
1195 | __ ret(); |
1196 | } |
1197 | |
1198 | ASSEMBLER_TEST_RUN(OrrImm, test) { |
1199 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1200 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1201 | } |
1202 | |
1203 | ASSEMBLER_TEST_GENERATE(EorImm, assembler) { |
1204 | __ movn(R0, Immediate(0), 0); |
__ movk(R0, Immediate(0xffd5), 0); // R0 <- 0xffffffffffffffd5.
1206 | __ movz(R1, Immediate(0x3f), 0); |
1207 | __ eori(R0, R0, Immediate(0x3f3f3f3f3f3f3f3fULL)); |
1208 | __ and_(R0, R0, Operand(R1)); |
1209 | __ ret(); |
1210 | } |
1211 | |
1212 | ASSEMBLER_TEST_RUN(EorImm, test) { |
1213 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1214 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1215 | } |
1216 | |
1217 | ASSEMBLER_TEST_GENERATE(Clz, assembler) { |
1218 | Label error; |
1219 | |
1220 | __ clz(R1, ZR); |
1221 | __ cmp(R1, Operand(64)); |
1222 | __ b(&error, NE); |
1223 | __ LoadImmediate(R2, 42); |
1224 | __ clz(R2, R2); |
1225 | __ cmp(R2, Operand(58)); |
1226 | __ b(&error, NE); |
1227 | __ LoadImmediate(R0, -1); |
1228 | __ clz(R1, R0); |
1229 | __ cmp(R1, Operand(0)); |
1230 | __ b(&error, NE); |
1231 | __ add(R0, ZR, Operand(R0, LSR, 3)); |
1232 | __ clz(R1, R0); |
1233 | __ cmp(R1, Operand(3)); |
1234 | __ b(&error, NE); |
1235 | __ mov(R0, ZR); |
1236 | __ ret(); |
1237 | __ Bind(&error); |
1238 | __ LoadImmediate(R0, 1); |
1239 | __ ret(); |
1240 | } |
1241 | |
1242 | ASSEMBLER_TEST_RUN(Clz, test) { |
1243 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1244 | EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1245 | } |
1246 | |
1247 | ASSEMBLER_TEST_GENERATE(Rbit, assembler) { |
1248 | const int64_t immediate = 0x0000000000000015; |
1249 | __ LoadImmediate(R0, immediate); |
1250 | __ rbit(R0, R0); |
1251 | __ ret(); |
1252 | } |
1253 | |
1254 | ASSEMBLER_TEST_RUN(Rbit, test) { |
1255 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1256 | const int64_t expected = 0xa800000000000000; |
1257 | EXPECT_EQ(expected, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1258 | } |
1259 | |
1260 | // Comparisons, branching. |
1261 | ASSEMBLER_TEST_GENERATE(BranchALForward, assembler) { |
1262 | Label l; |
1263 | __ movz(R0, Immediate(42), 0); |
1264 | __ b(&l, AL); |
1265 | __ movz(R0, Immediate(0), 0); |
1266 | __ Bind(&l); |
1267 | __ ret(); |
1268 | } |
1269 | |
1270 | ASSEMBLER_TEST_RUN(BranchALForward, test) { |
1271 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1272 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1273 | } |
1274 | |
1275 | ASSEMBLER_TEST_GENERATE(BranchALBackwards, assembler) { |
1276 | Label l, leave; |
1277 | __ movz(R0, Immediate(42), 0); |
1278 | __ b(&l, AL); |
1279 | |
1280 | __ movz(R0, Immediate(0), 0); |
1281 | __ Bind(&leave); |
1282 | __ ret(); |
1283 | __ movz(R0, Immediate(0), 0); |
1284 | |
1285 | __ Bind(&l); |
1286 | __ b(&leave, AL); |
1287 | __ movz(R0, Immediate(0), 0); |
1288 | __ ret(); |
1289 | } |
1290 | |
1291 | ASSEMBLER_TEST_RUN(BranchALBackwards, test) { |
1292 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1293 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1294 | } |
1295 | |
1296 | ASSEMBLER_TEST_GENERATE(CmpEqBranch, assembler) { |
1297 | Label l; |
1298 | |
1299 | __ movz(R0, Immediate(42), 0); |
1300 | __ movz(R1, Immediate(234), 0); |
1301 | __ movz(R2, Immediate(234), 0); |
1302 | |
1303 | __ cmp(R1, Operand(R2)); |
1304 | __ b(&l, EQ); |
1305 | __ movz(R0, Immediate(0), 0); |
1306 | __ Bind(&l); |
1307 | __ ret(); |
1308 | } |
1309 | |
1310 | ASSEMBLER_TEST_RUN(CmpEqBranch, test) { |
1311 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1312 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1313 | } |
1314 | |
1315 | ASSEMBLER_TEST_GENERATE(CmpEqBranchNotTaken, assembler) { |
1316 | Label l; |
1317 | |
1318 | __ movz(R0, Immediate(0), 0); |
1319 | __ movz(R1, Immediate(233), 0); |
1320 | __ movz(R2, Immediate(234), 0); |
1321 | |
1322 | __ cmp(R1, Operand(R2)); |
1323 | __ b(&l, EQ); |
1324 | __ movz(R0, Immediate(42), 0); |
1325 | __ Bind(&l); |
1326 | __ ret(); |
1327 | } |
1328 | |
1329 | ASSEMBLER_TEST_RUN(CmpEqBranchNotTaken, test) { |
1330 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1331 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1332 | } |
1333 | |
1334 | ASSEMBLER_TEST_GENERATE(CmpEq1Branch, assembler) { |
1335 | Label l; |
1336 | |
1337 | __ movz(R0, Immediate(42), 0); |
1338 | __ movz(R1, Immediate(1), 0); |
1339 | |
1340 | __ cmp(R1, Operand(1)); |
1341 | __ b(&l, EQ); |
1342 | __ movz(R0, Immediate(0), 0); |
1343 | __ Bind(&l); |
1344 | __ ret(); |
1345 | } |
1346 | |
1347 | ASSEMBLER_TEST_RUN(CmpEq1Branch, test) { |
1348 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1349 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1350 | } |
1351 | |
1352 | ASSEMBLER_TEST_GENERATE(CmnEq1Branch, assembler) { |
1353 | Label l; |
1354 | |
1355 | __ movz(R0, Immediate(42), 0); |
1356 | __ movn(R1, Immediate(0), 0); // R1 <- -1 |
1357 | |
1358 | __ cmn(R1, Operand(1)); |
1359 | __ b(&l, EQ); |
1360 | __ movz(R0, Immediate(0), 0); |
1361 | __ Bind(&l); |
1362 | __ ret(); |
1363 | } |
1364 | |
1365 | ASSEMBLER_TEST_RUN(CmnEq1Branch, test) { |
1366 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1367 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1368 | } |
1369 | |
1370 | ASSEMBLER_TEST_GENERATE(CmpLtBranch, assembler) { |
1371 | Label l; |
1372 | |
1373 | __ movz(R0, Immediate(42), 0); |
1374 | __ movz(R1, Immediate(233), 0); |
1375 | __ movz(R2, Immediate(234), 0); |
1376 | |
1377 | __ cmp(R1, Operand(R2)); |
1378 | __ b(&l, LT); |
1379 | __ movz(R0, Immediate(0), 0); |
1380 | __ Bind(&l); |
1381 | __ ret(); |
1382 | } |
1383 | |
1384 | ASSEMBLER_TEST_RUN(CmpLtBranch, test) { |
1385 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1386 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1387 | } |
1388 | |
1389 | ASSEMBLER_TEST_GENERATE(CmpLtBranchNotTaken, assembler) { |
1390 | Label l; |
1391 | |
1392 | __ movz(R0, Immediate(0), 0); |
1393 | __ movz(R1, Immediate(235), 0); |
1394 | __ movz(R2, Immediate(234), 0); |
1395 | |
1396 | __ cmp(R1, Operand(R2)); |
1397 | __ b(&l, LT); |
1398 | __ movz(R0, Immediate(42), 0); |
1399 | __ Bind(&l); |
1400 | __ ret(); |
1401 | } |
1402 | |
1403 | ASSEMBLER_TEST_RUN(CmpLtBranchNotTaken, test) { |
1404 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1405 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1406 | } |
1407 | |
1408 | ASSEMBLER_TEST_GENERATE(CmpBranchIfZero, assembler) { |
1409 | Label l; |
1410 | |
1411 | __ movz(R0, Immediate(42), 0); |
1412 | __ movz(R1, Immediate(0), 0); |
1413 | |
1414 | __ cbz(&l, R1); |
1415 | __ movz(R0, Immediate(0), 0); |
1416 | __ Bind(&l); |
1417 | __ ret(); |
1418 | } |
1419 | |
1420 | ASSEMBLER_TEST_RUN(CmpBranchIfZero, test) { |
1421 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1422 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1423 | } |
1424 | |
1425 | ASSEMBLER_TEST_GENERATE(CmpBranchIfZeroNotTaken, assembler) { |
1426 | Label l; |
1427 | |
1428 | __ movz(R0, Immediate(0), 0); |
1429 | __ movz(R1, Immediate(1), 0); |
1430 | |
1431 | __ cbz(&l, R1); |
1432 | __ movz(R0, Immediate(42), 0); |
1433 | __ Bind(&l); |
1434 | __ ret(); |
1435 | } |
1436 | |
1437 | ASSEMBLER_TEST_RUN(CmpBranchIfZeroNotTaken, test) { |
1438 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1439 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1440 | } |
1441 | |
1442 | ASSEMBLER_TEST_GENERATE(CmpBranchIfNotZero, assembler) { |
1443 | Label l; |
1444 | |
1445 | __ movz(R0, Immediate(42), 0); |
1446 | __ movz(R1, Immediate(1), 0); |
1447 | |
1448 | __ cbnz(&l, R1); |
1449 | __ movz(R0, Immediate(0), 0); |
1450 | __ Bind(&l); |
1451 | __ ret(); |
1452 | } |
1453 | |
1454 | ASSEMBLER_TEST_RUN(CmpBranchIfNotZero, test) { |
1455 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1456 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1457 | } |
1458 | |
1459 | ASSEMBLER_TEST_GENERATE(CmpBranchIfNotZeroNotTaken, assembler) { |
1460 | Label l; |
1461 | |
1462 | __ movz(R0, Immediate(0), 0); |
1463 | __ movz(R1, Immediate(0), 0); |
1464 | |
1465 | __ cbnz(&l, R1); |
1466 | __ movz(R0, Immediate(42), 0); |
1467 | __ Bind(&l); |
1468 | __ ret(); |
1469 | } |
1470 | |
1471 | ASSEMBLER_TEST_RUN(CmpBranchIfNotZeroNotTaken, test) { |
1472 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1473 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1474 | } |
1475 | |
1476 | static const int64_t kBits5And35 = (1 << 5) | (1ll << 35); |
1477 | |
1478 | ASSEMBLER_TEST_GENERATE(TstBranchIfZero, assembler) { |
1479 | Label l, l2; |
1480 | |
1481 | __ movz(R0, Immediate(42), 0); |
1482 | __ LoadImmediate(R1, ~kBits5And35); |
1483 | |
1484 | __ tbz(&l, R1, 5); |
1485 | __ movz(R0, Immediate(0), 0); |
1486 | __ Bind(&l); |
1487 | |
1488 | __ tbz(&l2, R1, 35); |
1489 | __ movz(R0, Immediate(0), 0); |
1490 | __ Bind(&l2); |
1491 | |
1492 | __ ret(); |
1493 | } |
1494 | |
1495 | ASSEMBLER_TEST_RUN(TstBranchIfZero, test) { |
1496 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1497 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1498 | } |
1499 | |
1500 | ASSEMBLER_TEST_GENERATE(TstBranchIfZeroNotTaken, assembler) { |
1501 | Label l; |
1502 | |
1503 | __ movz(R0, Immediate(0), 0); |
1504 | __ LoadImmediate(R1, kBits5And35); |
1505 | |
1506 | __ tbz(&l, R1, 5); |
1507 | __ movz(R0, Immediate(42), 0); |
1508 | __ Bind(&l); |
1509 | __ ret(); |
1510 | } |
1511 | |
1512 | ASSEMBLER_TEST_RUN(TstBranchIfZeroNotTaken, test) { |
1513 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1514 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1515 | } |
1516 | |
1517 | ASSEMBLER_TEST_GENERATE(TstBranchIfNotZero, assembler) { |
1518 | Label l, l2; |
1519 | |
1520 | __ movz(R0, Immediate(42), 0); |
1521 | __ LoadImmediate(R1, kBits5And35); |
1522 | |
1523 | __ tbnz(&l, R1, 5); |
1524 | __ movz(R0, Immediate(0), 0); |
1525 | __ Bind(&l); |
1526 | |
1527 | __ tbnz(&l2, R1, 35); |
1528 | __ movz(R0, Immediate(0), 0); |
1529 | __ Bind(&l2); |
1530 | |
1531 | __ ret(); |
1532 | } |
1533 | |
1534 | ASSEMBLER_TEST_RUN(TstBranchIfNotZero, test) { |
1535 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1536 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1537 | } |
1538 | |
1539 | ASSEMBLER_TEST_GENERATE(TstBranchIfNotZeroNotTaken, assembler) { |
1540 | Label l; |
1541 | |
1542 | __ movz(R0, Immediate(0), 0); |
1543 | __ LoadImmediate(R1, ~kBits5And35); |
1544 | |
1545 | __ tbnz(&l, R1, 5); |
1546 | __ movz(R0, Immediate(42), 0); |
1547 | __ Bind(&l); |
1548 | __ ret(); |
1549 | } |
1550 | |
1551 | ASSEMBLER_TEST_RUN(TstBranchIfNotZeroNotTaken, test) { |
1552 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1553 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1554 | } |
1555 | |
1556 | ASSEMBLER_TEST_GENERATE(TstBranchIfZeroFar, assembler) { |
1557 | Label l; |
1558 | |
1559 | __ movz(R0, Immediate(42), 0); |
1560 | __ LoadImmediate(R1, ~kBits5And35); |
1561 | |
1562 | __ tbz(&l, R1, 5); |
1563 | |
1564 | const intptr_t kRange = 1 << 14; // tbz has 14 bits of range. |
1565 | for (intptr_t i = 0; i < kRange; i++) { |
1566 | __ brk(0); |
1567 | } |
1568 | |
1569 | __ movz(R0, Immediate(0), 0); |
1570 | __ Bind(&l); |
1571 | __ ret(); |
1572 | } |
1573 | |
1574 | ASSEMBLER_TEST_RUN(TstBranchIfZeroFar, test) { |
1575 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1576 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1577 | } |
1578 | |
1579 | ASSEMBLER_TEST_GENERATE(TstBranchIfNotZeroFar, assembler) { |
1580 | Label l; |
1581 | |
1582 | __ movz(R0, Immediate(42), 0); |
1583 | __ LoadImmediate(R1, kBits5And35); |
1584 | |
1585 | __ tbnz(&l, R1, 5); |
1586 | |
1587 | const intptr_t kRange = 1 << 14; // tbnz has 14 bits of range. |
1588 | for (intptr_t i = 0; i < kRange; i++) { |
1589 | __ brk(0); |
1590 | } |
1591 | |
1592 | __ movz(R0, Immediate(0), 0); |
1593 | __ Bind(&l); |
1594 | __ ret(); |
1595 | } |
1596 | |
1597 | ASSEMBLER_TEST_RUN(TstBranchIfNotZeroFar, test) { |
1598 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1599 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1600 | } |
1601 | |
1602 | ASSEMBLER_TEST_GENERATE(FcmpEqBranch, assembler) { |
1603 | Label l; |
1604 | |
1605 | __ LoadDImmediate(V0, 42.0); |
1606 | __ LoadDImmediate(V1, 234.0); |
1607 | __ LoadDImmediate(V2, 234.0); |
1608 | |
1609 | __ fcmpd(V1, V2); |
1610 | __ b(&l, EQ); |
1611 | __ LoadDImmediate(V0, 0.0); |
1612 | __ Bind(&l); |
1613 | __ ret(); |
1614 | } |
1615 | |
1616 | ASSEMBLER_TEST_RUN(FcmpEqBranch, test) { |
1617 | typedef double (*DoubleReturn)() DART_UNUSED; |
1618 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
1619 | } |
1620 | |
1621 | ASSEMBLER_TEST_GENERATE(TstBranchIfZeroFar1, assembler) { |
1622 | Label l; |
1623 | |
1624 | __ LoadImmediate(R0, 41); |
1625 | __ tbnz(&l, R0, 5); |
1626 | __ Stop("Hammertime" ); |
1627 | |
1628 | for (int i = 0; i < 0x10000; i++) { |
1629 | __ add(R0, R0, Operand(1)); |
1630 | __ sub(R0, R0, Operand(1)); |
1631 | } |
1632 | |
1633 | __ AddImmediate(R0, R0, -1); // Not run. |
1634 | |
1635 | __ Bind(&l); |
1636 | __ AddImmediate(R0, R0, 1); |
1637 | __ ret(); |
1638 | } |
1639 | |
1640 | ASSEMBLER_TEST_RUN(TstBranchIfZeroFar1, test) { |
1641 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1642 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1643 | } |
1644 | |
1645 | ASSEMBLER_TEST_GENERATE(TstBranchIfZeroFar2, assembler) { |
1646 | Label l; |
1647 | |
1648 | for (int i = 0; i < 0x10000; i++) { |
1649 | __ add(R0, R0, Operand(1)); |
1650 | __ sub(R0, R0, Operand(1)); |
1651 | } |
1652 | |
1653 | __ LoadImmediate(R0, 41); |
1654 | __ tbnz(&l, R0, 5); |
1655 | __ Stop("Hammertime" ); |
1656 | |
1657 | __ AddImmediate(R0, R0, -1); // Not run. |
1658 | |
1659 | __ Bind(&l); |
1660 | __ AddImmediate(R0, R0, 1); |
1661 | __ ret(); |
1662 | } |
1663 | |
1664 | ASSEMBLER_TEST_RUN(TstBranchIfZeroFar2, test) { |
1665 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1666 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1667 | } |
1668 | |
1669 | ASSEMBLER_TEST_GENERATE(TstBranchIfZeroFar3, assembler) { |
1670 | Label l, l2; |
1671 | __ LoadImmediate(R0, 41); |
1672 | __ b(&l, AL); |
1673 | |
1674 | __ AddImmediate(R0, R0, -1); // Not run. |
1675 | |
1676 | __ Bind(&l2); |
1677 | __ AddImmediate(R0, R0, 1); |
1678 | __ ret(); |
1679 | |
1680 | for (int i = 0; i < 0x10000; i++) { |
1681 | __ add(R0, R0, Operand(1)); |
1682 | __ sub(R0, R0, Operand(1)); |
1683 | } |
1684 | |
1685 | __ Bind(&l); |
1686 | __ tbnz(&l2, R0, 5); |
1687 | __ Stop("Hammertime" ); |
1688 | } |
1689 | |
1690 | ASSEMBLER_TEST_RUN(TstBranchIfZeroFar3, test) { |
1691 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1692 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1693 | } |
1694 | |
1695 | ASSEMBLER_TEST_GENERATE(FcmpEqBranchNotTaken, assembler) { |
1696 | Label l; |
1697 | |
1698 | __ LoadDImmediate(V0, 0.0); |
1699 | __ LoadDImmediate(V1, 233.0); |
1700 | __ LoadDImmediate(V2, 234.0); |
1701 | |
1702 | __ fcmpd(V1, V2); |
1703 | __ b(&l, EQ); |
1704 | __ LoadDImmediate(V0, 42.0); |
1705 | __ Bind(&l); |
1706 | __ ret(); |
1707 | } |
1708 | |
1709 | ASSEMBLER_TEST_RUN(FcmpEqBranchNotTaken, test) { |
1710 | typedef double (*DoubleReturn)() DART_UNUSED; |
1711 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
1712 | } |
1713 | |
1714 | ASSEMBLER_TEST_GENERATE(FcmpLtBranch, assembler) { |
1715 | Label l; |
1716 | |
1717 | __ LoadDImmediate(V0, 42.0); |
1718 | __ LoadDImmediate(V1, 233.0); |
1719 | __ LoadDImmediate(V2, 234.0); |
1720 | |
1721 | __ fcmpd(V1, V2); |
1722 | __ b(&l, LT); |
1723 | __ LoadDImmediate(V0, 0.0); |
1724 | __ Bind(&l); |
1725 | __ ret(); |
1726 | } |
1727 | |
1728 | ASSEMBLER_TEST_RUN(FcmpLtBranch, test) { |
1729 | typedef double (*DoubleReturn)() DART_UNUSED; |
1730 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
1731 | } |
1732 | |
1733 | ASSEMBLER_TEST_GENERATE(FcmpLtBranchNotTaken, assembler) { |
1734 | Label l; |
1735 | |
1736 | __ LoadDImmediate(V0, 0.0); |
1737 | __ LoadDImmediate(V1, 235.0); |
1738 | __ LoadDImmediate(V2, 234.0); |
1739 | |
1740 | __ fcmpd(V1, V2); |
1741 | __ b(&l, LT); |
1742 | __ LoadDImmediate(V0, 42.0); |
1743 | __ Bind(&l); |
1744 | __ ret(); |
1745 | } |
1746 | |
1747 | ASSEMBLER_TEST_RUN(FcmpLtBranchNotTaken, test) { |
1748 | typedef double (*DoubleReturn)() DART_UNUSED; |
1749 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
1750 | } |
1751 | |
1752 | ASSEMBLER_TEST_GENERATE(FcmpzGtBranch, assembler) { |
1753 | Label l; |
1754 | |
1755 | __ LoadDImmediate(V0, 235.0); |
1756 | __ LoadDImmediate(V1, 233.0); |
1757 | |
1758 | __ fcmpdz(V1); |
1759 | __ b(&l, GT); |
1760 | __ LoadDImmediate(V0, 0.0); |
1761 | __ ret(); |
1762 | __ Bind(&l); |
1763 | __ LoadDImmediate(V0, 42.0); |
1764 | __ ret(); |
1765 | } |
1766 | |
1767 | ASSEMBLER_TEST_RUN(FcmpzGtBranch, test) { |
1768 | typedef double (*DoubleReturn)() DART_UNUSED; |
1769 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
1770 | } |
1771 | |
1772 | ASSEMBLER_TEST_GENERATE(AndsBranch, assembler) { |
1773 | Label l; |
1774 | |
1775 | __ movz(R0, Immediate(42), 0); |
1776 | __ movz(R1, Immediate(2), 0); |
1777 | __ movz(R2, Immediate(1), 0); |
1778 | |
1779 | __ ands(R3, R1, Operand(R2)); |
1780 | __ b(&l, EQ); |
1781 | __ movz(R0, Immediate(0), 0); |
1782 | __ Bind(&l); |
1783 | __ ret(); |
1784 | } |
1785 | |
1786 | ASSEMBLER_TEST_RUN(AndsBranch, test) { |
1787 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1788 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1789 | } |
1790 | |
1791 | ASSEMBLER_TEST_GENERATE(AndsBranchNotTaken, assembler) { |
1792 | Label l; |
1793 | |
1794 | __ movz(R0, Immediate(0), 0); |
1795 | __ movz(R1, Immediate(2), 0); |
1796 | __ movz(R2, Immediate(2), 0); |
1797 | |
1798 | __ ands(R3, R1, Operand(R2)); |
1799 | __ b(&l, EQ); |
1800 | __ movz(R0, Immediate(42), 0); |
1801 | __ Bind(&l); |
1802 | __ ret(); |
1803 | } |
1804 | |
1805 | ASSEMBLER_TEST_RUN(AndsBranchNotTaken, test) { |
1806 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1807 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1808 | } |
1809 | |
1810 | ASSEMBLER_TEST_GENERATE(BicsBranch, assembler) { |
1811 | Label l; |
1812 | |
1813 | __ movz(R0, Immediate(42), 0); |
1814 | __ movz(R1, Immediate(2), 0); |
1815 | __ movz(R2, Immediate(2), 0); |
1816 | |
1817 | __ bics(R3, R1, Operand(R2)); |
1818 | __ b(&l, EQ); |
1819 | __ movz(R0, Immediate(0), 0); |
1820 | __ Bind(&l); |
1821 | __ ret(); |
1822 | } |
1823 | |
1824 | ASSEMBLER_TEST_RUN(BicsBranch, test) { |
1825 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1826 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1827 | } |
1828 | |
1829 | ASSEMBLER_TEST_GENERATE(BicsBranchNotTaken, assembler) { |
1830 | Label l; |
1831 | |
1832 | __ movz(R0, Immediate(0), 0); |
1833 | __ movz(R1, Immediate(2), 0); |
1834 | __ movz(R2, Immediate(1), 0); |
1835 | |
1836 | __ bics(R3, R1, Operand(R2)); |
1837 | __ b(&l, EQ); |
1838 | __ movz(R0, Immediate(42), 0); |
1839 | __ Bind(&l); |
1840 | __ ret(); |
1841 | } |
1842 | |
1843 | ASSEMBLER_TEST_RUN(BicsBranchNotTaken, test) { |
1844 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1845 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1846 | } |
1847 | |
1848 | ASSEMBLER_TEST_GENERATE(AndisBranch, assembler) { |
1849 | Label l; |
1850 | |
1851 | __ movz(R0, Immediate(42), 0); |
1852 | __ movz(R1, Immediate(2), 0); |
1853 | |
1854 | __ andis(R3, R1, Immediate(1)); |
1855 | __ b(&l, EQ); |
1856 | __ movz(R0, Immediate(0), 0); |
1857 | __ Bind(&l); |
1858 | __ ret(); |
1859 | } |
1860 | |
1861 | ASSEMBLER_TEST_RUN(AndisBranch, test) { |
1862 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1863 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1864 | } |
1865 | |
1866 | ASSEMBLER_TEST_GENERATE(AndisBranchNotTaken, assembler) { |
1867 | Label l; |
1868 | |
1869 | __ movz(R0, Immediate(0), 0); |
1870 | __ movz(R1, Immediate(2), 0); |
1871 | |
1872 | __ andis(R3, R1, Immediate(2)); |
1873 | __ b(&l, EQ); |
1874 | __ movz(R0, Immediate(42), 0); |
1875 | __ Bind(&l); |
1876 | __ ret(); |
1877 | } |
1878 | |
1879 | ASSEMBLER_TEST_RUN(AndisBranchNotTaken, test) { |
1880 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1881 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1882 | } |
1883 | |
1884 | // Address of PC-rel offset, br, blr. |
1885 | ASSEMBLER_TEST_GENERATE(AdrBr, assembler) { |
1886 | __ movz(R0, Immediate(123), 0); |
1887 | // R1 <- PC + 3*Instr::kInstrSize |
1888 | __ adr(R1, Immediate(3 * Instr::kInstrSize)); |
1889 | __ br(R1); |
1890 | __ ret(); |
1891 | |
1892 | // br goes here. |
1893 | __ movz(R0, Immediate(42), 0); |
1894 | __ ret(); |
1895 | } |
1896 | |
1897 | ASSEMBLER_TEST_RUN(AdrBr, test) { |
1898 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1899 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1900 | } |
1901 | |
1902 | ASSEMBLER_TEST_GENERATE(AdrBlr, assembler) { |
1903 | __ movz(R0, Immediate(123), 0); |
1904 | __ add(R3, ZR, Operand(LR)); // Save LR. |
1905 | // R1 <- PC + 4*Instr::kInstrSize |
1906 | __ adr(R1, Immediate(4 * Instr::kInstrSize)); |
1907 | __ blr(R1); |
  __ add(LR, ZR, Operand(R3)); // Restore LR.
1909 | __ ret(); |
1910 | |
1911 | // blr goes here. |
1912 | __ movz(R0, Immediate(42), 0); |
1913 | __ ret(); |
1914 | } |
1915 | |
1916 | ASSEMBLER_TEST_RUN(AdrBlr, test) { |
1917 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1918 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1919 | } |
1920 | |
1921 | // Misc. arithmetic. |
1922 | ASSEMBLER_TEST_GENERATE(Udiv, assembler) { |
1923 | __ movz(R0, Immediate(27), 0); |
1924 | __ movz(R1, Immediate(9), 0); |
1925 | __ udiv(R2, R0, R1); |
1926 | __ mov(R0, R2); |
1927 | __ ret(); |
1928 | } |
1929 | |
1930 | ASSEMBLER_TEST_RUN(Udiv, test) { |
1931 | EXPECT(test != NULL); |
1932 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1933 | EXPECT_EQ(3, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1934 | } |
1935 | |
1936 | ASSEMBLER_TEST_GENERATE(Sdiv, assembler) { |
1937 | __ movz(R0, Immediate(27), 0); |
1938 | __ movz(R1, Immediate(9), 0); |
1939 | __ neg(R1, R1); |
1940 | __ sdiv(R2, R0, R1); |
1941 | __ mov(R0, R2); |
1942 | __ ret(); |
1943 | } |
1944 | |
1945 | ASSEMBLER_TEST_RUN(Sdiv, test) { |
1946 | EXPECT(test != NULL); |
1947 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1948 | EXPECT_EQ(-3, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1949 | } |
1950 | |
1951 | ASSEMBLER_TEST_GENERATE(Udiv_zero, assembler) { |
1952 | __ movz(R0, Immediate(27), 0); |
1953 | __ movz(R1, Immediate(0), 0); |
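  // ARM64 integer division by zero does not trap; udiv and sdiv produce 0.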
1954 | __ udiv(R2, R0, R1); |
1955 | __ mov(R0, R2); |
1956 | __ ret(); |
1957 | } |
1958 | |
1959 | ASSEMBLER_TEST_RUN(Udiv_zero, test) { |
1960 | EXPECT(test != NULL); |
1961 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1962 | EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1963 | } |
1964 | |
1965 | ASSEMBLER_TEST_GENERATE(Sdiv_zero, assembler) { |
1966 | __ movz(R0, Immediate(27), 0); |
1967 | __ movz(R1, Immediate(0), 0); |
1968 | __ sdiv(R2, R0, R1); |
1969 | __ mov(R0, R2); |
1970 | __ ret(); |
1971 | } |
1972 | |
1973 | ASSEMBLER_TEST_RUN(Sdiv_zero, test) { |
1974 | EXPECT(test != NULL); |
1975 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1976 | EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1977 | } |
1978 | |
1979 | ASSEMBLER_TEST_GENERATE(Udiv_corner, assembler) { |
1980 | __ movz(R0, Immediate(0x8000), 3); // R0 <- 0x8000000000000000 |
1981 | __ movn(R1, Immediate(0), 0); // R1 <- 0xffffffffffffffff |
1982 | __ udiv(R2, R0, R1); |
1983 | __ mov(R0, R2); |
1984 | __ ret(); |
1985 | } |
1986 | |
1987 | ASSEMBLER_TEST_RUN(Udiv_corner, test) { |
1988 | EXPECT(test != NULL); |
1989 | typedef int64_t (*Int64Return)() DART_UNUSED; |
1990 | EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
1991 | } |
1992 | |
1993 | ASSEMBLER_TEST_GENERATE(Sdiv_corner, assembler) { |
  __ movz(R3, Immediate(0x8000), 3); // R3 <- 0x8000000000000000
1995 | __ movn(R1, Immediate(0), 0); // R1 <- 0xffffffffffffffff |
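  // INT64_MIN divided by -1 overflows; ARM64 sdiv wraps and yields INT64_MIN.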
1996 | __ sdiv(R2, R3, R1); |
1997 | __ mov(R0, R2); |
1998 | __ ret(); |
1999 | } |
2000 | |
2001 | ASSEMBLER_TEST_RUN(Sdiv_corner, test) { |
2002 | EXPECT(test != NULL); |
2003 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2004 | EXPECT_EQ(static_cast<int64_t>(0x8000000000000000), |
2005 | EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2006 | } |
2007 | |
2008 | ASSEMBLER_TEST_GENERATE(Lslv, assembler) { |
2009 | __ movz(R1, Immediate(21), 0); |
2010 | __ movz(R2, Immediate(1), 0); |
2011 | __ lslv(R0, R1, R2); |
2012 | __ ret(); |
2013 | } |
2014 | |
2015 | ASSEMBLER_TEST_RUN(Lslv, test) { |
2016 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2017 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2018 | } |
2019 | |
2020 | ASSEMBLER_TEST_GENERATE(Lsrv, assembler) { |
2021 | __ movz(R1, Immediate(84), 0); |
2022 | __ movz(R2, Immediate(1), 0); |
2023 | __ lsrv(R0, R1, R2); |
2024 | __ ret(); |
2025 | } |
2026 | |
2027 | ASSEMBLER_TEST_RUN(Lsrv, test) { |
2028 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2029 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2030 | } |
2031 | |
2032 | ASSEMBLER_TEST_GENERATE(LShiftingV, assembler) { |
2033 | __ movz(R1, Immediate(1), 0); |
2034 | __ movz(R2, Immediate(63), 0); |
2035 | __ lslv(R1, R1, R2); |
2036 | __ lsrv(R0, R1, R2); |
2037 | __ ret(); |
2038 | } |
2039 | |
2040 | ASSEMBLER_TEST_RUN(LShiftingV, test) { |
2041 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2042 | EXPECT_EQ(1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2043 | } |
2044 | |
2045 | ASSEMBLER_TEST_GENERATE(RShiftingV, assembler) { |
2046 | __ movz(R1, Immediate(1), 0); |
2047 | __ movz(R2, Immediate(63), 0); |
2048 | __ lslv(R1, R1, R2); |
2049 | __ asrv(R0, R1, R2); |
2050 | __ ret(); |
2051 | } |
2052 | |
2053 | ASSEMBLER_TEST_RUN(RShiftingV, test) { |
2054 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2055 | EXPECT_EQ(-1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2056 | } |
2057 | |
2058 | ASSEMBLER_TEST_GENERATE(Mult_pos, assembler) { |
2059 | __ movz(R1, Immediate(6), 0); |
2060 | __ movz(R2, Immediate(7), 0); |
2061 | __ mul(R0, R1, R2); |
2062 | __ ret(); |
2063 | } |
2064 | |
2065 | ASSEMBLER_TEST_RUN(Mult_pos, test) { |
2066 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2067 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2068 | } |
2069 | |
2070 | ASSEMBLER_TEST_GENERATE(Mult_neg, assembler) { |
2071 | __ movz(R1, Immediate(6), 0); |
2072 | __ movz(R2, Immediate(7), 0); |
2073 | __ neg(R2, R2); |
2074 | __ mul(R0, R1, R2); |
2075 | __ ret(); |
2076 | } |
2077 | |
2078 | ASSEMBLER_TEST_RUN(Mult_neg, test) { |
2079 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2080 | EXPECT_EQ(-42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2081 | } |
2082 | |
2083 | ASSEMBLER_TEST_GENERATE(Smulh_pos, assembler) { |
2084 | __ movz(R1, Immediate(6), 0); |
2085 | __ movz(R2, Immediate(7), 0); |
2086 | __ smulh(R0, R1, R2); |
2087 | __ ret(); |
2088 | } |
2089 | |
2090 | ASSEMBLER_TEST_RUN(Smulh_pos, test) { |
2091 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2092 | EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2093 | } |
2094 | |
2095 | ASSEMBLER_TEST_GENERATE(Smulh_neg, assembler) { |
2096 | __ movz(R1, Immediate(6), 0); |
2097 | __ movz(R2, Immediate(7), 0); |
2098 | __ neg(R2, R2); |
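  // The 128-bit product 6 * -7 = -42 is negative, so its high 64 bits are all
  // ones, i.e. -1.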
2099 | __ smulh(R0, R1, R2); |
2100 | __ ret(); |
2101 | } |
2102 | |
2103 | ASSEMBLER_TEST_RUN(Smulh_neg, test) { |
2104 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2105 | EXPECT_EQ(-1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2106 | } |
2107 | |
2108 | ASSEMBLER_TEST_GENERATE(Umulh, assembler) { |
2109 | __ movz(R1, Immediate(-1), 3); // 0xffff000000000000 |
2110 | __ movz(R2, Immediate(7), 3); // 0x0007000000000000 |
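  // The 128-bit product is 0xffff * 0x7 = 0x6fff9 shifted left by 96 bits;
  // umulh keeps its high 64 bits, i.e. 0x6fff9 << 32.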
2111 | __ umulh(R0, R1, R2); // 0x0006fff900000000 |
2112 | __ ret(); |
2113 | } |
2114 | |
2115 | ASSEMBLER_TEST_RUN(Umulh, test) { |
2116 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2117 | EXPECT_EQ(static_cast<int64_t>(0x6fff900000000), |
2118 | EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2119 | } |
2120 | |
2121 | ASSEMBLER_TEST_GENERATE(Umaddl, assembler) { |
2122 | __ movn(R1, Immediate(0), 0); // W1 = 0xffffffff. |
2123 | __ movz(R2, Immediate(7), 0); // W2 = 7. |
2124 | __ movz(R3, Immediate(8), 0); // X3 = 8. |
2125 | __ umaddl(R0, R1, R2, R3); // X0 = W1*W2 + X3 = 0x700000001. |
2126 | __ ret(); |
2127 | } |
2128 | |
2129 | ASSEMBLER_TEST_RUN(Umaddl, test) { |
2130 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2131 | EXPECT_EQ(0x700000001, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2132 | } |
2133 | |
2134 | ASSEMBLER_TEST_GENERATE(Smaddl, assembler) { |
2135 | __ movn(R1, Immediate(1), 0); // W1 = -2. |
2136 | __ movz(R2, Immediate(7), 0); // W2 = 7. |
2137 | __ movz(R3, Immediate(20), 0); // X3 = 20. |
2138 | __ smaddl(R0, R1, R2, R3); // X0 = W1*W2 + X3 = 6. |
2139 | __ ret(); |
2140 | } |
2141 | |
2142 | ASSEMBLER_TEST_RUN(Smaddl, test) { |
2143 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2144 | EXPECT_EQ(6, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2145 | } |
2146 | |
2147 | ASSEMBLER_TEST_GENERATE(Smaddl2, assembler) { |
2148 | __ movn(R1, Immediate(1), 0); // W1 = -2. |
2149 | __ movn(R2, Immediate(0), 0); // W2 = -1. |
2150 | __ smull(R0, R1, R2); // X0 = W1*W2 = 2, alias of smaddl. |
2151 | __ ret(); |
2152 | } |
2153 | |
2154 | ASSEMBLER_TEST_RUN(Smaddl2, test) { |
2155 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2156 | EXPECT_EQ(2, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2157 | } |
2158 | |
2159 | ASSEMBLER_TEST_GENERATE(Smaddl3, assembler) { |
2160 | __ movz(R1, Immediate(0xffff), 0); // W1 = 0xffff. |
2161 | __ movz(R2, Immediate(0xffff), 0); // W2 = 0xffff. |
2162 | __ smull(R0, R1, R2); // X0 = W1*W2, alias of smaddl. |
2163 | __ ret(); |
2164 | } |
2165 | |
2166 | ASSEMBLER_TEST_RUN(Smaddl3, test) { |
2167 | typedef int64_t (*Int64Return)() DART_UNUSED; |
  EXPECT_EQ(0xffffLL * 0xffffLL,
2169 | EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2170 | } |
2171 | |
2172 | ASSEMBLER_TEST_GENERATE(SmaddlOverflow, assembler) { |
2173 | Label return_ltuae; |
2174 | __ movz(R1, Immediate(0xffff), 0); // W1 = 0xffff. |
2175 | __ AddImmediate(R1, 4); // W1 = 0x10003. |
  __ movz(R2, Immediate(0x7fff), 0); // W2 = 0x7fff.
2177 | __ smull(R0, R1, R2); // X0 = W1*W2, alias of smaddl. |
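  // If the product fits in 32 bits, bits 63..31 are all copies of bit 31, so
  // an arithmetic shift by 31 equals an arithmetic shift by 63.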
2178 | __ AsrImmediate(R3, R0, 31); |
2179 | __ cmp(R3, Operand(R0, ASR, 63)); // Detect signed 32 bit overflow. |
2180 | __ b(&return_ltuae, NE); |
2181 | __ ret(); |
2182 | __ Bind(&return_ltuae); |
2183 | __ movz(R0, Immediate(42), 0); |
2184 | __ ret(); |
2185 | } |
2186 | |
2187 | ASSEMBLER_TEST_RUN(SmaddlOverflow, test) { |
2188 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2189 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2190 | } |
2191 | |
2192 | ASSEMBLER_TEST_GENERATE(SmaddlOverflow2, assembler) { |
2193 | Label return_ltuae; |
2194 | __ movz(R1, Immediate(0xffff), 0); // W1 = 0xffff. |
2195 | __ movn(R2, Immediate(0xffff), 0); // W2 = -0x10000. |
2196 | __ AddImmediate(R2, -3); // W2 = -0x10003. |
2197 | __ smull(R0, R1, R2); // X0 = W1*W2, alias of smaddl. |
2198 | __ AsrImmediate(R3, R0, 31); |
2199 | __ cmp(R3, Operand(R0, ASR, 63)); // Detect signed 32 bit overflow. |
2200 | __ b(&return_ltuae, NE); |
2201 | __ ret(); |
2202 | __ Bind(&return_ltuae); |
2203 | __ movz(R0, Immediate(42), 0); |
2204 | __ ret(); |
2205 | } |
2206 | |
2207 | ASSEMBLER_TEST_RUN(SmaddlOverflow2, test) { |
2208 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2209 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2210 | } |
2211 | |
2212 | ASSEMBLER_TEST_GENERATE(SmaddlOverflow3, assembler) { |
2213 | Label return_ltuae; |
2214 | __ LoadImmediate(R1, 0x01007fff); |
2215 | __ LoadImmediate(R2, 0x01007fff); |
2216 | __ smull(R0, R1, R2); // X0 = W1*W2, alias of smaddl. |
2217 | __ AsrImmediate(R3, R0, 31); |
2218 | __ cmp(R3, Operand(R0, ASR, 63)); // Detect signed 32 bit overflow. |
2219 | __ b(&return_ltuae, NE); |
2220 | __ ret(); |
2221 | __ Bind(&return_ltuae); |
2222 | __ movz(R0, Immediate(42), 0); |
2223 | __ ret(); |
2224 | } |
2225 | |
2226 | ASSEMBLER_TEST_RUN(SmaddlOverflow3, test) { |
2227 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2228 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2229 | } |
2230 | |
2231 | ASSEMBLER_TEST_GENERATE(NegNoOverflow, assembler) { |
2232 | Label return_ltuae; |
2233 | __ LoadImmediate(R1, 0x7fffffff); |
  __ negsw(R0, R1); // W0 = -W1, setting the condition flags.
2235 | __ sxtw(R0, R0); |
2236 | __ b(&return_ltuae, VS); // Branch on overflow set. |
2237 | __ ret(); |
2238 | __ Bind(&return_ltuae); |
2239 | __ movz(R0, Immediate(42), 0); |
2240 | __ ret(); |
2241 | } |
2242 | |
2243 | ASSEMBLER_TEST_RUN(NegNoOverflow, test) { |
2244 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2245 | EXPECT_EQ(-0x7fffffff, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2246 | } |
2247 | |
2248 | ASSEMBLER_TEST_GENERATE(NegNoOverflow2, assembler) { |
2249 | Label return_ltuae; |
2250 | __ LoadImmediate(R1, 0x7123); |
  __ negsw(R0, R1); // W0 = -W1, setting the condition flags.
2252 | __ sxtw(R0, R0); |
2253 | __ b(&return_ltuae, VS); // Branch on overflow set. |
2254 | __ ret(); |
2255 | __ Bind(&return_ltuae); |
2256 | __ movz(R0, Immediate(42), 0); |
2257 | __ ret(); |
2258 | } |
2259 | |
2260 | ASSEMBLER_TEST_RUN(NegNoOverflow2, test) { |
2261 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2262 | EXPECT_EQ(-0x7123, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2263 | } |
2264 | |
2265 | ASSEMBLER_TEST_GENERATE(NegOverflow, assembler) { |
2266 | Label return_ltuae; |
2267 | __ LoadImmediate(R1, -0x80000000ll); |
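  // Negating INT32_MIN is not representable in 32 bits, so negsw sets the
  // overflow (V) flag.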
  __ negsw(R0, R1); // W0 = -W1, setting the condition flags.
2269 | __ sxtw(R0, R0); |
2270 | __ b(&return_ltuae, VS); // Branch on overflow set. |
2271 | __ ret(); |
2272 | __ Bind(&return_ltuae); |
2273 | __ movz(R0, Immediate(42), 0); |
2274 | __ ret(); |
2275 | } |
2276 | |
2277 | ASSEMBLER_TEST_RUN(NegOverflow, test) { |
2278 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2279 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2280 | } |
2281 | |
2282 | // Loading immediate values without the object pool. |
2283 | ASSEMBLER_TEST_GENERATE(LoadImmediateSmall, assembler) { |
2284 | __ LoadImmediate(R0, 42); |
2285 | __ ret(); |
2286 | } |
2287 | |
2288 | ASSEMBLER_TEST_RUN(LoadImmediateSmall, test) { |
2289 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2290 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2291 | } |
2292 | |
2293 | ASSEMBLER_TEST_GENERATE(LoadImmediateMed, assembler) { |
2294 | __ LoadImmediate(R0, 0xf1234123); |
2295 | __ ret(); |
2296 | } |
2297 | |
2298 | ASSEMBLER_TEST_RUN(LoadImmediateMed, test) { |
2299 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2300 | EXPECT_EQ(0xf1234123, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2301 | } |
2302 | |
2303 | ASSEMBLER_TEST_GENERATE(LoadImmediateMed2, assembler) { |
2304 | __ LoadImmediate(R0, 0x4321f1234123); |
2305 | __ ret(); |
2306 | } |
2307 | |
2308 | ASSEMBLER_TEST_RUN(LoadImmediateMed2, test) { |
2309 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2310 | EXPECT_EQ(0x4321f1234123, |
2311 | EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2312 | } |
2313 | |
2314 | ASSEMBLER_TEST_GENERATE(LoadImmediateLarge, assembler) { |
2315 | __ LoadImmediate(R0, 0x9287436598237465); |
2316 | __ ret(); |
2317 | } |
2318 | |
2319 | ASSEMBLER_TEST_RUN(LoadImmediateLarge, test) { |
2320 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2321 | EXPECT_EQ(static_cast<int64_t>(0x9287436598237465), |
2322 | EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2323 | } |
2324 | |
2325 | ASSEMBLER_TEST_GENERATE(LoadImmediateSmallNeg, assembler) { |
2326 | __ LoadImmediate(R0, -42); |
2327 | __ ret(); |
2328 | } |
2329 | |
2330 | ASSEMBLER_TEST_RUN(LoadImmediateSmallNeg, test) { |
2331 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2332 | EXPECT_EQ(-42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2333 | } |
2334 | |
2335 | ASSEMBLER_TEST_GENERATE(LoadImmediateMedNeg, assembler) { |
2336 | __ LoadImmediate(R0, -0x1212341234); |
2337 | __ ret(); |
2338 | } |
2339 | |
2340 | ASSEMBLER_TEST_RUN(LoadImmediateMedNeg, test) { |
2341 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2342 | EXPECT_EQ(-0x1212341234, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2343 | } |
2344 | |
2345 | ASSEMBLER_TEST_GENERATE(LoadImmediateMedNeg2, assembler) { |
2346 | __ LoadImmediate(R0, -0x1212340000); |
2347 | __ ret(); |
2348 | } |
2349 | |
2350 | ASSEMBLER_TEST_RUN(LoadImmediateMedNeg2, test) { |
2351 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2352 | EXPECT_EQ(-0x1212340000, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2353 | } |
2354 | |
2355 | ASSEMBLER_TEST_GENERATE(LoadImmediateMedNeg3, assembler) { |
2356 | __ LoadImmediate(R0, -0x1200001234); |
2357 | __ ret(); |
2358 | } |
2359 | |
2360 | ASSEMBLER_TEST_RUN(LoadImmediateMedNeg3, test) { |
2361 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2362 | EXPECT_EQ(-0x1200001234, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2363 | } |
2364 | |
2365 | ASSEMBLER_TEST_GENERATE(LoadImmediateMedNeg4, assembler) { |
2366 | __ LoadImmediate(R0, -0x12341234); |
2367 | __ ret(); |
2368 | } |
2369 | |
2370 | ASSEMBLER_TEST_RUN(LoadImmediateMedNeg4, test) { |
2371 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2372 | EXPECT_EQ(-0x12341234, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2373 | } |
2374 | |
2375 | ASSEMBLER_TEST_GENERATE(LoadHalfWordUnaligned, assembler) { |
2376 | __ ldr(R1, R0, kHalfword); |
2377 | __ mov(R0, R1); |
2378 | __ ret(); |
2379 | } |
2380 | |
2381 | ASSEMBLER_TEST_RUN(LoadHalfWordUnaligned, test) { |
2382 | EXPECT(test != NULL); |
2383 | typedef intptr_t (*LoadHalfWordUnaligned)(intptr_t) DART_UNUSED; |
2384 | uint8_t buffer[4] = { |
2385 | 0x89, 0xAB, 0xCD, 0xEF, |
2386 | }; |
2387 | |
2388 | EXPECT_EQ( |
2389 | static_cast<int16_t>(static_cast<uint16_t>(0xAB89)), |
2390 | EXECUTE_TEST_CODE_INTPTR_INTPTR(LoadHalfWordUnaligned, test->entry(), |
2391 | reinterpret_cast<intptr_t>(&buffer[0]))); |
2392 | EXPECT_EQ( |
2393 | static_cast<int16_t>(static_cast<uint16_t>(0xCDAB)), |
2394 | EXECUTE_TEST_CODE_INTPTR_INTPTR(LoadHalfWordUnaligned, test->entry(), |
2395 | reinterpret_cast<intptr_t>(&buffer[1]))); |
2396 | } |
2397 | |
2398 | ASSEMBLER_TEST_GENERATE(LoadHalfWordUnsignedUnaligned, assembler) { |
2399 | __ ldr(R1, R0, kUnsignedHalfword); |
2400 | __ mov(R0, R1); |
2401 | __ ret(); |
2402 | } |
2403 | |
2404 | ASSEMBLER_TEST_RUN(LoadHalfWordUnsignedUnaligned, test) { |
2405 | EXPECT(test != NULL); |
2406 | typedef intptr_t (*LoadHalfWordUnsignedUnaligned)(intptr_t) DART_UNUSED; |
2407 | uint8_t buffer[4] = { |
2408 | 0x89, 0xAB, 0xCD, 0xEF, |
2409 | }; |
2410 | |
2411 | EXPECT_EQ(0xAB89, EXECUTE_TEST_CODE_INTPTR_INTPTR( |
2412 | LoadHalfWordUnsignedUnaligned, test->entry(), |
2413 | reinterpret_cast<intptr_t>(&buffer[0]))); |
2414 | EXPECT_EQ(0xCDAB, EXECUTE_TEST_CODE_INTPTR_INTPTR( |
2415 | LoadHalfWordUnsignedUnaligned, test->entry(), |
2416 | reinterpret_cast<intptr_t>(&buffer[1]))); |
2417 | } |
2418 | |
2419 | ASSEMBLER_TEST_GENERATE(StoreHalfWordUnaligned, assembler) { |
2420 | __ LoadImmediate(R1, 0xABCD); |
2421 | __ str(R1, R0, kHalfword); |
2422 | __ mov(R0, R1); |
2423 | __ ret(); |
2424 | } |
2425 | |
2426 | ASSEMBLER_TEST_RUN(StoreHalfWordUnaligned, test) { |
2427 | EXPECT(test != NULL); |
2428 | typedef intptr_t (*StoreHalfWordUnaligned)(intptr_t) DART_UNUSED; |
2429 | uint8_t buffer[4] = { |
2430 | 0, 0, 0, 0, |
2431 | }; |
2432 | |
2433 | EXPECT_EQ(0xABCD, EXECUTE_TEST_CODE_INTPTR_INTPTR( |
2434 | StoreHalfWordUnaligned, test->entry(), |
2435 | reinterpret_cast<intptr_t>(&buffer[0]))); |
2436 | EXPECT_EQ(0xCD, buffer[0]); |
2437 | EXPECT_EQ(0xAB, buffer[1]); |
2438 | EXPECT_EQ(0, buffer[2]); |
2439 | |
2440 | EXPECT_EQ(0xABCD, EXECUTE_TEST_CODE_INTPTR_INTPTR( |
2441 | StoreHalfWordUnaligned, test->entry(), |
2442 | reinterpret_cast<intptr_t>(&buffer[1]))); |
2443 | EXPECT_EQ(0xCD, buffer[1]); |
2444 | EXPECT_EQ(0xAB, buffer[2]); |
2445 | EXPECT_EQ(0, buffer[3]); |
2446 | } |
2447 | |
2448 | ASSEMBLER_TEST_GENERATE(LoadWordUnaligned, assembler) { |
2449 | __ ldr(R1, R0, kUnsignedWord); |
2450 | __ mov(R0, R1); |
2451 | __ ret(); |
2452 | } |
2453 | |
2454 | ASSEMBLER_TEST_RUN(LoadWordUnaligned, test) { |
2455 | EXPECT(test != NULL); |
2456 | typedef int32_t (*LoadWordUnaligned)(intptr_t) DART_UNUSED; |
2457 | uint8_t buffer[8] = {0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0}; |
2458 | |
2459 | EXPECT_EQ( |
2460 | static_cast<int32_t>(0x78563412), |
2461 | EXECUTE_TEST_CODE_INT32_INTPTR(LoadWordUnaligned, test->entry(), |
2462 | reinterpret_cast<intptr_t>(&buffer[0]))); |
2463 | EXPECT_EQ( |
2464 | static_cast<int32_t>(0x9A785634), |
2465 | EXECUTE_TEST_CODE_INT32_INTPTR(LoadWordUnaligned, test->entry(), |
2466 | reinterpret_cast<intptr_t>(&buffer[1]))); |
2467 | EXPECT_EQ( |
2468 | static_cast<int32_t>(0xBC9A7856), |
2469 | EXECUTE_TEST_CODE_INT32_INTPTR(LoadWordUnaligned, test->entry(), |
2470 | reinterpret_cast<intptr_t>(&buffer[2]))); |
2471 | EXPECT_EQ( |
2472 | static_cast<int32_t>(0xDEBC9A78), |
2473 | EXECUTE_TEST_CODE_INT32_INTPTR(LoadWordUnaligned, test->entry(), |
2474 | reinterpret_cast<intptr_t>(&buffer[3]))); |
2475 | } |
2476 | |
2477 | ASSEMBLER_TEST_GENERATE(StoreWordUnaligned, assembler) { |
2478 | __ LoadImmediate(R1, 0x12345678); |
2479 | __ str(R1, R0, kUnsignedWord); |
2480 | __ mov(R0, R1); |
2481 | __ ret(); |
2482 | } |
2483 | |
2484 | ASSEMBLER_TEST_RUN(StoreWordUnaligned, test) { |
2485 | EXPECT(test != NULL); |
2486 | typedef intptr_t (*StoreWordUnaligned)(intptr_t) DART_UNUSED; |
2487 | uint8_t buffer[8] = {0, 0, 0, 0, 0, 0, 0, 0}; |
2488 | |
2489 | EXPECT_EQ(0x12345678, EXECUTE_TEST_CODE_INTPTR_INTPTR( |
2490 | StoreWordUnaligned, test->entry(), |
2491 | reinterpret_cast<intptr_t>(&buffer[0]))); |
2492 | EXPECT_EQ(0x78, buffer[0]); |
2493 | EXPECT_EQ(0x56, buffer[1]); |
2494 | EXPECT_EQ(0x34, buffer[2]); |
2495 | EXPECT_EQ(0x12, buffer[3]); |
2496 | |
2497 | EXPECT_EQ(0x12345678, EXECUTE_TEST_CODE_INTPTR_INTPTR( |
2498 | StoreWordUnaligned, test->entry(), |
2499 | reinterpret_cast<intptr_t>(&buffer[1]))); |
2500 | EXPECT_EQ(0x78, buffer[1]); |
2501 | EXPECT_EQ(0x56, buffer[2]); |
2502 | EXPECT_EQ(0x34, buffer[3]); |
2503 | EXPECT_EQ(0x12, buffer[4]); |
2504 | |
2505 | EXPECT_EQ(0x12345678, EXECUTE_TEST_CODE_INTPTR_INTPTR( |
2506 | StoreWordUnaligned, test->entry(), |
2507 | reinterpret_cast<intptr_t>(&buffer[2]))); |
2508 | EXPECT_EQ(0x78, buffer[2]); |
2509 | EXPECT_EQ(0x56, buffer[3]); |
2510 | EXPECT_EQ(0x34, buffer[4]); |
2511 | EXPECT_EQ(0x12, buffer[5]); |
2512 | |
2513 | EXPECT_EQ(0x12345678, EXECUTE_TEST_CODE_INTPTR_INTPTR( |
2514 | StoreWordUnaligned, test->entry(), |
2515 | reinterpret_cast<intptr_t>(&buffer[3]))); |
2516 | EXPECT_EQ(0x78, buffer[3]); |
2517 | EXPECT_EQ(0x56, buffer[4]); |
2518 | EXPECT_EQ(0x34, buffer[5]); |
2519 | EXPECT_EQ(0x12, buffer[6]); |
2520 | } |
2521 | |
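// EnterTestFrame/LeaveTestFrame save and restore the VM state registers, load
// CODE_REG from the code handle passed in R0 and THR from R1 (as provided by
// InvokeWithCodeAndThread), and set up BARRIER_MASK, NULL_REG and PP so that
// object-pool-based macros can be used inside a test.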
2522 | static void EnterTestFrame(Assembler* assembler) { |
2523 | __ EnterFrame(0); |
2524 | __ Push(CODE_REG); |
2525 | __ Push(THR); |
2526 | __ Push(BARRIER_MASK); |
2527 | __ Push(NULL_REG); |
2528 | __ TagAndPushPP(); |
2529 | __ ldr(CODE_REG, Address(R0, VMHandles::kOffsetOfRawPtrInHandle)); |
2530 | __ mov(THR, R1); |
2531 | __ ldr(BARRIER_MASK, Address(THR, Thread::write_barrier_mask_offset())); |
2532 | __ ldr(NULL_REG, Address(THR, Thread::object_null_offset())); |
2533 | __ LoadPoolPointer(PP); |
2534 | } |
2535 | |
2536 | static void LeaveTestFrame(Assembler* assembler) { |
2537 | __ PopAndUntagPP(); |
2538 | __ Pop(NULL_REG); |
2539 | __ Pop(BARRIER_MASK); |
2540 | __ Pop(THR); |
2541 | __ Pop(CODE_REG); |
2542 | __ LeaveFrame(); |
2543 | } |
2544 | |
2545 | // Loading immediate values with the object pool. |
2546 | ASSEMBLER_TEST_GENERATE(LoadImmediatePPSmall, assembler) { |
2547 | __ SetupDartSP(); |
2548 | EnterTestFrame(assembler); |
2549 | __ LoadImmediate(R0, 42); |
2550 | LeaveTestFrame(assembler); |
2551 | __ RestoreCSP(); |
2552 | __ ret(); |
2553 | } |
2554 | |
2555 | ASSEMBLER_TEST_RUN(LoadImmediatePPSmall, test) { |
2556 | EXPECT_EQ(42, test->InvokeWithCodeAndThread<int64_t>()); |
2557 | } |
2558 | |
2559 | ASSEMBLER_TEST_GENERATE(LoadImmediatePPMed, assembler) { |
2560 | __ SetupDartSP(); |
2561 | EnterTestFrame(assembler); |
2562 | __ LoadImmediate(R0, 0xf1234123); |
2563 | LeaveTestFrame(assembler); |
2564 | __ RestoreCSP(); |
2565 | __ ret(); |
2566 | } |
2567 | |
2568 | ASSEMBLER_TEST_RUN(LoadImmediatePPMed, test) { |
2569 | EXPECT_EQ(0xf1234123, test->InvokeWithCodeAndThread<int64_t>()); |
2570 | } |
2571 | |
2572 | ASSEMBLER_TEST_GENERATE(LoadImmediatePPMed2, assembler) { |
2573 | __ SetupDartSP(); |
2574 | EnterTestFrame(assembler); |
2575 | __ LoadImmediate(R0, 0x4321f1234124); |
2576 | LeaveTestFrame(assembler); |
2577 | __ RestoreCSP(); |
2578 | __ ret(); |
2579 | } |
2580 | |
2581 | ASSEMBLER_TEST_RUN(LoadImmediatePPMed2, test) { |
2582 | EXPECT_EQ(0x4321f1234124, test->InvokeWithCodeAndThread<int64_t>()); |
2583 | } |
2584 | |
2585 | ASSEMBLER_TEST_GENERATE(LoadImmediatePPLarge, assembler) { |
2586 | __ SetupDartSP(); |
2587 | EnterTestFrame(assembler); |
2588 | __ LoadImmediate(R0, 0x9287436598237465); |
2589 | LeaveTestFrame(assembler); |
2590 | __ RestoreCSP(); |
2591 | __ ret(); |
2592 | } |
2593 | |
2594 | ASSEMBLER_TEST_RUN(LoadImmediatePPLarge, test) { |
2595 | EXPECT_EQ(static_cast<int64_t>(0x9287436598237465), |
2596 | test->InvokeWithCodeAndThread<int64_t>()); |
2597 | } |
2598 | |
2599 | // LoadObject null. |
2600 | ASSEMBLER_TEST_GENERATE(LoadObjectNull, assembler) { |
2601 | __ SetupDartSP(); |
2602 | EnterTestFrame(assembler); |
2603 | __ LoadObject(R0, Object::null_object()); |
2604 | LeaveTestFrame(assembler); |
2605 | __ RestoreCSP(); |
2606 | __ ret(); |
2607 | } |
2608 | |
2609 | ASSEMBLER_TEST_RUN(LoadObjectNull, test) { |
2610 | EXPECT_EQ(static_cast<uword>(Object::null()), |
2611 | test->InvokeWithCodeAndThread<uword>()); |
2612 | } |
2613 | |
2614 | // PushObject null. |
2615 | ASSEMBLER_TEST_GENERATE(PushObjectNull, assembler) { |
2616 | __ SetupDartSP(); |
2617 | EnterTestFrame(assembler); |
2618 | __ PushObject(Object::null_object()); |
2619 | __ Pop(R0); |
2620 | LeaveTestFrame(assembler); |
2621 | __ RestoreCSP(); |
2622 | __ ret(); |
2623 | } |
2624 | |
2625 | ASSEMBLER_TEST_RUN(PushObjectNull, test) { |
2626 | EXPECT_EQ(static_cast<uword>(Object::null()), |
2627 | test->InvokeWithCodeAndThread<uword>()); |
2628 | } |
2629 | |
2630 | // CompareObject null. |
2631 | ASSEMBLER_TEST_GENERATE(CompareObjectNull, assembler) { |
2632 | __ SetupDartSP(); |
2633 | EnterTestFrame(assembler); |
2634 | __ LoadObject(R0, Object::bool_true()); |
2635 | __ LoadObject(R1, Object::bool_false()); |
2636 | __ ldr(R2, Address(THR, Thread::object_null_offset())); |
2637 | __ CompareObject(R2, Object::null_object()); |
2638 | __ csel(R0, R0, R1, EQ); |
2639 | LeaveTestFrame(assembler); |
2640 | __ RestoreCSP(); |
2641 | __ ret(); |
2642 | } |
2643 | |
2644 | ASSEMBLER_TEST_RUN(CompareObjectNull, test) { |
2645 | EXPECT_EQ(static_cast<uword>(Bool::True().raw()), |
2646 | test->InvokeWithCodeAndThread<uword>()); |
2647 | } |
2648 | |
2649 | ASSEMBLER_TEST_GENERATE(LoadObjectTrue, assembler) { |
2650 | __ SetupDartSP(); |
2651 | EnterTestFrame(assembler); |
2652 | __ LoadObject(R0, Bool::True()); |
2653 | LeaveTestFrame(assembler); |
2654 | __ RestoreCSP(); |
2655 | __ ret(); |
2656 | } |
2657 | |
2658 | ASSEMBLER_TEST_RUN(LoadObjectTrue, test) { |
2659 | EXPECT_EQ(static_cast<uword>(Bool::True().raw()), |
2660 | test->InvokeWithCodeAndThread<uword>()); |
2661 | } |
2662 | |
2663 | ASSEMBLER_TEST_GENERATE(LoadObjectFalse, assembler) { |
2664 | __ SetupDartSP(); |
2665 | EnterTestFrame(assembler); |
2666 | __ LoadObject(R0, Bool::False()); |
2667 | LeaveTestFrame(assembler); |
2668 | __ RestoreCSP(); |
2669 | __ ret(); |
2670 | } |
2671 | |
2672 | ASSEMBLER_TEST_RUN(LoadObjectFalse, test) { |
2673 | EXPECT_EQ(static_cast<uword>(Bool::False().raw()), |
2674 | test->InvokeWithCodeAndThread<uword>()); |
2675 | } |
2676 | |
2677 | ASSEMBLER_TEST_GENERATE(CSelTrue, assembler) { |
2678 | __ LoadImmediate(R1, 42); |
2679 | __ LoadImmediate(R2, 1234); |
2680 | __ CompareRegisters(R1, R2); |
2681 | __ csel(R0, R1, R2, LT); |
2682 | __ ret(); |
2683 | } |
2684 | |
2685 | ASSEMBLER_TEST_RUN(CSelTrue, test) { |
2686 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2687 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2688 | } |
2689 | |
2690 | ASSEMBLER_TEST_GENERATE(CSelFalse, assembler) { |
2691 | __ LoadImmediate(R1, 42); |
2692 | __ LoadImmediate(R2, 1234); |
2693 | __ CompareRegisters(R1, R2); |
2694 | __ csel(R0, R1, R2, GE); |
2695 | __ ret(); |
2696 | } |
2697 | |
2698 | ASSEMBLER_TEST_RUN(CSelFalse, test) { |
2699 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2700 | EXPECT_EQ(1234, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2701 | } |
2702 | |
2703 | ASSEMBLER_TEST_GENERATE(CsincFalse, assembler) { |
2704 | __ LoadImmediate(R1, 42); |
2705 | __ LoadImmediate(R2, 1234); |
2706 | __ CompareRegisters(R1, R2); |
2707 | __ csinc(R0, R2, R1, GE); |
2708 | __ ret(); |
2709 | } |
2710 | |
2711 | ASSEMBLER_TEST_RUN(CsincFalse, test) { |
2712 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2713 | EXPECT_EQ(43, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2714 | } |
2715 | |
2716 | ASSEMBLER_TEST_GENERATE(CsincTrue, assembler) { |
2717 | __ LoadImmediate(R1, 42); |
2718 | __ LoadImmediate(R2, 1234); |
2719 | __ CompareRegisters(R1, R2); |
2720 | __ csinc(R0, R2, R1, LT); |
2721 | __ ret(); |
2722 | } |
2723 | |
2724 | ASSEMBLER_TEST_RUN(CsincTrue, test) { |
2725 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2726 | EXPECT_EQ(1234, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2727 | } |
2728 | |
2729 | ASSEMBLER_TEST_GENERATE(CsinvFalse, assembler) { |
2730 | __ LoadImmediate(R1, 42); |
2731 | __ LoadImmediate(R2, 1234); |
2732 | __ CompareRegisters(R1, R2); |
2733 | __ csinv(R0, R2, R1, GE); |
2734 | __ ret(); |
2735 | } |
2736 | |
2737 | ASSEMBLER_TEST_RUN(CsinvFalse, test) { |
2738 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2739 | EXPECT_EQ(~42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2740 | } |
2741 | |
2742 | ASSEMBLER_TEST_GENERATE(CsinvTrue, assembler) { |
2743 | __ LoadImmediate(R1, 42); |
2744 | __ LoadImmediate(R2, 1234); |
2745 | __ CompareRegisters(R1, R2); |
2746 | __ csinv(R0, R2, R1, LT); |
2747 | __ ret(); |
2748 | } |
2749 | |
2750 | ASSEMBLER_TEST_RUN(CsinvTrue, test) { |
2751 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2752 | EXPECT_EQ(1234, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2753 | } |
2754 | |
2755 | ASSEMBLER_TEST_GENERATE(CsnegFalse, assembler) { |
2756 | __ LoadImmediate(R1, 42); |
2757 | __ LoadImmediate(R2, 1234); |
2758 | __ CompareRegisters(R1, R2); |
2759 | __ csneg(R0, R2, R1, GE); |
2760 | __ ret(); |
2761 | } |
2762 | |
2763 | ASSEMBLER_TEST_RUN(CsnegFalse, test) { |
2764 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2765 | EXPECT_EQ(-42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2766 | } |
2767 | |
2768 | ASSEMBLER_TEST_GENERATE(CsnegTrue, assembler) { |
2769 | __ LoadImmediate(R1, 42); |
2770 | __ LoadImmediate(R2, 1234); |
2771 | __ CompareRegisters(R1, R2); |
2772 | __ csneg(R0, R2, R1, LT); |
2773 | __ ret(); |
2774 | } |
2775 | |
2776 | ASSEMBLER_TEST_RUN(CsnegTrue, test) { |
2777 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2778 | EXPECT_EQ(1234, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2779 | } |
2780 | |
2781 | ASSEMBLER_TEST_GENERATE(Ubfx, assembler) { |
2782 | __ LoadImmediate(R1, 0x819); |
2783 | __ LoadImmediate(R0, 0x5a5a5a5a); // Overwritten. |
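  // ubfx extracts bits [11:4] of 0x819, i.e. 0x81, zero-extended.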
2784 | __ ubfx(R0, R1, 4, 8); |
2785 | __ ret(); |
2786 | } |
2787 | |
2788 | ASSEMBLER_TEST_RUN(Ubfx, test) { |
2789 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2790 | EXPECT_EQ(0x81, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2791 | } |
2792 | |
2793 | ASSEMBLER_TEST_GENERATE(Sbfx, assembler) { |
2794 | __ LoadImmediate(R1, 0x819); |
2795 | __ LoadImmediate(R0, 0x5a5a5a5a); // Overwritten. |
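  // sbfx extracts bits [11:4] of 0x819 (0x81) and sign-extends the 8-bit
  // value, giving -0x7f.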
2796 | __ sbfx(R0, R1, 4, 8); |
2797 | __ ret(); |
2798 | } |
2799 | |
2800 | ASSEMBLER_TEST_RUN(Sbfx, test) { |
2801 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2802 | EXPECT_EQ(-0x7f, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2803 | } |
2804 | |
2805 | ASSEMBLER_TEST_GENERATE(Bfi, assembler) { |
2806 | __ LoadImmediate(R1, 0x819); |
2807 | __ LoadImmediate(R0, 0x5a5a5a5a); |
2808 | __ bfi(R0, R1, 12, 5); |
2809 | __ ret(); |
2810 | } |
2811 | |
2812 | ASSEMBLER_TEST_RUN(Bfi, test) { |
2813 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2814 | EXPECT_EQ(0x5a5b9a5a, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2815 | } |
2816 | |
2817 | ASSEMBLER_TEST_GENERATE(Ubfiz, assembler) { |
2818 | __ LoadImmediate(R1, 0xff1248ff); |
2819 | __ LoadImmediate(R0, 0x5a5a5a5a); |
2820 | // Take 30 low bits and place at position 1 in R0, zeroing the rest. |
2821 | __ ubfiz(R0, R1, 1, 30); |
2822 | __ ret(); |
2823 | } |
2824 | |
2825 | ASSEMBLER_TEST_RUN(Ubfiz, test) { |
2826 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2827 | EXPECT_EQ(0x7e2491fe, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2828 | } |
2829 | |
2830 | ASSEMBLER_TEST_GENERATE(Bfxil, assembler) { |
2831 | __ LoadImmediate(R1, 0x819); |
2832 | __ LoadImmediate(R0, 0x5a5a5a5a); |
2833 | __ bfxil(R0, R1, 4, 8); |
2834 | __ ret(); |
2835 | } |
2836 | |
2837 | ASSEMBLER_TEST_RUN(Bfxil, test) { |
2838 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2839 | EXPECT_EQ(0x5a5a5a81, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2840 | } |
2841 | |
2842 | ASSEMBLER_TEST_GENERATE(Sbfiz, assembler) { |
2843 | __ LoadImmediate(R1, 0x819); |
2844 | __ LoadImmediate(R0, 0x5a5a5a5a); // Overwritten. |
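  // sbfiz sign-extends the low 12 bits of 0x819 (-0x7e7) and shifts the
  // result left by 4, giving -0x7e70.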
2845 | __ sbfiz(R0, R1, 4, 12); |
2846 | __ ret(); |
2847 | } |
2848 | |
2849 | ASSEMBLER_TEST_RUN(Sbfiz, test) { |
2850 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2851 | EXPECT_EQ(-0x7e70, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2852 | } |
2853 | |
2854 | ASSEMBLER_TEST_GENERATE(Sxtb, assembler) { |
2855 | __ LoadImmediate(R1, 0xff); |
2856 | __ LoadImmediate(R0, 0x5a5a5a5a); // Overwritten. |
2857 | __ sxtb(R0, R1); |
2858 | __ LoadImmediate(R2, 0x2a); |
2859 | __ LoadImmediate(R1, 0x5a5a5a5a); // Overwritten. |
2860 | __ sxtb(R1, R2); |
2861 | __ add(R0, R0, Operand(R1)); |
2862 | __ ret(); |
2863 | } |
2864 | |
2865 | ASSEMBLER_TEST_RUN(Sxtb, test) { |
2866 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2867 | EXPECT_EQ(0x29, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2868 | } |
2869 | |
2870 | ASSEMBLER_TEST_GENERATE(Sxth, assembler) { |
2871 | __ LoadImmediate(R1, 0xffff); |
2872 | __ LoadImmediate(R0, 0x5a5a5a5a); // Overwritten. |
2873 | __ sxth(R0, R1); |
2874 | __ LoadImmediate(R2, 0x1002a); |
2875 | __ LoadImmediate(R1, 0x5a5a5a5a); // Overwritten. |
2876 | __ sxth(R1, R2); |
2877 | __ add(R0, R0, Operand(R1)); |
2878 | __ ret(); |
2879 | } |
2880 | |
2881 | ASSEMBLER_TEST_RUN(Sxth, test) { |
2882 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2883 | EXPECT_EQ(0x29, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2884 | } |
2885 | |
2886 | ASSEMBLER_TEST_GENERATE(Sxtw, assembler) { |
2887 | __ LoadImmediate(R1, 0xffffffffll); |
2888 | __ LoadImmediate(R0, 0x5a5a5a5a); // Overwritten. |
2889 | __ sxtw(R0, R1); |
2890 | __ LoadImmediate(R2, 0x10000002all); |
2891 | __ LoadImmediate(R1, 0x5a5a5a5a); // Overwritten. |
2892 | __ sxtw(R1, R2); |
2893 | __ add(R0, R0, Operand(R1)); |
2894 | __ ret(); |
2895 | } |
2896 | |
2897 | ASSEMBLER_TEST_RUN(Sxtw, test) { |
2898 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2899 | EXPECT_EQ(0x29, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2900 | } |
2901 | |
2902 | ASSEMBLER_TEST_GENERATE(Uxtw, assembler) { |
2903 | __ LoadImmediate(R1, 0xffffffffll); |
2904 | __ LoadImmediate(R0, 0x5a5a5a5a); // Overwritten. |
2905 | __ ubfiz(R0, R1, 0, 32); // Zero extend word. |
2906 | __ LoadImmediate(R2, 0x10000002all); |
2907 | __ LoadImmediate(R1, 0x5a5a5a5a); // Overwritten. |
2908 | __ ubfiz(R1, R2, 0, 32); // Zero extend word. |
2909 | __ add(R0, R0, Operand(R1)); |
2910 | __ ret(); |
2911 | } |
2912 | |
2913 | ASSEMBLER_TEST_RUN(Uxtw, test) { |
2914 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2915 | EXPECT_EQ(0xffffffffll + 42, |
2916 | EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2917 | } |
2918 | |
2919 | ASSEMBLER_TEST_GENERATE(Uxtb, assembler) { |
2920 | __ LoadImmediate(R1, -1); |
2921 | __ LoadImmediate(R0, 0x5a5a5a5a); // Overwritten. |
2922 | __ uxtb(R0, R1); |
2923 | __ LoadImmediate(R2, 0x12a); |
2924 | __ LoadImmediate(R1, 0x5a5a5a5a); // Overwritten. |
2925 | __ uxtb(R1, R2); |
2926 | __ add(R0, R0, Operand(R1)); |
2927 | __ ret(); |
2928 | } |
2929 | |
2930 | ASSEMBLER_TEST_RUN(Uxtb, test) { |
2931 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2932 | EXPECT_EQ(0xff + 0x2a, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2933 | } |
2934 | |
2935 | ASSEMBLER_TEST_GENERATE(Uxth, assembler) { |
2936 | __ LoadImmediate(R1, -1); |
2937 | __ LoadImmediate(R0, 0x5a5a5a5a); // Overwritten. |
2938 | __ uxth(R0, R1); |
2939 | __ LoadImmediate(R2, 0x1002a); |
2940 | __ LoadImmediate(R1, 0x5a5a5a5a); // Overwritten. |
2941 | __ uxth(R1, R2); |
2942 | __ add(R0, R0, Operand(R1)); |
2943 | __ ret(); |
2944 | } |
2945 | |
2946 | ASSEMBLER_TEST_RUN(Uxth, test) { |
2947 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2948 | EXPECT_EQ(0xffff + 0x2a, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2949 | } |
2950 | |
2951 | // Floating point move immediate, to/from integer register. |
2952 | ASSEMBLER_TEST_GENERATE(Fmovdi, assembler) { |
2953 | __ LoadDImmediate(V0, 1.0); |
2954 | __ ret(); |
2955 | } |
2956 | |
2957 | ASSEMBLER_TEST_RUN(Fmovdi, test) { |
2958 | typedef double (*DoubleReturn)() DART_UNUSED; |
2959 | EXPECT_EQ(1.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
2960 | } |
2961 | |
2962 | ASSEMBLER_TEST_GENERATE(Fmovdi2, assembler) { |
2963 | __ LoadDImmediate(V0, 123412983.1324524315); |
2964 | __ ret(); |
2965 | } |
2966 | |
2967 | ASSEMBLER_TEST_RUN(Fmovdi2, test) { |
2968 | typedef double (*DoubleReturn)() DART_UNUSED; |
2969 | EXPECT_FLOAT_EQ(123412983.1324524315, |
2970 | EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry()), |
2971 | 0.0001f); |
2972 | } |
2973 | |
2974 | ASSEMBLER_TEST_GENERATE(Fmovrd, assembler) { |
2975 | __ LoadDImmediate(V1, 1.0); |
2976 | __ fmovrd(R0, V1); |
2977 | __ ret(); |
2978 | } |
2979 | |
2980 | ASSEMBLER_TEST_RUN(Fmovrd, test) { |
2981 | typedef int64_t (*Int64Return)() DART_UNUSED; |
2982 | const int64_t one = bit_cast<int64_t, double>(1.0); |
2983 | EXPECT_EQ(one, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
2984 | } |
2985 | |
2986 | ASSEMBLER_TEST_GENERATE(Fmovdr, assembler) { |
2987 | __ LoadDImmediate(V1, 1.0); |
2988 | __ fmovrd(R1, V1); |
2989 | __ fmovdr(V0, R1); |
2990 | __ ret(); |
2991 | } |
2992 | |
2993 | ASSEMBLER_TEST_RUN(Fmovdr, test) { |
2994 | typedef double (*DoubleReturn)() DART_UNUSED; |
2995 | EXPECT_EQ(1.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
2996 | } |
2997 | |
2998 | ASSEMBLER_TEST_GENERATE(Fmovrs, assembler) { |
2999 | __ LoadDImmediate(V2, 1.0); |
3000 | __ fcvtsd(V1, V2); |
3001 | __ fmovrs(R0, V1); |
3002 | __ ret(); |
3003 | } |
3004 | |
3005 | ASSEMBLER_TEST_RUN(Fmovrs, test) { |
3006 | typedef int64_t (*Int64Return)() DART_UNUSED; |
3007 | int64_t result = EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()); |
3008 | const uint32_t one = bit_cast<uint32_t, float>(1.0f); |
3009 | EXPECT_EQ(one, static_cast<uint32_t>(result)); |
3010 | } |
3011 | |
3012 | ASSEMBLER_TEST_GENERATE(Fmovsr, assembler) { |
3013 | __ LoadImmediate(R2, bit_cast<uint32_t, float>(1.0f)); |
3014 | __ fmovsr(V1, R2); |
3015 | __ fmovrs(R0, V1); |
3016 | __ ret(); |
3017 | } |
3018 | |
3019 | ASSEMBLER_TEST_RUN(Fmovsr, test) { |
3020 | typedef int64_t (*Int64Return)() DART_UNUSED; |
3021 | int64_t result = EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()); |
3022 | const uint32_t one = bit_cast<uint32_t, float>(1.0f); |
3023 | EXPECT_EQ(one, static_cast<uint32_t>(result)); |
3024 | } |
3025 | |
3026 | ASSEMBLER_TEST_GENERATE(FldrdFstrdPrePostIndex, assembler) { |
3027 | __ SetupDartSP(); |
3028 | |
3029 | __ sub(CSP, CSP, |
3030 | Operand(2 * target::kWordSize)); // Must not access beyond CSP. |
3031 | |
3032 | __ LoadDImmediate(V1, 42.0); |
3033 | __ fstrd(V1, Address(SP, -1 * target::kWordSize, Address::PreIndex)); |
3034 | __ fldrd(V0, Address(SP, 1 * target::kWordSize, Address::PostIndex)); |
3035 | __ RestoreCSP(); |
3036 | __ ret(); |
3037 | } |
3038 | |
3039 | ASSEMBLER_TEST_RUN(FldrdFstrdPrePostIndex, test) { |
3040 | typedef double (*DoubleReturn)() DART_UNUSED; |
3041 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3042 | } |
3043 | |
3044 | ASSEMBLER_TEST_GENERATE(FldrsFstrsPrePostIndex, assembler) { |
3045 | __ SetupDartSP(); |
3046 | |
3047 | __ sub(CSP, CSP, |
3048 | Operand(2 * target::kWordSize)); // Must not access beyond CSP. |
3049 | |
3050 | __ LoadDImmediate(V1, 42.0); |
3051 | __ fcvtsd(V2, V1); |
3052 | __ fstrs(V2, Address(SP, -1 * target::kWordSize, Address::PreIndex)); |
3053 | __ fldrs(V3, Address(SP, 1 * target::kWordSize, Address::PostIndex)); |
3054 | __ fcvtds(V0, V3); |
3055 | __ RestoreCSP(); |
3056 | __ ret(); |
3057 | } |
3058 | |
3059 | ASSEMBLER_TEST_RUN(FldrsFstrsPrePostIndex, test) { |
3060 | typedef double (*DoubleReturn)() DART_UNUSED; |
3061 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3062 | } |
3063 | |
3064 | ASSEMBLER_TEST_GENERATE(FldrqFstrqPrePostIndex, assembler) { |
3065 | __ SetupDartSP(); |
3066 | |
3067 | __ sub(CSP, CSP, |
3068 | Operand(2 * target::kWordSize)); // Must not access beyond CSP. |
3069 | |
3070 | __ LoadDImmediate(V1, 21.0); |
3071 | __ LoadDImmediate(V2, 21.0); |
3072 | __ LoadImmediate(R1, 42); |
3073 | __ Push(R1); |
3074 | __ PushDouble(V1); |
3075 | __ PushDouble(V2); |
3076 | __ fldrq(V3, Address(SP, 2 * target::kWordSize, Address::PostIndex)); |
3077 | __ Pop(R0); |
3078 | __ fstrq(V3, Address(SP, -2 * target::kWordSize, Address::PreIndex)); |
3079 | __ PopDouble(V0); |
3080 | __ PopDouble(V1); |
3081 | __ faddd(V0, V0, V1); |
3082 | __ RestoreCSP(); |
3083 | __ ret(); |
3084 | } |
3085 | |
3086 | ASSEMBLER_TEST_RUN(FldrqFstrqPrePostIndex, test) { |
3087 | typedef double (*DoubleReturn)() DART_UNUSED; |
3088 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3089 | } |
3090 | |
3091 | ASSEMBLER_TEST_GENERATE(Fcvtzds, assembler) { |
3092 | __ LoadDImmediate(V0, 42.0); |
3093 | __ fcvtzds(R0, V0); |
3094 | __ ret(); |
3095 | } |
3096 | |
3097 | ASSEMBLER_TEST_RUN(Fcvtzds, test) { |
3098 | typedef int64_t (*Int64Return)() DART_UNUSED; |
3099 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
3100 | } |
3101 | |
3102 | ASSEMBLER_TEST_GENERATE(Scvtfdx, assembler) { |
3103 | __ LoadImmediate(R0, 42); |
3104 | __ scvtfdx(V0, R0); |
3105 | __ ret(); |
3106 | } |
3107 | |
3108 | ASSEMBLER_TEST_RUN(Scvtfdx, test) { |
3109 | typedef double (*DoubleReturn)() DART_UNUSED; |
3110 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3111 | } |
3112 | |
3113 | ASSEMBLER_TEST_GENERATE(Scvtfdw, assembler) { |
  // Fill the upper 32 bits with garbage.
3115 | __ LoadImmediate(R0, 0x111111110000002A); |
3116 | __ scvtfdw(V0, R0); |
3117 | __ ret(); |
3118 | } |
3119 | |
3120 | ASSEMBLER_TEST_RUN(Scvtfdw, test) { |
3121 | typedef double (*DoubleReturn)() DART_UNUSED; |
3122 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3123 | } |
3124 | |
3125 | ASSEMBLER_TEST_GENERATE(FabsdPos, assembler) { |
3126 | __ LoadDImmediate(V1, 42.0); |
3127 | __ fabsd(V0, V1); |
3128 | __ ret(); |
3129 | } |
3130 | |
3131 | ASSEMBLER_TEST_RUN(FabsdPos, test) { |
3132 | typedef double (*DoubleReturn)() DART_UNUSED; |
3133 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3134 | } |
3135 | |
3136 | ASSEMBLER_TEST_GENERATE(FabsdNeg, assembler) { |
3137 | __ LoadDImmediate(V1, -42.0); |
3138 | __ fabsd(V0, V1); |
3139 | __ ret(); |
3140 | } |
3141 | |
3142 | ASSEMBLER_TEST_RUN(FabsdNeg, test) { |
3143 | typedef double (*DoubleReturn)() DART_UNUSED; |
3144 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3145 | } |
3146 | |
3147 | ASSEMBLER_TEST_GENERATE(FnegdPos, assembler) { |
3148 | __ LoadDImmediate(V1, 42.0); |
3149 | __ fnegd(V0, V1); |
3150 | __ ret(); |
3151 | } |
3152 | |
3153 | ASSEMBLER_TEST_RUN(FnegdPos, test) { |
3154 | typedef double (*DoubleReturn)() DART_UNUSED; |
3155 | EXPECT_EQ(-42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3156 | } |
3157 | |
3158 | ASSEMBLER_TEST_GENERATE(FnegdNeg, assembler) { |
3159 | __ LoadDImmediate(V1, -42.0); |
3160 | __ fnegd(V0, V1); |
3161 | __ ret(); |
3162 | } |
3163 | |
3164 | ASSEMBLER_TEST_RUN(FnegdNeg, test) { |
3165 | typedef double (*DoubleReturn)() DART_UNUSED; |
3166 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3167 | } |
3168 | |
3169 | ASSEMBLER_TEST_GENERATE(Fsqrtd, assembler) { |
3170 | __ LoadDImmediate(V1, 64.0); |
3171 | __ fsqrtd(V0, V1); |
3172 | __ ret(); |
3173 | } |
3174 | |
3175 | ASSEMBLER_TEST_RUN(Fsqrtd, test) { |
3176 | typedef double (*DoubleReturn)() DART_UNUSED; |
3177 | EXPECT_EQ(8.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3178 | } |
3179 | |
3180 | ASSEMBLER_TEST_GENERATE(Fmuld, assembler) { |
3181 | __ LoadDImmediate(V1, 84.0); |
3182 | __ LoadDImmediate(V2, 0.5); |
3183 | __ fmuld(V0, V1, V2); |
3184 | __ ret(); |
3185 | } |
3186 | |
3187 | ASSEMBLER_TEST_RUN(Fmuld, test) { |
3188 | typedef double (*DoubleReturn)() DART_UNUSED; |
3189 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3190 | } |
3191 | |
3192 | ASSEMBLER_TEST_GENERATE(Fdivd, assembler) { |
3193 | __ LoadDImmediate(V1, 84.0); |
3194 | __ LoadDImmediate(V2, 2.0); |
3195 | __ fdivd(V0, V1, V2); |
3196 | __ ret(); |
3197 | } |
3198 | |
3199 | ASSEMBLER_TEST_RUN(Fdivd, test) { |
3200 | typedef double (*DoubleReturn)() DART_UNUSED; |
3201 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3202 | } |
3203 | |
3204 | ASSEMBLER_TEST_GENERATE(Faddd, assembler) { |
3205 | __ LoadDImmediate(V1, 41.5); |
3206 | __ LoadDImmediate(V2, 0.5); |
3207 | __ faddd(V0, V1, V2); |
3208 | __ ret(); |
3209 | } |
3210 | |
3211 | ASSEMBLER_TEST_RUN(Faddd, test) { |
3212 | typedef double (*DoubleReturn)() DART_UNUSED; |
3213 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3214 | } |
3215 | |
3216 | ASSEMBLER_TEST_GENERATE(Fsubd, assembler) { |
3217 | __ LoadDImmediate(V1, 42.5); |
3218 | __ LoadDImmediate(V2, 0.5); |
3219 | __ fsubd(V0, V1, V2); |
3220 | __ ret(); |
3221 | } |
3222 | |
3223 | ASSEMBLER_TEST_RUN(Fsubd, test) { |
3224 | typedef double (*DoubleReturn)() DART_UNUSED; |
3225 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3226 | } |
3227 | |
3228 | ASSEMBLER_TEST_GENERATE(FldrdFstrdHeapTag, assembler) { |
3229 | __ SetupDartSP(); |
3230 | __ LoadDImmediate(V0, 43.0); |
3231 | __ LoadDImmediate(V1, 42.0); |
3232 | __ AddImmediate(SP, SP, -1 * target::kWordSize); |
3233 | __ add(R2, SP, Operand(1)); |
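  // R2 = SP + 1 mimics a tagged heap pointer (low bit set); the -1 offsets
  // below untag it again.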
3234 | __ fstrd(V1, Address(R2, -1)); |
3235 | __ fldrd(V0, Address(R2, -1)); |
3236 | __ AddImmediate(SP, 1 * target::kWordSize); |
3237 | __ RestoreCSP(); |
3238 | __ ret(); |
3239 | } |
3240 | |
3241 | ASSEMBLER_TEST_RUN(FldrdFstrdHeapTag, test) { |
3242 | typedef double (*DoubleReturn)() DART_UNUSED; |
3243 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3244 | } |
3245 | |
3246 | ASSEMBLER_TEST_GENERATE(FldrdFstrdLargeIndex, assembler) { |
3247 | __ SetupDartSP(); |
3248 | |
3249 | __ sub(CSP, CSP, |
3250 | Operand(32 * target::kWordSize)); // Must not access beyond CSP. |
3251 | |
3252 | __ LoadDImmediate(V0, 43.0); |
3253 | __ LoadDImmediate(V1, 42.0); |
3254 | // Largest negative offset that can fit in the signed 9-bit immediate field. |
3255 | __ fstrd(V1, Address(SP, -32 * target::kWordSize, Address::PreIndex)); |
3256 | // Largest positive kWordSize aligned offset that we can fit. |
3257 | __ fldrd(V0, Address(SP, 31 * target::kWordSize, Address::PostIndex)); |
3258 | // Correction. |
3259 | __ add(SP, SP, Operand(target::kWordSize)); // Restore SP. |
3260 | __ RestoreCSP(); |
3261 | __ ret(); |
3262 | } |
3263 | |
3264 | ASSEMBLER_TEST_RUN(FldrdFstrdLargeIndex, test) { |
3265 | typedef double (*DoubleReturn)() DART_UNUSED; |
3266 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3267 | } |
3268 | |
3269 | ASSEMBLER_TEST_GENERATE(FldrdFstrdLargeOffset, assembler) { |
3270 | __ SetupDartSP(); |
3271 | __ LoadDImmediate(V0, 43.0); |
3272 | __ LoadDImmediate(V1, 42.0); |
3273 | __ sub(SP, SP, Operand(512 * target::kWordSize)); |
3274 | __ andi(CSP, SP, Immediate(~15)); // Must not access beyond CSP. |
3275 | __ fstrd(V1, Address(SP, 512 * target::kWordSize, Address::Offset)); |
3276 | __ add(SP, SP, Operand(512 * target::kWordSize)); |
3277 | __ fldrd(V0, Address(SP)); |
3278 | __ RestoreCSP(); |
3279 | __ ret(); |
3280 | } |
3281 | |
3282 | ASSEMBLER_TEST_RUN(FldrdFstrdLargeOffset, test) { |
3283 | typedef double (*DoubleReturn)() DART_UNUSED; |
3284 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3285 | } |
3286 | |
3287 | ASSEMBLER_TEST_GENERATE(FldrdFstrdExtReg, assembler) { |
3288 | __ SetupDartSP(); |
3289 | __ LoadDImmediate(V0, 43.0); |
3290 | __ LoadDImmediate(V1, 42.0); |
3291 | __ movz(R2, Immediate(0xfff8), 0); |
3292 | __ movk(R2, Immediate(0xffff), 1); // R2 <- -8 (int32_t). |
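// Composition: 0xfff8 | (0xffff << 16) = 0xfffffff8, which is -8 when
// interpreted as a signed 32-bit value and sign-extended by SXTW below.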
// The extended-register form should sign-extend R2 and add it to SP,
// i.e. the effective address is SP - kWordSize.
3295 | __ fstrd(V1, Address(SP, R2, SXTW)); |
3296 | __ sub(SP, SP, Operand(target::kWordSize)); |
3297 | __ andi(CSP, SP, Immediate(~15)); // Must not access beyond CSP. |
3298 | __ fldrd(V0, Address(SP)); |
3299 | __ add(SP, SP, Operand(target::kWordSize)); |
3300 | __ RestoreCSP(); |
3301 | __ ret(); |
3302 | } |
3303 | |
3304 | ASSEMBLER_TEST_RUN(FldrdFstrdExtReg, test) { |
3305 | typedef double (*DoubleReturn)() DART_UNUSED; |
3306 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3307 | } |
3308 | |
3309 | ASSEMBLER_TEST_GENERATE(FldrdFstrdScaledReg, assembler) { |
3310 | __ SetupDartSP(); |
3311 | __ LoadDImmediate(V0, 43.0); |
3312 | __ LoadDImmediate(V1, 42.0); |
3313 | __ movz(R2, Immediate(10), 0); |
3314 | __ sub(SP, SP, Operand(10 * target::kWordSize)); |
3315 | __ andi(CSP, SP, Immediate(~15)); // Must not access beyond CSP. |
3316 | // Store V1 into SP + R2 * kWordSize. |
3317 | __ fstrd(V1, Address(SP, R2, UXTX, Address::Scaled)); |
3318 | __ fldrd(V0, Address(SP, R2, UXTX, Address::Scaled)); |
3319 | __ add(SP, SP, Operand(10 * target::kWordSize)); |
3320 | __ RestoreCSP(); |
3321 | __ ret(); |
3322 | } |
3323 | |
3324 | ASSEMBLER_TEST_RUN(FldrdFstrdScaledReg, test) { |
3325 | typedef double (*DoubleReturn)() DART_UNUSED; |
3326 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3327 | } |
3328 | |
3329 | ASSEMBLER_TEST_GENERATE(VinswVmovrs, assembler) { |
3330 | __ LoadImmediate(R0, 42); |
3331 | __ LoadImmediate(R1, 43); |
3332 | __ LoadImmediate(R2, 44); |
3333 | __ LoadImmediate(R3, 45); |
3334 | |
3335 | __ vinsw(V0, 0, R0); |
3336 | __ vinsw(V0, 1, R1); |
3337 | __ vinsw(V0, 2, R2); |
3338 | __ vinsw(V0, 3, R3); |
3339 | |
3340 | __ vmovrs(R4, V0, 0); |
3341 | __ vmovrs(R5, V0, 1); |
3342 | __ vmovrs(R6, V0, 2); |
3343 | __ vmovrs(R7, V0, 3); |
3344 | |
3345 | __ add(R0, R4, Operand(R5)); |
3346 | __ add(R0, R0, Operand(R6)); |
3347 | __ add(R0, R0, Operand(R7)); |
3348 | __ ret(); |
3349 | } |
3350 | |
3351 | ASSEMBLER_TEST_RUN(VinswVmovrs, test) { |
3352 | EXPECT(test != NULL); |
3353 | typedef int64_t (*Int64Return)() DART_UNUSED; |
3354 | EXPECT_EQ(174, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
3355 | } |
3356 | |
3357 | ASSEMBLER_TEST_GENERATE(VinsxVmovrd, assembler) { |
3358 | __ LoadImmediate(R0, 42); |
3359 | __ LoadImmediate(R1, 43); |
3360 | |
3361 | __ vinsx(V0, 0, R0); |
3362 | __ vinsx(V0, 1, R1); |
3363 | |
3364 | __ vmovrd(R2, V0, 0); |
3365 | __ vmovrd(R3, V0, 1); |
3366 | |
3367 | __ add(R0, R2, Operand(R3)); |
3368 | __ ret(); |
3369 | } |
3370 | |
3371 | ASSEMBLER_TEST_RUN(VinsxVmovrd, test) { |
3372 | EXPECT(test != NULL); |
3373 | typedef int64_t (*Int64Return)() DART_UNUSED; |
3374 | EXPECT_EQ(85, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
3375 | } |
3376 | |
3377 | ASSEMBLER_TEST_GENERATE(Vnot, assembler) { |
3378 | __ LoadImmediate(R0, 0xfffffffe); |
3379 | __ LoadImmediate(R1, 0xffffffff); |
3380 | __ vinsw(V1, 0, R1); |
3381 | __ vinsw(V1, 1, R0); |
3382 | __ vinsw(V1, 2, R1); |
3383 | __ vinsw(V1, 3, R0); |
3384 | |
3385 | __ vnot(V0, V1); |
3386 | |
3387 | __ vmovrs(R2, V0, 0); |
3388 | __ vmovrs(R3, V0, 1); |
3389 | __ vmovrs(R4, V0, 2); |
3390 | __ vmovrs(R5, V0, 3); |
3391 | __ add(R0, R2, Operand(R3)); |
3392 | __ add(R0, R0, Operand(R4)); |
3393 | __ add(R0, R0, Operand(R5)); |
3394 | __ ret(); |
3395 | } |
3396 | |
3397 | ASSEMBLER_TEST_RUN(Vnot, test) { |
3398 | EXPECT(test != NULL); |
3399 | typedef int64_t (*Int64Return)() DART_UNUSED; |
3400 | EXPECT_EQ(2, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
3401 | } |
3402 | |
3403 | ASSEMBLER_TEST_GENERATE(Vabss, assembler) { |
3404 | __ LoadDImmediate(V1, 21.0); |
3405 | __ LoadDImmediate(V2, -21.0); |
3406 | |
3407 | __ fcvtsd(V1, V1); |
3408 | __ fcvtsd(V2, V2); |
3409 | |
3410 | __ veor(V3, V3, V3); |
3411 | __ vinss(V3, 1, V1, 0); |
3412 | __ vinss(V3, 3, V2, 0); |
3413 | |
3414 | __ vabss(V4, V3); |
3415 | |
3416 | __ vinss(V5, 0, V4, 1); |
3417 | __ vinss(V6, 0, V4, 3); |
3418 | |
3419 | __ fcvtds(V5, V5); |
3420 | __ fcvtds(V6, V6); |
3421 | |
3422 | __ faddd(V0, V5, V6); |
3423 | __ ret(); |
3424 | } |
3425 | |
3426 | ASSEMBLER_TEST_RUN(Vabss, test) { |
3427 | typedef double (*DoubleReturn)() DART_UNUSED; |
3428 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3429 | } |
3430 | |
3431 | ASSEMBLER_TEST_GENERATE(Vabsd, assembler) { |
3432 | __ LoadDImmediate(V1, 21.0); |
3433 | __ LoadDImmediate(V2, -21.0); |
3434 | |
3435 | __ vinsd(V3, 0, V1, 0); |
3436 | __ vinsd(V3, 1, V2, 0); |
3437 | |
3438 | __ vabsd(V4, V3); |
3439 | |
3440 | __ vinsd(V5, 0, V4, 0); |
3441 | __ vinsd(V6, 0, V4, 1); |
3442 | |
3443 | __ faddd(V0, V5, V6); |
3444 | __ ret(); |
3445 | } |
3446 | |
3447 | ASSEMBLER_TEST_RUN(Vabsd, test) { |
3448 | typedef double (*DoubleReturn)() DART_UNUSED; |
3449 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3450 | } |
3451 | |
3452 | ASSEMBLER_TEST_GENERATE(Vnegs, assembler) { |
3453 | __ LoadDImmediate(V1, 42.0); |
3454 | __ LoadDImmediate(V2, -84.0); |
3455 | |
3456 | __ fcvtsd(V1, V1); |
3457 | __ fcvtsd(V2, V2); |
3458 | |
3459 | __ veor(V3, V3, V3); |
3460 | __ vinss(V3, 1, V1, 0); |
3461 | __ vinss(V3, 3, V2, 0); |
3462 | |
3463 | __ vnegs(V4, V3); |
3464 | |
3465 | __ vinss(V5, 0, V4, 1); |
3466 | __ vinss(V6, 0, V4, 3); |
3467 | |
3468 | __ fcvtds(V5, V5); |
3469 | __ fcvtds(V6, V6); |
3470 | __ faddd(V0, V5, V6); |
3471 | __ ret(); |
3472 | } |
3473 | |
3474 | ASSEMBLER_TEST_RUN(Vnegs, test) { |
3475 | typedef double (*DoubleReturn)() DART_UNUSED; |
3476 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3477 | } |
3478 | |
3479 | ASSEMBLER_TEST_GENERATE(Vnegd, assembler) { |
3480 | __ LoadDImmediate(V1, 42.0); |
3481 | __ LoadDImmediate(V2, -84.0); |
3482 | |
3483 | __ vinsd(V3, 0, V1, 0); |
3484 | __ vinsd(V3, 1, V2, 0); |
3485 | |
3486 | __ vnegd(V4, V3); |
3487 | |
3488 | __ vinsd(V5, 0, V4, 0); |
3489 | __ vinsd(V6, 0, V4, 1); |
3490 | |
3491 | __ faddd(V0, V5, V6); |
3492 | __ ret(); |
3493 | } |
3494 | |
3495 | ASSEMBLER_TEST_RUN(Vnegd, test) { |
3496 | typedef double (*DoubleReturn)() DART_UNUSED; |
3497 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3498 | } |
3499 | |
3500 | ASSEMBLER_TEST_GENERATE(Vadds, assembler) { |
3501 | __ LoadDImmediate(V0, 0.0); |
3502 | __ LoadDImmediate(V1, 1.0); |
3503 | __ LoadDImmediate(V2, 2.0); |
3504 | __ LoadDImmediate(V3, 3.0); |
3505 | |
3506 | __ fcvtsd(V0, V0); |
3507 | __ fcvtsd(V1, V1); |
3508 | __ fcvtsd(V2, V2); |
3509 | __ fcvtsd(V3, V3); |
3510 | |
3511 | __ vinss(V4, 0, V0, 0); |
3512 | __ vinss(V4, 1, V1, 0); |
3513 | __ vinss(V4, 2, V2, 0); |
3514 | __ vinss(V4, 3, V3, 0); |
3515 | |
3516 | __ vadds(V5, V4, V4); |
3517 | |
3518 | __ vinss(V0, 0, V5, 0); |
3519 | __ vinss(V1, 0, V5, 1); |
3520 | __ vinss(V2, 0, V5, 2); |
3521 | __ vinss(V3, 0, V5, 3); |
3522 | |
3523 | __ fcvtds(V0, V0); |
3524 | __ fcvtds(V1, V1); |
3525 | __ fcvtds(V2, V2); |
3526 | __ fcvtds(V3, V3); |
3527 | |
3528 | __ faddd(V0, V0, V1); |
3529 | __ faddd(V0, V0, V2); |
3530 | __ faddd(V0, V0, V3); |
3531 | __ ret(); |
3532 | } |
3533 | |
3534 | ASSEMBLER_TEST_RUN(Vadds, test) { |
3535 | typedef double (*DoubleReturn)() DART_UNUSED; |
3536 | EXPECT_EQ(12.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3537 | } |
3538 | |
3539 | ASSEMBLER_TEST_GENERATE(Vsubs, assembler) { |
3540 | __ LoadDImmediate(V0, 0.0); |
3541 | __ LoadDImmediate(V1, 1.0); |
3542 | __ LoadDImmediate(V2, 2.0); |
3543 | __ LoadDImmediate(V3, 3.0); |
3544 | __ LoadDImmediate(V5, 0.0); |
3545 | |
3546 | __ fcvtsd(V0, V0); |
3547 | __ fcvtsd(V1, V1); |
3548 | __ fcvtsd(V2, V2); |
3549 | __ fcvtsd(V3, V3); |
3550 | |
3551 | __ vinss(V4, 0, V0, 0); |
3552 | __ vinss(V4, 1, V1, 0); |
3553 | __ vinss(V4, 2, V2, 0); |
3554 | __ vinss(V4, 3, V3, 0); |
3555 | |
3556 | __ vsubs(V5, V5, V4); |
3557 | |
3558 | __ vinss(V0, 0, V5, 0); |
3559 | __ vinss(V1, 0, V5, 1); |
3560 | __ vinss(V2, 0, V5, 2); |
3561 | __ vinss(V3, 0, V5, 3); |
3562 | |
3563 | __ fcvtds(V0, V0); |
3564 | __ fcvtds(V1, V1); |
3565 | __ fcvtds(V2, V2); |
3566 | __ fcvtds(V3, V3); |
3567 | |
3568 | __ faddd(V0, V0, V1); |
3569 | __ faddd(V0, V0, V2); |
3570 | __ faddd(V0, V0, V3); |
3571 | __ ret(); |
3572 | } |
3573 | |
3574 | ASSEMBLER_TEST_RUN(Vsubs, test) { |
3575 | typedef double (*DoubleReturn)() DART_UNUSED; |
3576 | EXPECT_EQ(-6.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3577 | } |
3578 | |
3579 | ASSEMBLER_TEST_GENERATE(Vmuls, assembler) { |
3580 | __ LoadDImmediate(V0, 0.0); |
3581 | __ LoadDImmediate(V1, 1.0); |
3582 | __ LoadDImmediate(V2, 2.0); |
3583 | __ LoadDImmediate(V3, 3.0); |
3584 | |
3585 | __ fcvtsd(V0, V0); |
3586 | __ fcvtsd(V1, V1); |
3587 | __ fcvtsd(V2, V2); |
3588 | __ fcvtsd(V3, V3); |
3589 | |
3590 | __ vinss(V4, 0, V0, 0); |
3591 | __ vinss(V4, 1, V1, 0); |
3592 | __ vinss(V4, 2, V2, 0); |
3593 | __ vinss(V4, 3, V3, 0); |
3594 | |
3595 | __ vmuls(V5, V4, V4); |
3596 | |
3597 | __ vinss(V0, 0, V5, 0); |
3598 | __ vinss(V1, 0, V5, 1); |
3599 | __ vinss(V2, 0, V5, 2); |
3600 | __ vinss(V3, 0, V5, 3); |
3601 | |
3602 | __ fcvtds(V0, V0); |
3603 | __ fcvtds(V1, V1); |
3604 | __ fcvtds(V2, V2); |
3605 | __ fcvtds(V3, V3); |
3606 | |
3607 | __ faddd(V0, V0, V1); |
3608 | __ faddd(V0, V0, V2); |
3609 | __ faddd(V0, V0, V3); |
3610 | __ ret(); |
3611 | } |
3612 | |
3613 | ASSEMBLER_TEST_RUN(Vmuls, test) { |
3614 | typedef double (*DoubleReturn)() DART_UNUSED; |
3615 | EXPECT_EQ(14.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3616 | } |
3617 | |
3618 | ASSEMBLER_TEST_GENERATE(Vdivs, assembler) { |
3619 | __ LoadDImmediate(V0, 0.0); |
3620 | __ LoadDImmediate(V1, 1.0); |
3621 | __ LoadDImmediate(V2, 2.0); |
3622 | __ LoadDImmediate(V3, 3.0); |
3623 | |
3624 | __ fcvtsd(V0, V0); |
3625 | __ fcvtsd(V1, V1); |
3626 | __ fcvtsd(V2, V2); |
3627 | __ fcvtsd(V3, V3); |
3628 | |
3629 | __ vinss(V4, 0, V0, 0); |
3630 | __ vinss(V4, 1, V1, 0); |
3631 | __ vinss(V4, 2, V2, 0); |
3632 | __ vinss(V4, 3, V3, 0); |
3633 | |
3634 | __ vdivs(V5, V4, V4); |
3635 | |
3636 | __ vinss(V0, 0, V5, 0); |
3637 | __ vinss(V1, 0, V5, 1); |
3638 | __ vinss(V2, 0, V5, 2); |
3639 | __ vinss(V3, 0, V5, 3); |
3640 | |
3641 | __ fcvtds(V0, V0); |
3642 | __ fcvtds(V1, V1); |
3643 | __ fcvtds(V2, V2); |
3644 | __ fcvtds(V3, V3); |
3645 | |
3646 | __ faddd(V0, V1, V1); |
3647 | __ faddd(V0, V0, V2); |
3648 | __ faddd(V0, V0, V3); |
3649 | __ ret(); |
3650 | } |
3651 | |
3652 | ASSEMBLER_TEST_RUN(Vdivs, test) { |
3653 | typedef double (*DoubleReturn)() DART_UNUSED; |
3654 | EXPECT_EQ(4.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3655 | } |
3656 | |
3657 | ASSEMBLER_TEST_GENERATE(Vaddd, assembler) { |
3658 | __ LoadDImmediate(V0, 2.0); |
3659 | __ LoadDImmediate(V1, 3.0); |
3660 | |
3661 | __ vinsd(V4, 0, V0, 0); |
3662 | __ vinsd(V4, 1, V1, 0); |
3663 | |
3664 | __ vaddd(V5, V4, V4); |
3665 | |
3666 | __ vinsd(V0, 0, V5, 0); |
3667 | __ vinsd(V1, 0, V5, 1); |
3668 | |
3669 | __ faddd(V0, V0, V1); |
3670 | __ ret(); |
3671 | } |
3672 | |
3673 | ASSEMBLER_TEST_RUN(Vaddd, test) { |
3674 | typedef double (*DoubleReturn)() DART_UNUSED; |
3675 | EXPECT_EQ(10.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3676 | } |
3677 | |
3678 | ASSEMBLER_TEST_GENERATE(Vsubd, assembler) { |
3679 | __ LoadDImmediate(V0, 2.0); |
3680 | __ LoadDImmediate(V1, 3.0); |
3681 | __ LoadDImmediate(V5, 0.0); |
3682 | |
3683 | __ vinsd(V4, 0, V0, 0); |
3684 | __ vinsd(V4, 1, V1, 0); |
3685 | |
3686 | __ vsubd(V5, V5, V4); |
3687 | |
3688 | __ vinsd(V0, 0, V5, 0); |
3689 | __ vinsd(V1, 0, V5, 1); |
3690 | |
3691 | __ faddd(V0, V0, V1); |
3692 | __ ret(); |
3693 | } |
3694 | |
3695 | ASSEMBLER_TEST_RUN(Vsubd, test) { |
3696 | typedef double (*DoubleReturn)() DART_UNUSED; |
3697 | EXPECT_EQ(-5.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3698 | } |
3699 | |
3700 | ASSEMBLER_TEST_GENERATE(Vmuld, assembler) { |
3701 | __ LoadDImmediate(V0, 2.0); |
3702 | __ LoadDImmediate(V1, 3.0); |
3703 | |
3704 | __ vinsd(V4, 0, V0, 0); |
3705 | __ vinsd(V4, 1, V1, 0); |
3706 | |
3707 | __ vmuld(V5, V4, V4); |
3708 | |
3709 | __ vinsd(V0, 0, V5, 0); |
3710 | __ vinsd(V1, 0, V5, 1); |
3711 | |
3712 | __ faddd(V0, V0, V1); |
3713 | __ ret(); |
3714 | } |
3715 | |
3716 | ASSEMBLER_TEST_RUN(Vmuld, test) { |
3717 | typedef double (*DoubleReturn)() DART_UNUSED; |
3718 | EXPECT_EQ(13.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3719 | } |
3720 | |
3721 | ASSEMBLER_TEST_GENERATE(Vdivd, assembler) { |
3722 | __ LoadDImmediate(V0, 2.0); |
3723 | __ LoadDImmediate(V1, 3.0); |
3724 | |
3725 | __ vinsd(V4, 0, V0, 0); |
3726 | __ vinsd(V4, 1, V1, 0); |
3727 | |
3728 | __ vdivd(V5, V4, V4); |
3729 | |
3730 | __ vinsd(V0, 0, V5, 0); |
3731 | __ vinsd(V1, 0, V5, 1); |
3732 | |
3733 | __ faddd(V0, V0, V1); |
3734 | __ ret(); |
3735 | } |
3736 | |
3737 | ASSEMBLER_TEST_RUN(Vdivd, test) { |
3738 | typedef double (*DoubleReturn)() DART_UNUSED; |
3739 | EXPECT_EQ(2.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3740 | } |
3741 | |
3742 | ASSEMBLER_TEST_GENERATE(Vdupd, assembler) { |
3743 | __ SetupDartSP(); |
3744 | __ LoadDImmediate(V0, 21.0); |
3745 | __ vdupd(V1, V0, 0); |
3746 | |
3747 | const int dword_bytes = 1 << Log2OperandSizeBytes(kDWord); |
3748 | const int qword_bytes = 1 << Log2OperandSizeBytes(kQWord); |
3749 | |
3750 | __ sub(CSP, CSP, Operand(qword_bytes)); // Must not access beyond CSP. |
3751 | |
3752 | __ fstrq(V1, Address(SP, -1 * qword_bytes, Address::PreIndex)); |
3753 | |
3754 | __ fldrd(V2, Address(SP, 1 * dword_bytes, Address::PostIndex)); |
3755 | __ fldrd(V3, Address(SP, 1 * dword_bytes, Address::PostIndex)); |
3756 | |
3757 | __ faddd(V0, V2, V3); |
3758 | __ RestoreCSP(); |
3759 | __ ret(); |
3760 | } |
3761 | |
3762 | ASSEMBLER_TEST_RUN(Vdupd, test) { |
3763 | typedef double (*DoubleReturn)() DART_UNUSED; |
3764 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3765 | } |
3766 | |
3767 | ASSEMBLER_TEST_GENERATE(Vdups, assembler) { |
3768 | __ SetupDartSP(); |
3769 | __ LoadDImmediate(V0, 21.0); |
3770 | __ fcvtsd(V0, V0); |
3771 | __ vdups(V1, V0, 0); |
3772 | |
3773 | const int sword_bytes = 1 << Log2OperandSizeBytes(kSWord); |
3774 | const int qword_bytes = 1 << Log2OperandSizeBytes(kQWord); |
3775 | |
3776 | __ sub(CSP, CSP, Operand(qword_bytes)); // Must not access beyond CSP. |
3777 | |
3778 | __ fstrq(V1, Address(SP, -1 * qword_bytes, Address::PreIndex)); |
3779 | |
3780 | __ fldrs(V3, Address(SP, 1 * sword_bytes, Address::PostIndex)); |
3781 | __ fldrs(V2, Address(SP, 1 * sword_bytes, Address::PostIndex)); |
3782 | __ fldrs(V1, Address(SP, 1 * sword_bytes, Address::PostIndex)); |
3783 | __ fldrs(V0, Address(SP, 1 * sword_bytes, Address::PostIndex)); |
3784 | |
3785 | __ fcvtds(V0, V0); |
3786 | __ fcvtds(V1, V1); |
3787 | __ fcvtds(V2, V2); |
3788 | __ fcvtds(V3, V3); |
3789 | |
3790 | __ faddd(V0, V1, V1); |
3791 | __ faddd(V0, V0, V2); |
3792 | __ faddd(V0, V0, V3); |
3793 | __ RestoreCSP(); |
3794 | __ ret(); |
3795 | } |
3796 | |
3797 | ASSEMBLER_TEST_RUN(Vdups, test) { |
3798 | typedef double (*DoubleReturn)() DART_UNUSED; |
3799 | EXPECT_EQ(84.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3800 | } |
3801 | |
3802 | ASSEMBLER_TEST_GENERATE(Vinsd, assembler) { |
3803 | __ SetupDartSP(); |
3804 | __ LoadDImmediate(V5, 42.0); |
__ vinsd(V1, 1, V5, 0);  // V1[1] <- V5[0].
3806 | |
3807 | const int dword_bytes = 1 << Log2OperandSizeBytes(kDWord); |
3808 | const int qword_bytes = 1 << Log2OperandSizeBytes(kQWord); |
3809 | |
3810 | __ sub(CSP, CSP, Operand(qword_bytes)); // Must not access beyond CSP. |
3811 | |
3812 | __ fstrq(V1, Address(SP, -1 * qword_bytes, Address::PreIndex)); |
3813 | |
3814 | __ fldrd(V2, Address(SP, 1 * dword_bytes, Address::PostIndex)); |
3815 | __ fldrd(V3, Address(SP, 1 * dword_bytes, Address::PostIndex)); |
3816 | |
3817 | __ fmovdd(V0, V3); |
3818 | __ RestoreCSP(); |
3819 | __ ret(); |
3820 | } |
3821 | |
3822 | ASSEMBLER_TEST_RUN(Vinsd, test) { |
3823 | typedef double (*DoubleReturn)() DART_UNUSED; |
3824 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3825 | } |
3826 | |
3827 | ASSEMBLER_TEST_GENERATE(Vinss, assembler) { |
3828 | __ SetupDartSP(); |
3829 | // Set V1 parts 1 and 3 to 21.0. |
3830 | __ LoadDImmediate(V0, 21.0); |
3831 | __ fcvtsd(V0, V0); |
3832 | __ vinss(V1, 3, V0, 0); |
3833 | __ vinss(V1, 1, V0, 0); |
3834 | |
3835 | // Set V1 parts 0 and 2 to 0.0. |
3836 | __ LoadDImmediate(V0, 0.0); |
3837 | __ fcvtsd(V0, V0); |
3838 | __ vinss(V1, 2, V0, 0); |
3839 | __ vinss(V1, 0, V0, 0); |
3840 | |
3841 | const int sword_bytes = 1 << Log2OperandSizeBytes(kSWord); |
3842 | const int qword_bytes = 1 << Log2OperandSizeBytes(kQWord); |
3843 | |
3844 | __ sub(CSP, CSP, Operand(qword_bytes)); // Must not access beyond CSP. |
3845 | |
3846 | __ fstrq(V1, Address(SP, -1 * qword_bytes, Address::PreIndex)); |
3847 | |
3848 | __ fldrs(V3, Address(SP, 1 * sword_bytes, Address::PostIndex)); |
3849 | __ fldrs(V2, Address(SP, 1 * sword_bytes, Address::PostIndex)); |
3850 | __ fldrs(V1, Address(SP, 1 * sword_bytes, Address::PostIndex)); |
3851 | __ fldrs(V0, Address(SP, 1 * sword_bytes, Address::PostIndex)); |
3852 | |
3853 | __ fcvtds(V0, V0); |
3854 | __ fcvtds(V1, V1); |
3855 | __ fcvtds(V2, V2); |
3856 | __ fcvtds(V3, V3); |
3857 | |
3858 | __ faddd(V0, V0, V1); |
3859 | __ faddd(V0, V0, V2); |
3860 | __ faddd(V0, V0, V3); |
3861 | __ RestoreCSP(); |
3862 | __ ret(); |
3863 | } |
3864 | |
3865 | ASSEMBLER_TEST_RUN(Vinss, test) { |
3866 | typedef double (*DoubleReturn)() DART_UNUSED; |
3867 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3868 | } |
3869 | |
3870 | ASSEMBLER_TEST_GENERATE(Vand, assembler) { |
3871 | __ LoadDImmediate(V1, 21.0); |
3872 | __ LoadImmediate(R0, 0xffffffff); |
3873 | |
3874 | // V0 <- (0, 0xffffffff, 0, 0xffffffff) |
3875 | __ fmovdr(V0, R0); |
3876 | __ vinss(V0, 2, V0, 0); |
3877 | |
3878 | // V1 <- (21.0, 21.0, 21.0, 21.0) |
3879 | __ fcvtsd(V1, V1); |
3880 | __ vdups(V1, V1, 0); |
3881 | |
3882 | __ vand(V2, V1, V0); |
3883 | |
3884 | __ vinss(V3, 0, V2, 0); |
3885 | __ vinss(V4, 0, V2, 1); |
3886 | __ vinss(V5, 0, V2, 2); |
3887 | __ vinss(V6, 0, V2, 3); |
3888 | |
3889 | __ fcvtds(V3, V3); |
3890 | __ fcvtds(V4, V4); |
3891 | __ fcvtds(V5, V5); |
3892 | __ fcvtds(V6, V6); |
3893 | |
3894 | __ vaddd(V0, V3, V4); |
3895 | __ vaddd(V0, V0, V5); |
3896 | __ vaddd(V0, V0, V6); |
3897 | __ ret(); |
3898 | } |
3899 | |
3900 | ASSEMBLER_TEST_RUN(Vand, test) { |
3901 | typedef double (*DoubleReturn)() DART_UNUSED; |
3902 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3903 | } |
3904 | |
3905 | ASSEMBLER_TEST_GENERATE(Vorr, assembler) { |
3906 | __ LoadDImmediate(V1, 10.5); |
3907 | __ fcvtsd(V1, V1); |
3908 | |
3909 | // V0 <- (0, 10.5, 0, 10.5) |
3910 | __ fmovdd(V0, V1); |
3911 | __ vinss(V0, 2, V0, 0); |
3912 | |
3913 | // V1 <- (10.5, 0, 10.5, 0) |
3914 | __ veor(V1, V1, V1); |
3915 | __ vinss(V1, 1, V0, 0); |
3916 | __ vinss(V1, 3, V0, 0); |
3917 | |
3918 | __ vorr(V2, V1, V0); |
3919 | |
3920 | __ vinss(V3, 0, V2, 0); |
3921 | __ vinss(V4, 0, V2, 1); |
3922 | __ vinss(V5, 0, V2, 2); |
3923 | __ vinss(V6, 0, V2, 3); |
3924 | |
3925 | __ fcvtds(V3, V3); |
3926 | __ fcvtds(V4, V4); |
3927 | __ fcvtds(V5, V5); |
3928 | __ fcvtds(V6, V6); |
3929 | |
3930 | __ vaddd(V0, V3, V4); |
3931 | __ vaddd(V0, V0, V5); |
3932 | __ vaddd(V0, V0, V6); |
3933 | __ ret(); |
3934 | } |
3935 | |
3936 | ASSEMBLER_TEST_RUN(Vorr, test) { |
3937 | typedef double (*DoubleReturn)() DART_UNUSED; |
3938 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
3939 | } |
3940 | |
3941 | ASSEMBLER_TEST_GENERATE(Veor, assembler) { |
3942 | __ LoadImmediate(R1, 0xffffffff); |
3943 | __ LoadImmediate(R2, ~21); |
3944 | |
3945 | __ vinsw(V1, 0, R1); |
3946 | __ vinsw(V1, 1, R2); |
3947 | __ vinsw(V1, 2, R1); |
3948 | __ vinsw(V1, 3, R2); |
3949 | |
3950 | __ vinsw(V2, 0, R1); |
3951 | __ vinsw(V2, 1, R1); |
3952 | __ vinsw(V2, 2, R1); |
3953 | __ vinsw(V2, 3, R1); |
3954 | |
3955 | __ veor(V0, V1, V2); |
3956 | |
3957 | __ vmovrs(R3, V0, 0); |
3958 | __ vmovrs(R4, V0, 1); |
3959 | __ vmovrs(R5, V0, 2); |
3960 | __ vmovrs(R6, V0, 3); |
3961 | |
3962 | __ add(R0, R3, Operand(R4)); |
3963 | __ add(R0, R0, Operand(R5)); |
3964 | __ add(R0, R0, Operand(R6)); |
3965 | __ ret(); |
3966 | } |
3967 | |
3968 | ASSEMBLER_TEST_RUN(Veor, test) { |
3969 | typedef int64_t (*Int64Return)() DART_UNUSED; |
3970 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
3971 | } |
3972 | |
3973 | ASSEMBLER_TEST_GENERATE(Vaddw, assembler) { |
3974 | __ LoadImmediate(R4, 21); |
3975 | |
3976 | __ vdupw(V1, R4); |
3977 | __ vdupw(V2, R4); |
3978 | |
3979 | __ vaddw(V0, V1, V2); |
3980 | |
3981 | __ vmovrs(R0, V0, 0); |
3982 | __ vmovrs(R1, V0, 1); |
3983 | __ vmovrs(R2, V0, 2); |
3984 | __ vmovrs(R3, V0, 3); |
3985 | __ add(R0, R0, Operand(R1)); |
3986 | __ add(R0, R0, Operand(R2)); |
3987 | __ add(R0, R0, Operand(R3)); |
3988 | __ ret(); |
3989 | } |
3990 | |
3991 | ASSEMBLER_TEST_RUN(Vaddw, test) { |
3992 | typedef int64_t (*Int64Return)() DART_UNUSED; |
3993 | EXPECT_EQ(168, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
3994 | } |
3995 | |
3996 | ASSEMBLER_TEST_GENERATE(Vsubw, assembler) { |
3997 | __ LoadImmediate(R4, 31); |
3998 | __ LoadImmediate(R5, 10); |
3999 | |
4000 | __ vdupw(V1, R4); |
4001 | __ vdupw(V2, R5); |
4002 | |
4003 | __ vsubw(V0, V1, V2); |
4004 | |
4005 | __ vmovrs(R0, V0, 0); |
4006 | __ vmovrs(R1, V0, 1); |
4007 | __ vmovrs(R2, V0, 2); |
4008 | __ vmovrs(R3, V0, 3); |
4009 | __ add(R0, R0, Operand(R1)); |
4010 | __ add(R0, R0, Operand(R2)); |
4011 | __ add(R0, R0, Operand(R3)); |
4012 | __ ret(); |
4013 | } |
4014 | |
4015 | ASSEMBLER_TEST_RUN(Vsubw, test) { |
4016 | typedef int64_t (*Int64Return)() DART_UNUSED; |
4017 | EXPECT_EQ(84, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
4018 | } |
4019 | |
4020 | ASSEMBLER_TEST_GENERATE(Vaddx, assembler) { |
4021 | __ LoadImmediate(R4, 21); |
4022 | |
4023 | __ vdupx(V1, R4); |
4024 | __ vdupx(V2, R4); |
4025 | |
4026 | __ vaddx(V0, V1, V2); |
4027 | |
4028 | __ vmovrd(R0, V0, 0); |
4029 | __ vmovrd(R1, V0, 1); |
4030 | __ add(R0, R0, Operand(R1)); |
4031 | __ ret(); |
4032 | } |
4033 | |
4034 | ASSEMBLER_TEST_RUN(Vaddx, test) { |
4035 | typedef int64_t (*Int64Return)() DART_UNUSED; |
4036 | EXPECT_EQ(84, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
4037 | } |
4038 | |
4039 | ASSEMBLER_TEST_GENERATE(Vsubx, assembler) { |
4040 | __ LoadImmediate(R4, 31); |
4041 | __ LoadImmediate(R5, 10); |
4042 | |
4043 | __ vdupx(V1, R4); |
4044 | __ vdupx(V2, R5); |
4045 | |
4046 | __ vsubx(V0, V1, V2); |
4047 | |
4048 | __ vmovrd(R0, V0, 0); |
4049 | __ vmovrd(R1, V0, 1); |
4050 | __ add(R0, R0, Operand(R1)); |
4051 | __ ret(); |
4052 | } |
4053 | |
4054 | ASSEMBLER_TEST_RUN(Vsubx, test) { |
4055 | typedef int64_t (*Int64Return)() DART_UNUSED; |
4056 | EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
4057 | } |
4058 | |
4059 | ASSEMBLER_TEST_GENERATE(Vceqs, assembler) { |
4060 | __ LoadDImmediate(V0, 42.0); |
4061 | __ LoadDImmediate(V1, -42.0); |
4062 | |
4063 | __ fcvtsd(V0, V0); |
4064 | __ fcvtsd(V1, V1); |
4065 | |
4066 | __ vdups(V2, V0, 0); |
4067 | __ vinss(V3, 0, V0, 0); |
4068 | __ vinss(V3, 1, V1, 0); |
4069 | __ vinss(V3, 2, V0, 0); |
4070 | __ vinss(V3, 3, V1, 0); |
4071 | |
4072 | __ vceqs(V4, V2, V3); |
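// Lanes 0 and 2 compare equal (0xffffffff each) and lanes 1 and 3 do not (0),
// so the 32-bit additions below wrap to 0xfffffffe.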
4073 | |
4074 | __ vmovrs(R1, V4, 0); |
4075 | __ vmovrs(R2, V4, 1); |
4076 | __ vmovrs(R3, V4, 2); |
4077 | __ vmovrs(R4, V4, 3); |
4078 | |
4079 | __ addw(R0, R1, Operand(R2)); |
4080 | __ addw(R0, R0, Operand(R3)); |
4081 | __ addw(R0, R0, Operand(R4)); |
4082 | __ ret(); |
4083 | } |
4084 | |
4085 | ASSEMBLER_TEST_RUN(Vceqs, test) { |
4086 | typedef int64_t (*Int64Return)() DART_UNUSED; |
4087 | EXPECT_EQ(0xfffffffe, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
4088 | } |
4089 | |
4090 | ASSEMBLER_TEST_GENERATE(Vceqd, assembler) { |
4091 | __ LoadDImmediate(V0, 42.0); |
4092 | __ LoadDImmediate(V1, -42.0); |
4093 | |
4094 | __ vdupd(V2, V0, 0); |
4095 | __ vinsd(V3, 0, V0, 0); |
4096 | __ vinsd(V3, 1, V1, 0); |
4097 | |
4098 | __ vceqd(V4, V2, V3); |
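// Lane 0 compares equal (all ones, i.e. -1) and lane 1 does not (0),
// so the sum below is -1.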
4099 | |
4100 | __ vmovrd(R1, V4, 0); |
4101 | __ vmovrd(R2, V4, 1); |
4102 | |
4103 | __ add(R0, R1, Operand(R2)); |
4104 | __ ret(); |
4105 | } |
4106 | |
4107 | ASSEMBLER_TEST_RUN(Vceqd, test) { |
4108 | typedef int64_t (*Int64Return)() DART_UNUSED; |
4109 | EXPECT_EQ(-1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
4110 | } |
4111 | |
4112 | ASSEMBLER_TEST_GENERATE(Vcgts, assembler) { |
4113 | __ LoadDImmediate(V0, 42.0); |
4114 | __ LoadDImmediate(V1, -42.0); |
4115 | |
4116 | __ fcvtsd(V0, V0); |
4117 | __ fcvtsd(V1, V1); |
4118 | |
4119 | __ vdups(V2, V0, 0); |
4120 | __ vinss(V3, 0, V0, 0); |
4121 | __ vinss(V3, 1, V1, 0); |
4122 | __ vinss(V3, 2, V0, 0); |
4123 | __ vinss(V3, 3, V1, 0); |
4124 | |
4125 | __ vcgts(V4, V2, V3); |
4126 | |
4127 | __ vmovrs(R1, V4, 0); |
4128 | __ vmovrs(R2, V4, 1); |
4129 | __ vmovrs(R3, V4, 2); |
4130 | __ vmovrs(R4, V4, 3); |
4131 | |
4132 | __ addw(R0, R1, Operand(R2)); |
4133 | __ addw(R0, R0, Operand(R3)); |
4134 | __ addw(R0, R0, Operand(R4)); |
4135 | __ ret(); |
4136 | } |
4137 | |
4138 | ASSEMBLER_TEST_RUN(Vcgts, test) { |
4139 | typedef int64_t (*Int64Return)() DART_UNUSED; |
4140 | EXPECT_EQ(0xfffffffe, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
4141 | } |
4142 | |
4143 | ASSEMBLER_TEST_GENERATE(Vcgtd, assembler) { |
4144 | __ LoadDImmediate(V0, 42.0); |
4145 | __ LoadDImmediate(V1, -42.0); |
4146 | |
4147 | __ vdupd(V2, V0, 0); |
4148 | __ vinsd(V3, 0, V0, 0); |
4149 | __ vinsd(V3, 1, V1, 0); |
4150 | |
4151 | __ vcgtd(V4, V2, V3); |
4152 | |
4153 | __ vmovrd(R1, V4, 0); |
4154 | __ vmovrd(R2, V4, 1); |
4155 | |
4156 | __ add(R0, R1, Operand(R2)); |
4157 | __ ret(); |
4158 | } |
4159 | |
4160 | ASSEMBLER_TEST_RUN(Vcgtd, test) { |
4161 | typedef int64_t (*Int64Return)() DART_UNUSED; |
4162 | EXPECT_EQ(-1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
4163 | } |
4164 | |
4165 | ASSEMBLER_TEST_GENERATE(Vcges, assembler) { |
4166 | __ LoadDImmediate(V0, 42.0); |
4167 | __ LoadDImmediate(V1, 43.0); |
4168 | |
4169 | __ fcvtsd(V0, V0); |
4170 | __ fcvtsd(V1, V1); |
4171 | |
4172 | __ vdups(V2, V0, 0); |
4173 | __ vinss(V3, 0, V0, 0); |
4174 | __ vinss(V3, 1, V1, 0); |
4175 | __ vinss(V3, 2, V0, 0); |
4176 | __ vinss(V3, 3, V1, 0); |
4177 | |
4178 | __ vcges(V4, V2, V3); |
4179 | |
4180 | __ vmovrs(R1, V4, 0); |
4181 | __ vmovrs(R2, V4, 1); |
4182 | __ vmovrs(R3, V4, 2); |
4183 | __ vmovrs(R4, V4, 3); |
4184 | |
4185 | __ addw(R0, R1, Operand(R2)); |
4186 | __ addw(R0, R0, Operand(R3)); |
4187 | __ addw(R0, R0, Operand(R4)); |
4188 | __ ret(); |
4189 | } |
4190 | |
4191 | ASSEMBLER_TEST_RUN(Vcges, test) { |
4192 | typedef int64_t (*Int64Return)() DART_UNUSED; |
4193 | EXPECT_EQ(0xfffffffe, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
4194 | } |
4195 | |
4196 | ASSEMBLER_TEST_GENERATE(Vcged, assembler) { |
4197 | __ LoadDImmediate(V0, 42.0); |
4198 | __ LoadDImmediate(V1, 43.0); |
4199 | |
4200 | __ vdupd(V2, V0, 0); |
4201 | __ vinsd(V3, 0, V0, 0); |
4202 | __ vinsd(V3, 1, V1, 0); |
4203 | |
4204 | __ vcged(V4, V2, V3); |
4205 | |
4206 | __ vmovrd(R1, V4, 0); |
4207 | __ vmovrd(R2, V4, 1); |
4208 | |
4209 | __ add(R0, R1, Operand(R2)); |
4210 | __ ret(); |
4211 | } |
4212 | |
4213 | ASSEMBLER_TEST_RUN(Vcged, test) { |
4214 | typedef int64_t (*Int64Return)() DART_UNUSED; |
4215 | EXPECT_EQ(-1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
4216 | } |
4217 | |
4218 | ASSEMBLER_TEST_GENERATE(Vmaxs, assembler) { |
4219 | __ LoadDImmediate(V0, 10.5); |
4220 | __ LoadDImmediate(V1, 10.0); |
4221 | |
4222 | __ fcvtsd(V0, V0); |
4223 | __ fcvtsd(V1, V1); |
4224 | |
4225 | __ vdups(V2, V0, 0); |
4226 | __ vinss(V3, 0, V0, 0); |
4227 | __ vinss(V3, 1, V1, 0); |
4228 | __ vinss(V3, 2, V0, 0); |
4229 | __ vinss(V3, 3, V1, 0); |
4230 | |
4231 | __ vmaxs(V4, V2, V3); |
4232 | |
4233 | __ vinss(V0, 0, V4, 0); |
4234 | __ vinss(V1, 0, V4, 1); |
4235 | __ vinss(V2, 0, V4, 2); |
4236 | __ vinss(V3, 0, V4, 3); |
4237 | |
4238 | __ fcvtds(V0, V0); |
4239 | __ fcvtds(V1, V1); |
4240 | __ fcvtds(V2, V2); |
4241 | __ fcvtds(V3, V3); |
4242 | |
4243 | __ faddd(V0, V0, V1); |
4244 | __ faddd(V0, V0, V2); |
4245 | __ faddd(V0, V0, V3); |
4246 | __ ret(); |
4247 | } |
4248 | |
4249 | ASSEMBLER_TEST_RUN(Vmaxs, test) { |
4250 | typedef double (*DoubleReturn)() DART_UNUSED; |
4251 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
4252 | } |
4253 | |
4254 | ASSEMBLER_TEST_GENERATE(Vmaxd, assembler) { |
4255 | __ LoadDImmediate(V0, 21.0); |
4256 | __ LoadDImmediate(V1, 20.5); |
4257 | |
4258 | __ vdupd(V2, V0, 0); |
4259 | __ vinsd(V3, 0, V0, 0); |
4260 | __ vinsd(V3, 1, V1, 0); |
4261 | |
4262 | __ vmaxd(V4, V2, V3); |
4263 | |
4264 | __ vinsd(V0, 0, V4, 0); |
4265 | __ vinsd(V1, 0, V4, 1); |
4266 | |
4267 | __ faddd(V0, V0, V1); |
4268 | __ ret(); |
4269 | } |
4270 | |
4271 | ASSEMBLER_TEST_RUN(Vmaxd, test) { |
4272 | typedef double (*DoubleReturn)() DART_UNUSED; |
4273 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
4274 | } |
4275 | |
4276 | ASSEMBLER_TEST_GENERATE(Vmins, assembler) { |
4277 | __ LoadDImmediate(V0, 10.5); |
4278 | __ LoadDImmediate(V1, 11.0); |
4279 | |
4280 | __ fcvtsd(V0, V0); |
4281 | __ fcvtsd(V1, V1); |
4282 | |
4283 | __ vdups(V2, V0, 0); |
4284 | __ vinss(V3, 0, V0, 0); |
4285 | __ vinss(V3, 1, V1, 0); |
4286 | __ vinss(V3, 2, V0, 0); |
4287 | __ vinss(V3, 3, V1, 0); |
4288 | |
4289 | __ vmins(V4, V2, V3); |
4290 | |
4291 | __ vinss(V0, 0, V4, 0); |
4292 | __ vinss(V1, 0, V4, 1); |
4293 | __ vinss(V2, 0, V4, 2); |
4294 | __ vinss(V3, 0, V4, 3); |
4295 | |
4296 | __ fcvtds(V0, V0); |
4297 | __ fcvtds(V1, V1); |
4298 | __ fcvtds(V2, V2); |
4299 | __ fcvtds(V3, V3); |
4300 | |
4301 | __ faddd(V0, V0, V1); |
4302 | __ faddd(V0, V0, V2); |
4303 | __ faddd(V0, V0, V3); |
4304 | __ ret(); |
4305 | } |
4306 | |
4307 | ASSEMBLER_TEST_RUN(Vmins, test) { |
4308 | typedef double (*DoubleReturn)() DART_UNUSED; |
4309 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
4310 | } |
4311 | |
4312 | ASSEMBLER_TEST_GENERATE(Vmind, assembler) { |
4313 | __ LoadDImmediate(V0, 21.0); |
4314 | __ LoadDImmediate(V1, 21.5); |
4315 | |
4316 | __ vdupd(V2, V0, 0); |
4317 | __ vinsd(V3, 0, V0, 0); |
4318 | __ vinsd(V3, 1, V1, 0); |
4319 | |
4320 | __ vmind(V4, V2, V3); |
4321 | |
4322 | __ vinsd(V0, 0, V4, 0); |
4323 | __ vinsd(V1, 0, V4, 1); |
4324 | |
4325 | __ faddd(V0, V0, V1); |
4326 | __ ret(); |
4327 | } |
4328 | |
4329 | ASSEMBLER_TEST_RUN(Vmind, test) { |
4330 | typedef double (*DoubleReturn)() DART_UNUSED; |
4331 | EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
4332 | } |
4333 | |
4334 | ASSEMBLER_TEST_GENERATE(Vsqrts, assembler) { |
4335 | __ LoadDImmediate(V0, 64.0); |
4336 | __ LoadDImmediate(V1, 49.0); |
4337 | |
4338 | __ fcvtsd(V0, V0); |
4339 | __ fcvtsd(V1, V1); |
4340 | |
4341 | __ veor(V3, V3, V3); |
4342 | __ vinss(V3, 1, V0, 0); |
4343 | __ vinss(V3, 3, V1, 0); |
4344 | |
4345 | __ vsqrts(V4, V3); |
4346 | |
4347 | __ vinss(V5, 0, V4, 1); |
4348 | __ vinss(V6, 0, V4, 3); |
4349 | |
4350 | __ fcvtds(V5, V5); |
4351 | __ fcvtds(V6, V6); |
4352 | |
4353 | __ faddd(V0, V5, V6); |
4354 | __ ret(); |
4355 | } |
4356 | |
4357 | ASSEMBLER_TEST_RUN(Vsqrts, test) { |
4358 | typedef double (*DoubleReturn)() DART_UNUSED; |
4359 | EXPECT_EQ(15.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
4360 | } |
4361 | |
4362 | ASSEMBLER_TEST_GENERATE(Vsqrtd, assembler) { |
4363 | __ LoadDImmediate(V0, 64.0); |
4364 | __ LoadDImmediate(V1, 49.0); |
4365 | |
4366 | __ vinsd(V3, 0, V0, 0); |
4367 | __ vinsd(V3, 1, V1, 0); |
4368 | |
4369 | __ vsqrtd(V4, V3); |
4370 | |
4371 | __ vinsd(V5, 0, V4, 0); |
4372 | __ vinsd(V6, 0, V4, 1); |
4373 | |
4374 | __ faddd(V0, V5, V6); |
4375 | __ ret(); |
4376 | } |
4377 | |
4378 | ASSEMBLER_TEST_RUN(Vsqrtd, test) { |
4379 | typedef double (*DoubleReturn)() DART_UNUSED; |
4380 | EXPECT_EQ(15.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry())); |
4381 | } |
4382 | |
4383 | // This is the same function as in the Simulator. |
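// It mirrors the FRECPE estimate: the significand of the result is quantised
// to units of 1/256, so e.g. for a = 2.0f it returns roughly 0.499, not 0.5.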
4384 | static float arm_recip_estimate(float a) { |
4385 | // From the ARM Architecture Reference Manual A2-85. |
4386 | if (isinf(a) || (fabs(a) >= exp2f(126))) |
4387 | return 0.0; |
4388 | else if (a == 0.0) |
4389 | return kPosInfinity; |
4390 | else if (isnan(a)) |
4391 | return a; |
4392 | |
4393 | uint32_t a_bits = bit_cast<uint32_t, float>(a); |
4394 | // scaled = '0011 1111 1110' : a<22:0> : Zeros(29) |
4395 | uint64_t scaled = (static_cast<uint64_t>(0x3fe) << 52) | |
4396 | ((static_cast<uint64_t>(a_bits) & 0x7fffff) << 29); |
4397 | // result_exp = 253 - UInt(a<30:23>) |
4398 | int32_t result_exp = 253 - ((a_bits >> 23) & 0xff); |
4399 | ASSERT((result_exp >= 1) && (result_exp <= 252)); |
4400 | |
4401 | double scaled_d = bit_cast<double, uint64_t>(scaled); |
4402 | ASSERT((scaled_d >= 0.5) && (scaled_d < 1.0)); |
4403 | |
4404 | // a in units of 1/512 rounded down. |
4405 | int32_t q = static_cast<int32_t>(scaled_d * 512.0); |
4406 | // reciprocal r. |
4407 | double r = 1.0 / ((static_cast<double>(q) + 0.5) / 512.0); |
4408 | // r in units of 1/256 rounded to nearest. |
4409 | int32_t s = static_cast<int32_t>(256.0 * r + 0.5); |
4410 | double estimate = static_cast<double>(s) / 256.0; |
4411 | ASSERT((estimate >= 1.0) && (estimate <= (511.0 / 256.0))); |
4412 | |
4413 | // result = sign : result_exp<7:0> : estimate<51:29> |
4414 | int32_t result_bits = |
4415 | (a_bits & 0x80000000) | ((result_exp & 0xff) << 23) | |
4416 | ((bit_cast<uint64_t, double>(estimate) >> 29) & 0x7fffff); |
4417 | return bit_cast<float, int32_t>(result_bits); |
4418 | } |
4419 | |
4420 | ASSEMBLER_TEST_GENERATE(Vrecpes, assembler) { |
4421 | __ LoadDImmediate(V1, 147.0); |
4422 | __ fcvtsd(V1, V1); |
4423 | __ vinss(V2, 0, V1, 0); |
4424 | __ vinss(V2, 1, V1, 0); |
4425 | __ vinss(V2, 2, V1, 0); |
4426 | __ vinss(V2, 3, V1, 0); |
4427 | __ vrecpes(V0, V2); |
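// FRECPE produces a per-lane reciprocal estimate; lane 0 is widened back to a
// double and compared against arm_recip_estimate(147.0) below.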
4428 | __ fcvtds(V0, V0); |
4429 | __ ret(); |
4430 | } |
4431 | |
4432 | ASSEMBLER_TEST_RUN(Vrecpes, test) { |
4433 | EXPECT(test != NULL); |
4434 | typedef double (*DoubleReturn)() DART_UNUSED; |
4435 | float res = EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry()); |
4436 | EXPECT_FLOAT_EQ(arm_recip_estimate(147.0), res, 0.0001); |
4437 | } |
4438 | |
4439 | ASSEMBLER_TEST_GENERATE(Vrecpss, assembler) { |
4440 | __ LoadDImmediate(V1, 5.0); |
4441 | __ LoadDImmediate(V2, 10.0); |
4442 | |
4443 | __ fcvtsd(V1, V1); |
4444 | __ fcvtsd(V2, V2); |
4445 | |
4446 | __ vrecpss(V0, V1, V2); |
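// FRECPS returns the Newton-Raphson step 2.0 - Vn*Vm, so here
// V0 = 2.0 - 5.0 * 10.0 = -48.0, matching the expectation below.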
4447 | |
4448 | __ fcvtds(V0, V0); |
4449 | __ ret(); |
4450 | } |
4451 | |
4452 | ASSEMBLER_TEST_RUN(Vrecpss, test) { |
4453 | EXPECT(test != NULL); |
4454 | typedef double (*DoubleReturn)() DART_UNUSED; |
4455 | double res = EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry()); |
4456 | EXPECT_FLOAT_EQ(2.0 - 10.0 * 5.0, res, 0.0001); |
4457 | } |
4458 | |
4459 | ASSEMBLER_TEST_GENERATE(VRecps, assembler) { |
4460 | __ LoadDImmediate(V0, 1.0 / 10.5); |
4461 | __ fcvtsd(V0, V0); |
4462 | |
4463 | __ vdups(V1, V0, 0); |
4464 | |
4465 | __ VRecps(V2, V1); |
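// VRecps is the macro-assembler helper that refines the hardware reciprocal
// estimate (presumably via FRECPS Newton-Raphson steps), so each lane should
// hold approximately 10.5 and the four-lane sum approximately 42.0.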
4466 | |
4467 | __ vinss(V0, 0, V2, 0); |
4468 | __ vinss(V1, 0, V2, 1); |
4469 | __ vinss(V2, 0, V2, 2); |
4470 | __ vinss(V3, 0, V2, 3); |
4471 | |
4472 | __ fcvtds(V0, V0); |
4473 | __ fcvtds(V1, V1); |
4474 | __ fcvtds(V2, V2); |
4475 | __ fcvtds(V3, V3); |
4476 | |
4477 | __ faddd(V0, V0, V1); |
4478 | __ faddd(V0, V0, V2); |
4479 | __ faddd(V0, V0, V3); |
4480 | __ ret(); |
4481 | } |
4482 | |
4483 | ASSEMBLER_TEST_RUN(VRecps, test) { |
4484 | typedef double (*DoubleReturn)() DART_UNUSED; |
4485 | double res = EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry()); |
4486 | EXPECT_FLOAT_EQ(42.0, res, 0.0001); |
4487 | } |
4488 | |
4489 | static float arm_reciprocal_sqrt_estimate(float a) { |
4490 | // From the ARM Architecture Reference Manual A2-87. |
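// Like arm_recip_estimate above, the significand of the result is quantised
// to units of 1/256, mirroring the hardware FRSQRTE estimate.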
4491 | if (isinf(a) || (fabs(a) >= exp2f(126))) |
4492 | return 0.0; |
4493 | else if (a == 0.0) |
4494 | return kPosInfinity; |
4495 | else if (isnan(a)) |
4496 | return a; |
4497 | |
4498 | uint32_t a_bits = bit_cast<uint32_t, float>(a); |
4499 | uint64_t scaled; |
4500 | if (((a_bits >> 23) & 1) != 0) { |
4501 | // scaled = '0 01111111101' : operand<22:0> : Zeros(29) |
4502 | scaled = (static_cast<uint64_t>(0x3fd) << 52) | |
4503 | ((static_cast<uint64_t>(a_bits) & 0x7fffff) << 29); |
4504 | } else { |
4505 | // scaled = '0 01111111110' : operand<22:0> : Zeros(29) |
4506 | scaled = (static_cast<uint64_t>(0x3fe) << 52) | |
4507 | ((static_cast<uint64_t>(a_bits) & 0x7fffff) << 29); |
4508 | } |
// result_exp = (380 - UInt(operand<30:23>)) DIV 2;
4510 | int32_t result_exp = (380 - ((a_bits >> 23) & 0xff)) / 2; |
4511 | |
4512 | double scaled_d = bit_cast<double, uint64_t>(scaled); |
4513 | ASSERT((scaled_d >= 0.25) && (scaled_d < 1.0)); |
4514 | |
4515 | double r; |
4516 | if (scaled_d < 0.5) { |
4517 | // range 0.25 <= a < 0.5 |
4518 | |
4519 | // a in units of 1/512 rounded down. |
4520 | int32_t q0 = static_cast<int32_t>(scaled_d * 512.0); |
4521 | // reciprocal root r. |
4522 | r = 1.0 / sqrt((static_cast<double>(q0) + 0.5) / 512.0); |
4523 | } else { |
4524 | // range 0.5 <= a < 1.0 |
4525 | |
4526 | // a in units of 1/256 rounded down. |
4527 | int32_t q1 = static_cast<int32_t>(scaled_d * 256.0); |
4528 | // reciprocal root r. |
4529 | r = 1.0 / sqrt((static_cast<double>(q1) + 0.5) / 256.0); |
4530 | } |
4531 | // r in units of 1/256 rounded to nearest. |
4532 | int32_t s = static_cast<int>(256.0 * r + 0.5); |
4533 | double estimate = static_cast<double>(s) / 256.0; |
4534 | ASSERT((estimate >= 1.0) && (estimate <= (511.0 / 256.0))); |
4535 | |
4536 | // result = 0 : result_exp<7:0> : estimate<51:29> |
4537 | int32_t result_bits = |
4538 | ((result_exp & 0xff) << 23) | |
4539 | ((bit_cast<uint64_t, double>(estimate) >> 29) & 0x7fffff); |
4540 | return bit_cast<float, int32_t>(result_bits); |
4541 | } |
4542 | |
4543 | ASSEMBLER_TEST_GENERATE(Vrsqrtes, assembler) { |
4544 | __ LoadDImmediate(V1, 147.0); |
4545 | __ fcvtsd(V1, V1); |
4546 | |
4547 | __ vrsqrtes(V0, V1); |
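// FRSQRTE produces a reciprocal square root estimate; the result is widened
// back to a double and compared against arm_reciprocal_sqrt_estimate(147.0).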
4548 | |
4549 | __ fcvtds(V0, V0); |
4550 | __ ret(); |
4551 | } |
4552 | |
4553 | ASSEMBLER_TEST_RUN(Vrsqrtes, test) { |
4554 | EXPECT(test != NULL); |
4555 | typedef double (*DoubleReturn)() DART_UNUSED; |
4556 | double res = EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry()); |
4557 | EXPECT_FLOAT_EQ(arm_reciprocal_sqrt_estimate(147.0), res, 0.0001); |
4558 | } |
4559 | |
4560 | ASSEMBLER_TEST_GENERATE(Vrsqrtss, assembler) { |
4561 | __ LoadDImmediate(V1, 5.0); |
4562 | __ LoadDImmediate(V2, 10.0); |
4563 | |
4564 | __ fcvtsd(V1, V1); |
4565 | __ fcvtsd(V2, V2); |
4566 | |
4567 | __ vrsqrtss(V0, V1, V2); |
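// FRSQRTS returns (3.0 - Vn*Vm) / 2.0, so here
// V0 = (3.0 - 5.0 * 10.0) / 2.0 = -23.5, matching the expectation below.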
4568 | |
4569 | __ fcvtds(V0, V0); |
4570 | __ ret(); |
4571 | } |
4572 | |
4573 | ASSEMBLER_TEST_RUN(Vrsqrtss, test) { |
4574 | EXPECT(test != NULL); |
4575 | typedef double (*DoubleReturn)() DART_UNUSED; |
4576 | double res = EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry()); |
4577 | EXPECT_FLOAT_EQ((3.0 - 10.0 * 5.0) / 2.0, res, 0.0001); |
4578 | } |
4579 | |
4580 | ASSEMBLER_TEST_GENERATE(ReciprocalSqrt, assembler) { |
4581 | __ LoadDImmediate(V1, 147000.0); |
4582 | __ fcvtsd(V1, V1); |
4583 | |
4584 | __ VRSqrts(V0, V1); |
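// VRSqrts is the macro-assembler helper approximating 1.0 / sqrt(x) per lane
// (presumably an estimate plus refinement steps), checked here against
// 1.0 / sqrt(147000.0).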
4585 | |
4586 | __ fcvtds(V0, V0); |
4587 | __ ret(); |
4588 | } |
4589 | |
4590 | ASSEMBLER_TEST_RUN(ReciprocalSqrt, test) { |
4591 | EXPECT(test != NULL); |
4592 | typedef double (*DoubleReturn)() DART_UNUSED; |
4593 | double res = EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry()); |
4594 | EXPECT_FLOAT_EQ(1.0 / sqrt(147000.0), res, 0.0001); |
4595 | } |
4596 | |
4597 | // Called from assembler_test.cc. |
4598 | // LR: return address. |
4599 | // R0: value. |
4600 | // R1: growable array. |
4601 | // R2: current thread. |
4602 | ASSEMBLER_TEST_GENERATE(StoreIntoObject, assembler) { |
4603 | __ SetupDartSP(); |
4604 | __ Push(CODE_REG); |
4605 | __ Push(THR); |
4606 | __ Push(BARRIER_MASK); |
4607 | __ Push(LR); |
4608 | __ mov(THR, R2); |
4609 | __ ldr(BARRIER_MASK, Address(THR, Thread::write_barrier_mask_offset())); |
4610 | __ StoreIntoObject(R1, FieldAddress(R1, GrowableObjectArray::data_offset()), |
4611 | R0); |
4612 | __ Pop(LR); |
4613 | __ Pop(BARRIER_MASK); |
4614 | __ Pop(THR); |
4615 | __ Pop(CODE_REG); |
4616 | __ RestoreCSP(); |
4617 | __ ret(); |
4618 | } |
4619 | |
// Push the numbers kMaxPushedNumber down to 0 onto the stack, then drop the
// top kMaxPushedNumber elements. This should leave just kMaxPushedNumber on
// the stack.
4623 | const intptr_t kMaxPushedNumber = 913; |
4624 | |
4625 | ASSEMBLER_TEST_GENERATE(Drop, assembler) { |
4626 | __ SetupDartSP((kMaxPushedNumber + 1) * target::kWordSize); |
4627 | for (intptr_t i = kMaxPushedNumber; i >= 0; i--) { |
4628 | __ PushImmediate(i); |
4629 | } |
4630 | __ Drop(kMaxPushedNumber); |
4631 | __ PopRegister(R0); |
4632 | __ RestoreCSP(); |
4633 | __ ret(); |
4634 | } |
4635 | |
4636 | ASSEMBLER_TEST_RUN(Drop, test) { |
4637 | EXPECT(test != NULL); |
4638 | typedef int64_t (*Int64Return)() DART_UNUSED; |
4639 | EXPECT_EQ(kMaxPushedNumber, |
4640 | EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
4641 | } |
4642 | |
4643 | } // namespace compiler |
4644 | } // namespace dart |
4645 | |
4646 | #endif // defined(TARGET_ARCH_ARM64) |
4647 | |