1/*
2 * Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#ifndef SHARE_C1_C1_INSTRUCTION_HPP
26#define SHARE_C1_C1_INSTRUCTION_HPP
27
28#include "c1/c1_Compilation.hpp"
29#include "c1/c1_LIR.hpp"
30#include "c1/c1_ValueType.hpp"
31#include "ci/ciField.hpp"
32
33// Predefined classes
34class ciField;
35class ValueStack;
36class InstructionPrinter;
37class IRScope;
38class LIR_OprDesc;
39typedef LIR_OprDesc* LIR_Opr;
40
41
42// Instruction class hierarchy
43//
44// All leaf classes in the class hierarchy are concrete classes
// (i.e., are instantiated). All other classes are abstract and
// exist only to factor out common functionality.
47
48class Instruction;
49class Phi;
50class Local;
51class Constant;
52class AccessField;
53class LoadField;
54class StoreField;
55class AccessArray;
56class ArrayLength;
57class AccessIndexed;
58class LoadIndexed;
59class StoreIndexed;
60class NegateOp;
61class Op2;
62class ArithmeticOp;
63class ShiftOp;
64class LogicOp;
65class CompareOp;
66class IfOp;
67class Convert;
68class NullCheck;
69class TypeCast;
70class OsrEntry;
71class ExceptionObject;
72class StateSplit;
73class Invoke;
74class NewInstance;
75class NewArray;
76class NewTypeArray;
77class NewObjectArray;
78class NewMultiArray;
79class TypeCheck;
80class CheckCast;
81class InstanceOf;
82class AccessMonitor;
83class MonitorEnter;
84class MonitorExit;
85class Intrinsic;
86class BlockBegin;
87class BlockEnd;
88class Goto;
89class If;
90class IfInstanceOf;
91class Switch;
92class TableSwitch;
93class LookupSwitch;
94class Return;
95class Throw;
96class Base;
97class RoundFP;
98class UnsafeOp;
99class UnsafeRawOp;
100class UnsafeGetRaw;
101class UnsafePutRaw;
102class UnsafeObjectOp;
103class UnsafeGetObject;
104class UnsafePutObject;
105class UnsafeGetAndSetObject;
106class ProfileCall;
107class ProfileReturnType;
108class ProfileInvoke;
109class RuntimeCall;
110class MemBar;
111class RangeCheckPredicate;
112#ifdef ASSERT
113class Assert;
114#endif
115
116// A Value is a reference to the instruction creating the value
117typedef Instruction* Value;
118typedef GrowableArray<Value> Values;
119typedef GrowableArray<ValueStack*> ValueStackStack;
120
121// BlockClosure is the base class for block traversal/iteration.
122
123class BlockClosure: public CompilationResourceObj {
124 public:
125 virtual void block_do(BlockBegin* block) = 0;
126};
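
// Illustrative sketch (not part of the compiler itself; the closure name is
// made up): a BlockClosure is used by deriving from it, implementing
// block_do(), and handing it to an iteration routine such as
// BlockList::iterate_forward() below.
//
//   class PrintBlockIdClosure: public BlockClosure {
//    public:
//     virtual void block_do(BlockBegin* block) {
//       tty->print_cr("B%d", block->block_id());
//     }
//   };
//
//   // usage: PrintBlockIdClosure cl; blocks->iterate_forward(&cl);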
127
128
129// A simple closure class for visiting the values of an Instruction
130class ValueVisitor: public StackObj {
131 public:
132 virtual void visit(Value* v) = 0;
133};
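
// A minimal sketch, using only what this header declares: visit() receives a
// pointer to each Value slot, so implementations may inspect or even update
// the referenced value. The visitor below (hypothetical) just counts
// non-NULL values.
//
//   class CountValuesVisitor: public ValueVisitor {
//    public:
//     int _count;
//     CountValuesVisitor(): _count(0) {}
//     virtual void visit(Value* v) { if (*v != NULL) _count++; }
//   };
//
//   // usage: CountValuesVisitor vv; instr->values_do(&vv);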
134
135
136// Some array and list classes
137typedef GrowableArray<BlockBegin*> BlockBeginArray;
138
139class BlockList: public GrowableArray<BlockBegin*> {
140 public:
141 BlockList(): GrowableArray<BlockBegin*>() {}
142 BlockList(const int size): GrowableArray<BlockBegin*>(size) {}
143 BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {}
144
145 void iterate_forward(BlockClosure* closure);
146 void iterate_backward(BlockClosure* closure);
147 void blocks_do(void f(BlockBegin*));
148 void values_do(ValueVisitor* f);
149 void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
150};
151
152
153// InstructionVisitors provide type-based dispatch for instructions.
154// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of being added to the Instruction classes themselves.
158
159class InstructionVisitor: public StackObj {
160 public:
161 virtual void do_Phi (Phi* x) = 0;
162 virtual void do_Local (Local* x) = 0;
163 virtual void do_Constant (Constant* x) = 0;
164 virtual void do_LoadField (LoadField* x) = 0;
165 virtual void do_StoreField (StoreField* x) = 0;
166 virtual void do_ArrayLength (ArrayLength* x) = 0;
167 virtual void do_LoadIndexed (LoadIndexed* x) = 0;
168 virtual void do_StoreIndexed (StoreIndexed* x) = 0;
169 virtual void do_NegateOp (NegateOp* x) = 0;
170 virtual void do_ArithmeticOp (ArithmeticOp* x) = 0;
171 virtual void do_ShiftOp (ShiftOp* x) = 0;
172 virtual void do_LogicOp (LogicOp* x) = 0;
173 virtual void do_CompareOp (CompareOp* x) = 0;
174 virtual void do_IfOp (IfOp* x) = 0;
175 virtual void do_Convert (Convert* x) = 0;
176 virtual void do_NullCheck (NullCheck* x) = 0;
177 virtual void do_TypeCast (TypeCast* x) = 0;
178 virtual void do_Invoke (Invoke* x) = 0;
179 virtual void do_NewInstance (NewInstance* x) = 0;
180 virtual void do_NewTypeArray (NewTypeArray* x) = 0;
181 virtual void do_NewObjectArray (NewObjectArray* x) = 0;
182 virtual void do_NewMultiArray (NewMultiArray* x) = 0;
183 virtual void do_CheckCast (CheckCast* x) = 0;
184 virtual void do_InstanceOf (InstanceOf* x) = 0;
185 virtual void do_MonitorEnter (MonitorEnter* x) = 0;
186 virtual void do_MonitorExit (MonitorExit* x) = 0;
187 virtual void do_Intrinsic (Intrinsic* x) = 0;
188 virtual void do_BlockBegin (BlockBegin* x) = 0;
189 virtual void do_Goto (Goto* x) = 0;
190 virtual void do_If (If* x) = 0;
191 virtual void do_IfInstanceOf (IfInstanceOf* x) = 0;
192 virtual void do_TableSwitch (TableSwitch* x) = 0;
193 virtual void do_LookupSwitch (LookupSwitch* x) = 0;
194 virtual void do_Return (Return* x) = 0;
195 virtual void do_Throw (Throw* x) = 0;
196 virtual void do_Base (Base* x) = 0;
197 virtual void do_OsrEntry (OsrEntry* x) = 0;
198 virtual void do_ExceptionObject(ExceptionObject* x) = 0;
199 virtual void do_RoundFP (RoundFP* x) = 0;
200 virtual void do_UnsafeGetRaw (UnsafeGetRaw* x) = 0;
201 virtual void do_UnsafePutRaw (UnsafePutRaw* x) = 0;
202 virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
203 virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
204 virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
205 virtual void do_ProfileCall (ProfileCall* x) = 0;
206 virtual void do_ProfileReturnType (ProfileReturnType* x) = 0;
207 virtual void do_ProfileInvoke (ProfileInvoke* x) = 0;
208 virtual void do_RuntimeCall (RuntimeCall* x) = 0;
209 virtual void do_MemBar (MemBar* x) = 0;
210 virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
211#ifdef ASSERT
212 virtual void do_Assert (Assert* x) = 0;
213#endif
214};
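
// How the dispatch works (see the LEAF macro further below): each concrete
// instruction overrides Instruction::visit() to call back into the matching
// do_X() function. The class below is a hypothetical sketch; a real subclass
// must override every pure-virtual do_X() above, which is omitted here for
// brevity, so this is not compilable as-is.
//
//   class FieldLoadCounter: public InstructionVisitor {
//    public:
//     int _loads;
//     FieldLoadCounter(): _loads(0) {}
//     virtual void do_LoadField(LoadField* x) { _loads++; }
//     // ... empty implementations of the remaining do_X() functions ...
//   };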
215
216
217// Hashing support
218//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!
221
222#define HASH1(x1 ) ((intx)(x1))
223#define HASH2(x1, x2 ) ((HASH1(x1 ) << 7) ^ HASH1(x2))
224#define HASH3(x1, x2, x3 ) ((HASH2(x1, x2 ) << 7) ^ HASH1(x3))
225#define HASH4(x1, x2, x3, x4) ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))
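
// As a worked example of the composition above (conceptually; the actual
// macro expansion carries extra parentheses):
//
//   HASH3(x1, x2, x3)  ==  (( ((intx)x1 << 7) ^ (intx)x2 ) << 7) ^ (intx)x3
//
// i.e. every additional component is mixed in by shifting the running hash
// left by 7 bits and xor-ing in the new value.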
226
227
// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default
// implementation disables value numbering. Each instruction that can be
// value-numbered should define corresponding hash() and is_equal(Value)
// functions via the macros below. The f arguments specify all the values,
// opcodes, etc. that need to be identical for two instructions to be
// considered equal.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that 0 is never
//       produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely that we do not handle it specially.
242
243#define HASHING1(class_name, enabled, f1) \
244 virtual intx hash() const { \
245 return (enabled) ? HASH2(name(), f1) : 0; \
246 } \
247 virtual bool is_equal(Value v) const { \
248 if (!(enabled) ) return false; \
249 class_name* _v = v->as_##class_name(); \
250 if (_v == NULL ) return false; \
251 if (f1 != _v->f1) return false; \
252 return true; \
253 } \
254
255
256#define HASHING2(class_name, enabled, f1, f2) \
257 virtual intx hash() const { \
258 return (enabled) ? HASH3(name(), f1, f2) : 0; \
259 } \
260 virtual bool is_equal(Value v) const { \
261 if (!(enabled) ) return false; \
262 class_name* _v = v->as_##class_name(); \
263 if (_v == NULL ) return false; \
264 if (f1 != _v->f1) return false; \
265 if (f2 != _v->f2) return false; \
266 return true; \
267 } \
268
269
270#define HASHING3(class_name, enabled, f1, f2, f3) \
271 virtual intx hash() const { \
272 return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
273 } \
274 virtual bool is_equal(Value v) const { \
275 if (!(enabled) ) return false; \
276 class_name* _v = v->as_##class_name(); \
277 if (_v == NULL ) return false; \
278 if (f1 != _v->f1) return false; \
279 if (f2 != _v->f2) return false; \
280 if (f3 != _v->f3) return false; \
281 return true; \
282 } \
283
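
// Illustrative use of these macros (LoadIndexed further below does exactly
// this): inside a leaf class, HASHING2 generates hash() and is_equal()
// implementations that compare the listed components, here the (substituted)
// array and index values.
//
//   LEAF(LoadIndexed, AccessIndexed)
//     // ... fields, constructor, accessors ...
//     HASHING2(LoadIndexed, true, array()->subst(), index()->subst())
//   };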
284
285// The mother of all instructions...
286
287class Instruction: public CompilationResourceObj {
288 private:
289 int _id; // the unique instruction id
290#ifndef PRODUCT
291 int _printable_bci; // the bci of the instruction for printing
292#endif
  int _use_count; // the number of instructions referring to this value (w/o prev/next); only roots can have use count = 0 or > 1
294 int _pin_state; // set of PinReason describing the reason for pinning
295 ValueType* _type; // the instruction value type
296 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions)
297 Instruction* _subst; // the substitution instruction if any
298 LIR_Opr _operand; // LIR specific information
299 unsigned int _flags; // Flag bits
300
301 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL)
302 ValueStack* _exception_state; // Copy of state for exception handling
303 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction
304
305 friend class UseCountComputer;
306 friend class BlockBegin;
307
308 void update_exception_state(ValueStack* state);
309
310 protected:
311 BlockBegin* _block; // Block that contains this instruction
312
313 void set_type(ValueType* type) {
314 assert(type != NULL, "type must exist");
315 _type = type;
316 }
317
318 // Helper class to keep track of which arguments need a null check
319 class ArgsNonNullState {
320 private:
321 int _nonnull_state; // mask identifying which args are nonnull
322 public:
323 ArgsNonNullState()
324 : _nonnull_state(AllBits) {}
325
    // Does argument number i need a null check?
    bool arg_needs_null_check(int i) const {
      // No data is kept for arguments beyond position 31, so
      // conservatively assume that they need a null check.
330 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
331 return is_set_nth_bit(_nonnull_state, i);
332 }
333 return true;
334 }
335
336 // Set whether argument number i needs a null check or not
337 void set_arg_needs_null_check(int i, bool check) {
338 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
339 if (check) {
340 _nonnull_state |= nth_bit(i);
341 } else {
342 _nonnull_state &= ~(nth_bit(i));
343 }
344 }
345 }
346 };
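
  // Example of the bookkeeping above (illustrative): after
  //
  //   ArgsNonNullState s;
  //   s.set_arg_needs_null_check(2, false);
  //
  // argument 2 no longer needs a null check while all other arguments still
  // do, because _nonnull_state starts out as AllBits and only bit 2 has been
  // cleared.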
347
348 public:
349 void* operator new(size_t size) throw() {
350 Compilation* c = Compilation::current();
351 void* res = c->arena()->Amalloc(size);
352 ((Instruction*)res)->_id = c->get_next_id();
353 return res;
354 }
355
356 static const int no_bci = -99;
357
358 enum InstructionFlag {
359 NeedsNullCheckFlag = 0,
360 CanTrapFlag,
361 DirectCompareFlag,
362 IsEliminatedFlag,
363 IsSafepointFlag,
364 IsStaticFlag,
365 IsStrictfpFlag,
366 NeedsStoreCheckFlag,
367 NeedsWriteBarrierFlag,
368 PreservesStateFlag,
369 TargetIsFinalFlag,
370 TargetIsLoadedFlag,
371 TargetIsStrictfpFlag,
372 UnorderedIsTrueFlag,
373 NeedsPatchingFlag,
374 ThrowIncompatibleClassChangeErrorFlag,
375 InvokeSpecialReceiverCheckFlag,
376 ProfileMDOFlag,
377 IsLinkedInBlockFlag,
378 NeedsRangeCheckFlag,
379 InWorkListFlag,
380 DeoptimizeOnException,
381 InstructionLastFlag
382 };
383
384 public:
  bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; }
  void set_flag(InstructionFlag id, bool f)  { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }
387
388 // 'globally' used condition values
389 enum Condition {
390 eql, neq, lss, leq, gtr, geq, aeq, beq
391 };
392
  // Instructions may be pinned for many reasons; under certain conditions,
  // with enough knowledge, it is possible to safely unpin them.
  enum PinReason {
      PinUnknown                 = 1 << 0
    , PinExplicitNullCheck       = 1 << 3
    , PinStackForStateSplit      = 1 << 12
    , PinStateSplitConstructor   = 1 << 13
    , PinGlobalValueNumbering    = 1 << 14
  };
402
403 static Condition mirror(Condition cond);
404 static Condition negate(Condition cond);
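
  // For example (the definitions live in the corresponding .cpp file):
  //   mirror(lss) == gtr   -- x < y is the same test as y > x with the
  //                           operands swapped
  //   negate(lss) == geq   -- !(x < y) is x >= y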
405
406 // initialization
407 static int number_of_instructions() {
408 return Compilation::current()->number_of_instructions();
409 }
410
411 // creation
412 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false)
413 :
414#ifndef PRODUCT
415 _printable_bci(-99),
416#endif
417 _use_count(0)
418 , _pin_state(0)
419 , _type(type)
420 , _next(NULL)
421 , _subst(NULL)
422 , _operand(LIR_OprFact::illegalOpr)
423 , _flags(0)
424 , _state_before(state_before)
425 , _exception_handlers(NULL)
426 , _block(NULL)
427 {
428 check_state(state_before);
429 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist");
430 update_exception_state(_state_before);
431 }
432
433 // accessors
434 int id() const { return _id; }
435#ifndef PRODUCT
436 bool has_printable_bci() const { return _printable_bci != -99; }
437 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; }
438 void set_printable_bci(int bci) { _printable_bci = bci; }
439#endif
440 int dominator_depth();
441 int use_count() const { return _use_count; }
442 int pin_state() const { return _pin_state; }
443 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; }
444 ValueType* type() const { return _type; }
445 BlockBegin *block() const { return _block; }
446 Instruction* prev(); // use carefully, expensive operation
447 Instruction* next() const { return _next; }
448 bool has_subst() const { return _subst != NULL; }
449 Instruction* subst() { return _subst == NULL ? this : _subst->subst(); }
450 LIR_Opr operand() const { return _operand; }
451
452 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); }
453 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); }
454 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); }
455 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; }
456
457 bool has_uses() const { return use_count() > 0; }
458 ValueStack* state_before() const { return _state_before; }
459 ValueStack* exception_state() const { return _exception_state; }
460 virtual bool needs_exception_state() const { return true; }
461 XHandlers* exception_handlers() const { return _exception_handlers; }
462
463 // manipulation
464 void pin(PinReason reason) { _pin_state |= reason; }
465 void pin() { _pin_state |= PinUnknown; }
466 // DANGEROUS: only used by EliminateStores
467 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; }
468
469 Instruction* set_next(Instruction* next) {
    assert(next != NULL, "must not be NULL");
    assert(next->has_printable_bci(), "_printable_bci should have been set");
472 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next");
473 assert(next->can_be_linked(), "shouldn't link these instructions into list");
474
475 BlockBegin *block = this->block();
476 next->_block = block;
477
478 next->set_flag(Instruction::IsLinkedInBlockFlag, true);
479 _next = next;
480 return next;
481 }
482
483 Instruction* set_next(Instruction* next, int bci) {
484#ifndef PRODUCT
485 next->set_printable_bci(bci);
486#endif
487 return set_next(next);
488 }
489
490 // when blocks are merged
491 void fixup_block_pointers() {
492 Instruction *cur = next()->next(); // next()'s block is set in set_next
493 while (cur && cur->_block != block()) {
494 cur->_block = block();
495 cur = cur->next();
496 }
497 }
498
499 Instruction *insert_after(Instruction *i) {
500 Instruction* n = _next;
501 set_next(i);
502 i->set_next(n);
503 return _next;
504 }
505
506 Instruction *insert_after_same_bci(Instruction *i) {
507#ifndef PRODUCT
508 i->set_printable_bci(printable_bci());
509#endif
510 return insert_after(i);
511 }
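
  // Illustrative effect of the list manipulation above: if the block
  // currently contains  a -> b  and we call  a->insert_after(i), the result
  // is  a -> i -> b, with i's _block pointer and IsLinkedInBlockFlag set by
  // set_next().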
512
513 void set_subst(Instruction* subst) {
514 assert(subst == NULL ||
515 type()->base() == subst->type()->base() ||
516 subst->type()->base() == illegalType, "type can't change");
517 _subst = subst;
518 }
519 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; }
520 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; }
521 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; }
522
523 // machine-specifics
524 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; }
525 void clear_operand() { _operand = LIR_OprFact::illegalOpr; }
526
527 // generic
528 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro
529 virtual Phi* as_Phi() { return NULL; }
530 virtual Local* as_Local() { return NULL; }
531 virtual Constant* as_Constant() { return NULL; }
532 virtual AccessField* as_AccessField() { return NULL; }
533 virtual LoadField* as_LoadField() { return NULL; }
534 virtual StoreField* as_StoreField() { return NULL; }
535 virtual AccessArray* as_AccessArray() { return NULL; }
536 virtual ArrayLength* as_ArrayLength() { return NULL; }
537 virtual AccessIndexed* as_AccessIndexed() { return NULL; }
538 virtual LoadIndexed* as_LoadIndexed() { return NULL; }
539 virtual StoreIndexed* as_StoreIndexed() { return NULL; }
540 virtual NegateOp* as_NegateOp() { return NULL; }
541 virtual Op2* as_Op2() { return NULL; }
542 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; }
543 virtual ShiftOp* as_ShiftOp() { return NULL; }
544 virtual LogicOp* as_LogicOp() { return NULL; }
545 virtual CompareOp* as_CompareOp() { return NULL; }
546 virtual IfOp* as_IfOp() { return NULL; }
547 virtual Convert* as_Convert() { return NULL; }
548 virtual NullCheck* as_NullCheck() { return NULL; }
549 virtual OsrEntry* as_OsrEntry() { return NULL; }
550 virtual StateSplit* as_StateSplit() { return NULL; }
551 virtual Invoke* as_Invoke() { return NULL; }
552 virtual NewInstance* as_NewInstance() { return NULL; }
553 virtual NewArray* as_NewArray() { return NULL; }
554 virtual NewTypeArray* as_NewTypeArray() { return NULL; }
555 virtual NewObjectArray* as_NewObjectArray() { return NULL; }
556 virtual NewMultiArray* as_NewMultiArray() { return NULL; }
557 virtual TypeCheck* as_TypeCheck() { return NULL; }
558 virtual CheckCast* as_CheckCast() { return NULL; }
559 virtual InstanceOf* as_InstanceOf() { return NULL; }
560 virtual TypeCast* as_TypeCast() { return NULL; }
561 virtual AccessMonitor* as_AccessMonitor() { return NULL; }
562 virtual MonitorEnter* as_MonitorEnter() { return NULL; }
563 virtual MonitorExit* as_MonitorExit() { return NULL; }
564 virtual Intrinsic* as_Intrinsic() { return NULL; }
565 virtual BlockBegin* as_BlockBegin() { return NULL; }
566 virtual BlockEnd* as_BlockEnd() { return NULL; }
567 virtual Goto* as_Goto() { return NULL; }
568 virtual If* as_If() { return NULL; }
569 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; }
570 virtual TableSwitch* as_TableSwitch() { return NULL; }
571 virtual LookupSwitch* as_LookupSwitch() { return NULL; }
572 virtual Return* as_Return() { return NULL; }
573 virtual Throw* as_Throw() { return NULL; }
574 virtual Base* as_Base() { return NULL; }
575 virtual RoundFP* as_RoundFP() { return NULL; }
576 virtual ExceptionObject* as_ExceptionObject() { return NULL; }
577 virtual UnsafeOp* as_UnsafeOp() { return NULL; }
578 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; }
579 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; }
580
581#ifdef ASSERT
582 virtual Assert* as_Assert() { return NULL; }
583#endif
584
585 virtual void visit(InstructionVisitor* v) = 0;
586
587 virtual bool can_trap() const { return false; }
588
589 virtual void input_values_do(ValueVisitor* f) = 0;
590 virtual void state_values_do(ValueVisitor* f);
591 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ }
592 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); }
593
594 virtual ciType* exact_type() const;
595 virtual ciType* declared_type() const { return NULL; }
596
597 // hashing
598 virtual const char* name() const = 0;
599 HASHING1(Instruction, false, id()) // hashing disabled by default
600
601 // debugging
602 static void check_state(ValueStack* state) PRODUCT_RETURN;
603 void print() PRODUCT_RETURN;
604 void print_line() PRODUCT_RETURN;
605 void print(InstructionPrinter& ip) PRODUCT_RETURN;
606};
607
608
609// The following macros are used to define base (i.e., non-leaf)
610// and leaf instruction classes. They define class-name related
611// generic functionality in one place.
612
613#define BASE(class_name, super_class_name) \
614 class class_name: public super_class_name { \
615 public: \
616 virtual class_name* as_##class_name() { return this; } \
617
618
619#define LEAF(class_name, super_class_name) \
620 BASE(class_name, super_class_name) \
621 public: \
622 virtual const char* name() const { return #class_name; } \
623 virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \
624
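
// As an illustration, LEAF(Goto, BlockEnd) expands to the beginning of a
// class declaration roughly like this (the class body and the closing brace
// follow at the point of use):
//
//   class Goto: public BlockEnd {
//    public:
//     virtual Goto* as_Goto() { return this; }
//    public:
//     virtual const char* name() const { return "Goto"; }
//     virtual void visit(InstructionVisitor* v) { v->do_Goto(this); }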
625
626// Debugging support
627
628
629#ifdef ASSERT
630class AssertValues: public ValueVisitor {
631 void visit(Value* x) { assert((*x) != NULL, "value must exist"); }
632};
633 #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); }
634#else
635 #define ASSERT_VALUES
636#endif // ASSERT
637
638
639// A Phi is a phi function in the sense of SSA form. It stands for
640// the value of a local variable at the beginning of a join block.
641// A Phi consists of n operands, one for every incoming branch.
642
643LEAF(Phi, Instruction)
644 private:
645 int _pf_flags; // the flags of the phi function
  int _index; // operand stack slot (encoded as a negative index) or local variable index (>= 0)
647 public:
648 // creation
649 Phi(ValueType* type, BlockBegin* b, int index)
650 : Instruction(type->base())
651 , _pf_flags(0)
652 , _index(index)
653 {
654 _block = b;
655 NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
656 if (type->is_illegal()) {
657 make_illegal();
658 }
659 }
660
661 // flags
662 enum Flag {
663 no_flag = 0,
664 visited = 1 << 0,
665 cannot_simplify = 1 << 1
666 };
667
668 // accessors
669 bool is_local() const { return _index >= 0; }
670 bool is_on_stack() const { return !is_local(); }
671 int local_index() const { assert(is_local(), ""); return _index; }
672 int stack_index() const { assert(is_on_stack(), ""); return -(_index+1); }
673
674 Value operand_at(int i) const;
675 int operand_count() const;
676
677 void set(Flag f) { _pf_flags |= f; }
678 void clear(Flag f) { _pf_flags &= ~f; }
679 bool is_set(Flag f) const { return (_pf_flags & f) != 0; }
680
681 // Invalidates phis corresponding to merges of locals of two different types
682 // (these should never be referenced, otherwise the bytecodes are illegal)
683 void make_illegal() {
684 set(cannot_simplify);
685 set_type(illegalType);
686 }
687
688 bool is_illegal() const {
689 return type()->is_illegal();
690 }
691
692 // generic
693 virtual void input_values_do(ValueVisitor* f) {
694 }
695};
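
// Index encoding example (follows from the accessors above): a Phi created
// with index 3 refers to local #3 (is_local() is true), while a Phi created
// with index -1 refers to operand stack slot 0, since
// stack_index() == -(_index + 1).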
696
697
// A Local is a placeholder for an incoming argument (parameter) of the
// method being compiled.
699LEAF(Local, Instruction)
700 private:
701 int _java_index; // the local index within the method to which the local belongs
  bool _is_receiver; // true if this local holds the receiver ("this") of a non-static method
703 ciType* _declared_type;
704 public:
705 // creation
706 Local(ciType* declared, ValueType* type, int index, bool receiver)
707 : Instruction(type)
708 , _java_index(index)
709 , _is_receiver(receiver)
710 , _declared_type(declared)
711 {
712 NOT_PRODUCT(set_printable_bci(-1));
713 }
714
715 // accessors
716 int java_index() const { return _java_index; }
717 bool is_receiver() const { return _is_receiver; }
718
719 virtual ciType* declared_type() const { return _declared_type; }
720
721 // generic
722 virtual void input_values_do(ValueVisitor* f) { /* no values */ }
723};
724
725
726LEAF(Constant, Instruction)
727 public:
728 // creation
729 Constant(ValueType* type):
730 Instruction(type, NULL, /*type_is_constant*/ true)
731 {
732 assert(type->is_constant(), "must be a constant");
733 }
734
735 Constant(ValueType* type, ValueStack* state_before):
736 Instruction(type, state_before, /*type_is_constant*/ true)
737 {
738 assert(state_before != NULL, "only used for constants which need patching");
739 assert(type->is_constant(), "must be a constant");
    // since it needs patching, it must be pinned
741 pin();
742 }
743
744 // generic
745 virtual bool can_trap() const { return state_before() != NULL; }
746 virtual void input_values_do(ValueVisitor* f) { /* no values */ }
747
748 virtual intx hash() const;
749 virtual bool is_equal(Value v) const;
750
751 virtual ciType* exact_type() const;
752
753 enum CompareResult { not_comparable = -1, cond_false, cond_true };
754
755 virtual CompareResult compare(Instruction::Condition condition, Value right) const;
756 BlockBegin* compare(Instruction::Condition cond, Value right,
757 BlockBegin* true_sux, BlockBegin* false_sux) const {
758 switch (compare(cond, right)) {
759 case not_comparable:
760 return NULL;
761 case cond_false:
762 return false_sux;
763 case cond_true:
764 return true_sux;
765 default:
766 ShouldNotReachHere();
767 return NULL;
768 }
769 }
770};
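
// Illustrative use of the folding helper above, assuming the virtual
// compare() (defined in the .cpp file) can evaluate the two constants: for
// integer constants 3 and 5, three->compare(Instruction::lss, five, t_sux,
// f_sux) returns t_sux because the comparison folds to cond_true; when the
// operands are not comparable at compile time, NULL is returned and the
// branch is left untouched.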
771
772
773BASE(AccessField, Instruction)
774 private:
775 Value _obj;
776 int _offset;
777 ciField* _field;
778 NullCheck* _explicit_null_check; // For explicit null check elimination
779
780 public:
781 // creation
782 AccessField(Value obj, int offset, ciField* field, bool is_static,
783 ValueStack* state_before, bool needs_patching)
784 : Instruction(as_ValueType(field->type()->basic_type()), state_before)
785 , _obj(obj)
786 , _offset(offset)
787 , _field(field)
788 , _explicit_null_check(NULL)
789 {
790 set_needs_null_check(!is_static);
791 set_flag(IsStaticFlag, is_static);
792 set_flag(NeedsPatchingFlag, needs_patching);
793 ASSERT_VALUES
    // pin all instructions that access memory
795 pin();
796 }
797
798 // accessors
799 Value obj() const { return _obj; }
800 int offset() const { return _offset; }
801 ciField* field() const { return _field; }
802 BasicType field_type() const { return _field->type()->basic_type(); }
803 bool is_static() const { return check_flag(IsStaticFlag); }
804 NullCheck* explicit_null_check() const { return _explicit_null_check; }
805 bool needs_patching() const { return check_flag(NeedsPatchingFlag); }
806
807 // Unresolved getstatic and putstatic can cause initialization.
808 // Technically it occurs at the Constant that materializes the base
809 // of the static fields but it's simpler to model it here.
810 bool is_init_point() const { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }
811
812 // manipulation
813
814 // Under certain circumstances, if a previous NullCheck instruction
815 // proved the target object non-null, we can eliminate the explicit
816 // null check and do an implicit one, simply specifying the debug
817 // information from the NullCheck. This field should only be consulted
818 // if needs_null_check() is true.
819 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }
820
821 // generic
822 virtual bool can_trap() const { return needs_null_check() || needs_patching(); }
823 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
824};
825
826
827LEAF(LoadField, AccessField)
828 public:
829 // creation
830 LoadField(Value obj, int offset, ciField* field, bool is_static,
831 ValueStack* state_before, bool needs_patching)
832 : AccessField(obj, offset, field, is_static, state_before, needs_patching)
833 {}
834
835 ciType* declared_type() const;
836
837 // generic
838 HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset()) // cannot be eliminated if needs patching or if volatile
839};
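
// Consequence of the hashing above (illustrative): two LoadField instructions
// reading the same non-volatile field that needs no patching from the same
// (substituted) object hash equal, so value numbering can common them - e.g.
// the second load in  o.f + o.f  can reuse the first, provided no intervening
// store kills it (killing memory state is handled by ValueMap, not here).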
840
841
842LEAF(StoreField, AccessField)
843 private:
844 Value _value;
845
846 public:
847 // creation
848 StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
849 ValueStack* state_before, bool needs_patching)
850 : AccessField(obj, offset, field, is_static, state_before, needs_patching)
851 , _value(value)
852 {
853 set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
854 ASSERT_VALUES
855 pin();
856 }
857
858 // accessors
859 Value value() const { return _value; }
860 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); }
861
862 // generic
863 virtual void input_values_do(ValueVisitor* f) { AccessField::input_values_do(f); f->visit(&_value); }
864};
865
866
867BASE(AccessArray, Instruction)
868 private:
869 Value _array;
870
871 public:
872 // creation
873 AccessArray(ValueType* type, Value array, ValueStack* state_before)
874 : Instruction(type, state_before)
875 , _array(array)
876 {
877 set_needs_null_check(true);
878 ASSERT_VALUES
    pin(); // instruction with side effect (may throw a NullPointerException or a range check exception)
880 }
881
882 Value array() const { return _array; }
883
884 // generic
885 virtual bool can_trap() const { return needs_null_check(); }
886 virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); }
887};
888
889
890LEAF(ArrayLength, AccessArray)
891 private:
892 NullCheck* _explicit_null_check; // For explicit null check elimination
893
894 public:
895 // creation
896 ArrayLength(Value array, ValueStack* state_before)
897 : AccessArray(intType, array, state_before)
898 , _explicit_null_check(NULL) {}
899
900 // accessors
901 NullCheck* explicit_null_check() const { return _explicit_null_check; }
902
903 // setters
904 // See LoadField::set_explicit_null_check for documentation
905 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }
906
907 // generic
908 HASHING1(ArrayLength, true, array()->subst())
909};
910
911
912BASE(AccessIndexed, AccessArray)
913 private:
914 Value _index;
915 Value _length;
916 BasicType _elt_type;
917 bool _mismatched;
918
919 public:
920 // creation
921 AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched)
922 : AccessArray(as_ValueType(elt_type), array, state_before)
923 , _index(index)
924 , _length(length)
925 , _elt_type(elt_type)
926 , _mismatched(mismatched)
927 {
928 set_flag(Instruction::NeedsRangeCheckFlag, true);
929 ASSERT_VALUES
930 }
931
932 // accessors
933 Value index() const { return _index; }
934 Value length() const { return _length; }
935 BasicType elt_type() const { return _elt_type; }
936 bool mismatched() const { return _mismatched; }
937
938 void clear_length() { _length = NULL; }
939 // perform elimination of range checks involving constants
940 bool compute_needs_range_check();
941
942 // generic
943 virtual void input_values_do(ValueVisitor* f) { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); }
944};
945
946
947LEAF(LoadIndexed, AccessIndexed)
948 private:
949 NullCheck* _explicit_null_check; // For explicit null check elimination
950
951 public:
952 // creation
953 LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false)
954 : AccessIndexed(array, index, length, elt_type, state_before, mismatched)
955 , _explicit_null_check(NULL) {}
956
957 // accessors
958 NullCheck* explicit_null_check() const { return _explicit_null_check; }
959
960 // setters
961 // See LoadField::set_explicit_null_check for documentation
962 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }
963
964 ciType* exact_type() const;
965 ciType* declared_type() const;
966
967 // generic
968 HASHING2(LoadIndexed, true, array()->subst(), index()->subst())
969};
970
971
972LEAF(StoreIndexed, AccessIndexed)
973 private:
974 Value _value;
975
976 ciMethod* _profiled_method;
977 int _profiled_bci;
978 bool _check_boolean;
979
980 public:
981 // creation
982 StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before,
983 bool check_boolean, bool mismatched = false)
984 : AccessIndexed(array, index, length, elt_type, state_before, mismatched)
985 , _value(value), _profiled_method(NULL), _profiled_bci(0), _check_boolean(check_boolean)
986 {
987 set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
988 set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
989 ASSERT_VALUES
990 pin();
991 }
992
993 // accessors
994 Value value() const { return _value; }
995 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); }
996 bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); }
997 bool check_boolean() const { return _check_boolean; }
998 // Helpers for MethodData* profiling
999 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
1000 void set_profiled_method(ciMethod* method) { _profiled_method = method; }
1001 void set_profiled_bci(int bci) { _profiled_bci = bci; }
1002 bool should_profile() const { return check_flag(ProfileMDOFlag); }
1003 ciMethod* profiled_method() const { return _profiled_method; }
1004 int profiled_bci() const { return _profiled_bci; }
1005 // generic
1006 virtual void input_values_do(ValueVisitor* f) { AccessIndexed::input_values_do(f); f->visit(&_value); }
1007};
1008
1009
1010LEAF(NegateOp, Instruction)
1011 private:
1012 Value _x;
1013
1014 public:
1015 // creation
1016 NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
1017 ASSERT_VALUES
1018 }
1019
1020 // accessors
1021 Value x() const { return _x; }
1022
1023 // generic
1024 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); }
1025};
1026
1027
1028BASE(Op2, Instruction)
1029 private:
1030 Bytecodes::Code _op;
1031 Value _x;
1032 Value _y;
1033
1034 public:
1035 // creation
1036 Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
1037 : Instruction(type, state_before)
1038 , _op(op)
1039 , _x(x)
1040 , _y(y)
1041 {
1042 ASSERT_VALUES
1043 }
1044
1045 // accessors
1046 Bytecodes::Code op() const { return _op; }
1047 Value x() const { return _x; }
1048 Value y() const { return _y; }
1049
1050 // manipulators
1051 void swap_operands() {
1052 assert(is_commutative(), "operation must be commutative");
1053 Value t = _x; _x = _y; _y = t;
1054 }
1055
1056 // generic
1057 virtual bool is_commutative() const { return false; }
1058 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); }
1059};
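
// Illustrative note on swap_operands(): for a commutative operation a
// canonicalization pass may swap x and y, for example to move a constant
// operand into the y() position so that later pattern matching only has to
// check one side; for non-commutative operations the assert fires in debug
// builds.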
1060
1061
1062LEAF(ArithmeticOp, Op2)
1063 public:
1064 // creation
1065 ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
1066 : Op2(x->type()->meet(y->type()), op, x, y, state_before)
1067 {
1068 set_flag(IsStrictfpFlag, is_strictfp);
1069 if (can_trap()) pin();
1070 }
1071
1072 // accessors
1073 bool is_strictfp() const { return check_flag(IsStrictfpFlag); }
1074
1075 // generic
1076 virtual bool is_commutative() const;
1077 virtual bool can_trap() const;
1078 HASHING3(Op2, true, op(), x()->subst(), y()->subst())
1079};
1080
1081
1082LEAF(ShiftOp, Op2)
1083 public:
1084 // creation
1085 ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}
1086
1087 // generic
1088 HASHING3(Op2, true, op(), x()->subst(), y()->subst())
1089};
1090
1091
1092LEAF(LogicOp, Op2)
1093 public:
1094 // creation
1095 LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}
1096
1097 // generic
1098 virtual bool is_commutative() const;
1099 HASHING3(Op2, true, op(), x()->subst(), y()->subst())
1100};
1101
1102
1103LEAF(CompareOp, Op2)
1104 public:
1105 // creation
1106 CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
1107 : Op2(intType, op, x, y, state_before)
1108 {}
1109
1110 // generic
1111 HASHING3(Op2, true, op(), x()->subst(), y()->subst())
1112};
1113
1114
1115LEAF(IfOp, Op2)
1116 private:
1117 Value _tval;
1118 Value _fval;
1119
1120 public:
1121 // creation
1122 IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
1123 : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
1124 , _tval(tval)
1125 , _fval(fval)
1126 {
1127 ASSERT_VALUES
1128 assert(tval->type()->tag() == fval->type()->tag(), "types must match");
1129 }
1130
1131 // accessors
1132 virtual bool is_commutative() const;
1133 Bytecodes::Code op() const { ShouldNotCallThis(); return Bytecodes::_illegal; }
1134 Condition cond() const { return (Condition)Op2::op(); }
1135 Value tval() const { return _tval; }
1136 Value fval() const { return _fval; }
1137
1138 // generic
1139 virtual void input_values_do(ValueVisitor* f) { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
1140};
1141
1142
1143LEAF(Convert, Instruction)
1144 private:
1145 Bytecodes::Code _op;
1146 Value _value;
1147
1148 public:
1149 // creation
1150 Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
1151 ASSERT_VALUES
1152 }
1153
1154 // accessors
1155 Bytecodes::Code op() const { return _op; }
1156 Value value() const { return _value; }
1157
1158 // generic
1159 virtual void input_values_do(ValueVisitor* f) { f->visit(&_value); }
1160 HASHING2(Convert, true, op(), value()->subst())
1161};
1162
1163
1164LEAF(NullCheck, Instruction)
1165 private:
1166 Value _obj;
1167
1168 public:
1169 // creation
1170 NullCheck(Value obj, ValueStack* state_before)
1171 : Instruction(obj->type()->base(), state_before)
1172 , _obj(obj)
1173 {
1174 ASSERT_VALUES
1175 set_can_trap(true);
1176 assert(_obj->type()->is_object(), "null check must be applied to objects only");
1177 pin(Instruction::PinExplicitNullCheck);
1178 }
1179
1180 // accessors
1181 Value obj() const { return _obj; }
1182
1183 // setters
1184 void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); }
1185
1186 // generic
1187 virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
1188 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
1189 HASHING1(NullCheck, true, obj()->subst())
1190};
1191
1192
1193// This node is supposed to cast the type of another node to a more precise
1194// declared type.
1195LEAF(TypeCast, Instruction)
1196 private:
1197 ciType* _declared_type;
1198 Value _obj;
1199
1200 public:
1201 // The type of this node is the same type as the object type (and it might be constant).
1202 TypeCast(ciType* type, Value obj, ValueStack* state_before)
1203 : Instruction(obj->type(), state_before, obj->type()->is_constant()),
1204 _declared_type(type),
1205 _obj(obj) {}
1206
1207 // accessors
1208 ciType* declared_type() const { return _declared_type; }
1209 Value obj() const { return _obj; }
1210
1211 // generic
1212 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
1213};
1214
1215
1216BASE(StateSplit, Instruction)
1217 private:
1218 ValueStack* _state;
1219
1220 protected:
1221 static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);
1222
1223 public:
1224 // creation
1225 StateSplit(ValueType* type, ValueStack* state_before = NULL)
1226 : Instruction(type, state_before)
1227 , _state(NULL)
1228 {
1229 pin(PinStateSplitConstructor);
1230 }
1231
1232 // accessors
1233 ValueStack* state() const { return _state; }
1234 IRScope* scope() const; // the state's scope
1235
1236 // manipulation
1237 void set_state(ValueStack* state) { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }
1238
1239 // generic
1240 virtual void input_values_do(ValueVisitor* f) { /* no values */ }
1241 virtual void state_values_do(ValueVisitor* f);
1242};
1243
1244
1245LEAF(Invoke, StateSplit)
1246 private:
1247 Bytecodes::Code _code;
1248 Value _recv;
1249 Values* _args;
1250 BasicTypeList* _signature;
1251 int _vtable_index;
1252 ciMethod* _target;
1253
1254 public:
1255 // creation
1256 Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
1257 int vtable_index, ciMethod* target, ValueStack* state_before);
1258
1259 // accessors
1260 Bytecodes::Code code() const { return _code; }
1261 Value receiver() const { return _recv; }
1262 bool has_receiver() const { return receiver() != NULL; }
1263 int number_of_arguments() const { return _args->length(); }
1264 Value argument_at(int i) const { return _args->at(i); }
1265 int vtable_index() const { return _vtable_index; }
1266 BasicTypeList* signature() const { return _signature; }
1267 ciMethod* target() const { return _target; }
1268
1269 ciType* declared_type() const;
1270
1271 // Returns false if target is not loaded
1272 bool target_is_final() const { return check_flag(TargetIsFinalFlag); }
1273 bool target_is_loaded() const { return check_flag(TargetIsLoadedFlag); }
1274 // Returns false if target is not loaded
1275 bool target_is_strictfp() const { return check_flag(TargetIsStrictfpFlag); }
1276
1277 // JSR 292 support
1278 bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; }
1279 bool is_method_handle_intrinsic() const { return target()->is_method_handle_intrinsic(); }
1280
1281 virtual bool needs_exception_state() const { return false; }
1282
1283 // generic
1284 virtual bool can_trap() const { return true; }
1285 virtual void input_values_do(ValueVisitor* f) {
1286 StateSplit::input_values_do(f);
1287 if (has_receiver()) f->visit(&_recv);
1288 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
1289 }
1290 virtual void state_values_do(ValueVisitor *f);
1291};
1292
1293
1294LEAF(NewInstance, StateSplit)
1295 private:
1296 ciInstanceKlass* _klass;
1297 bool _is_unresolved;
1298
1299 public:
1300 // creation
1301 NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved)
1302 : StateSplit(instanceType, state_before)
1303 , _klass(klass), _is_unresolved(is_unresolved)
1304 {}
1305
1306 // accessors
1307 ciInstanceKlass* klass() const { return _klass; }
1308 bool is_unresolved() const { return _is_unresolved; }
1309
1310 virtual bool needs_exception_state() const { return false; }
1311
1312 // generic
1313 virtual bool can_trap() const { return true; }
1314 ciType* exact_type() const;
1315 ciType* declared_type() const;
1316};
1317
1318
1319BASE(NewArray, StateSplit)
1320 private:
1321 Value _length;
1322
1323 public:
1324 // creation
1325 NewArray(Value length, ValueStack* state_before)
1326 : StateSplit(objectType, state_before)
1327 , _length(length)
1328 {
1329 // Do not ASSERT_VALUES since length is NULL for NewMultiArray
1330 }
1331
1332 // accessors
1333 Value length() const { return _length; }
1334
1335 virtual bool needs_exception_state() const { return false; }
1336
1337 ciType* exact_type() const { return NULL; }
1338 ciType* declared_type() const;
1339
1340 // generic
1341 virtual bool can_trap() const { return true; }
1342 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); }
1343};
1344
1345
1346LEAF(NewTypeArray, NewArray)
1347 private:
1348 BasicType _elt_type;
1349
1350 public:
1351 // creation
1352 NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
1353 : NewArray(length, state_before)
1354 , _elt_type(elt_type)
1355 {}
1356
1357 // accessors
1358 BasicType elt_type() const { return _elt_type; }
1359 ciType* exact_type() const;
1360};
1361
1362
1363LEAF(NewObjectArray, NewArray)
1364 private:
1365 ciKlass* _klass;
1366
1367 public:
1368 // creation
1369 NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}
1370
1371 // accessors
1372 ciKlass* klass() const { return _klass; }
1373 ciType* exact_type() const;
1374};
1375
1376
1377LEAF(NewMultiArray, NewArray)
1378 private:
1379 ciKlass* _klass;
1380 Values* _dims;
1381
1382 public:
1383 // creation
1384 NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
1385 ASSERT_VALUES
1386 }
1387
1388 // accessors
1389 ciKlass* klass() const { return _klass; }
1390 Values* dims() const { return _dims; }
1391 int rank() const { return dims()->length(); }
1392
1393 // generic
1394 virtual void input_values_do(ValueVisitor* f) {
1395 // NOTE: we do not call NewArray::input_values_do since "length"
1396 // is meaningless for a multi-dimensional array; passing the
1397 // zeroth element down to NewArray as its length is a bad idea
1398 // since there will be a copy in the "dims" array which doesn't
1399 // get updated, and the value must not be traversed twice. Was bug
1400 // - kbr 4/10/2001
1401 StateSplit::input_values_do(f);
1402 for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
1403 }
1404};
1405
1406
1407BASE(TypeCheck, StateSplit)
1408 private:
1409 ciKlass* _klass;
1410 Value _obj;
1411
1412 ciMethod* _profiled_method;
1413 int _profiled_bci;
1414
1415 public:
1416 // creation
1417 TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
1418 : StateSplit(type, state_before), _klass(klass), _obj(obj),
1419 _profiled_method(NULL), _profiled_bci(0) {
1420 ASSERT_VALUES
1421 set_direct_compare(false);
1422 }
1423
1424 // accessors
1425 ciKlass* klass() const { return _klass; }
1426 Value obj() const { return _obj; }
1427 bool is_loaded() const { return klass() != NULL; }
1428 bool direct_compare() const { return check_flag(DirectCompareFlag); }
1429
1430 // manipulation
1431 void set_direct_compare(bool flag) { set_flag(DirectCompareFlag, flag); }
1432
1433 // generic
1434 virtual bool can_trap() const { return true; }
1435 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }
1436
1437 // Helpers for MethodData* profiling
1438 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
1439 void set_profiled_method(ciMethod* method) { _profiled_method = method; }
1440 void set_profiled_bci(int bci) { _profiled_bci = bci; }
1441 bool should_profile() const { return check_flag(ProfileMDOFlag); }
1442 ciMethod* profiled_method() const { return _profiled_method; }
1443 int profiled_bci() const { return _profiled_bci; }
1444};
1445
1446
1447LEAF(CheckCast, TypeCheck)
1448 public:
1449 // creation
1450 CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
1451 : TypeCheck(klass, obj, objectType, state_before) {}
1452
1453 void set_incompatible_class_change_check() {
1454 set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
1455 }
1456 bool is_incompatible_class_change_check() const {
1457 return check_flag(ThrowIncompatibleClassChangeErrorFlag);
1458 }
1459 void set_invokespecial_receiver_check() {
1460 set_flag(InvokeSpecialReceiverCheckFlag, true);
1461 }
1462 bool is_invokespecial_receiver_check() const {
1463 return check_flag(InvokeSpecialReceiverCheckFlag);
1464 }
1465
1466 virtual bool needs_exception_state() const {
1467 return !is_invokespecial_receiver_check();
1468 }
1469
1470 ciType* declared_type() const;
1471};
1472
1473
1474LEAF(InstanceOf, TypeCheck)
1475 public:
1476 // creation
1477 InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}
1478
1479 virtual bool needs_exception_state() const { return false; }
1480};
1481
1482
1483BASE(AccessMonitor, StateSplit)
1484 private:
1485 Value _obj;
1486 int _monitor_no;
1487
1488 public:
1489 // creation
1490 AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
1491 : StateSplit(illegalType, state_before)
1492 , _obj(obj)
1493 , _monitor_no(monitor_no)
1494 {
1495 set_needs_null_check(true);
1496 ASSERT_VALUES
1497 }
1498
1499 // accessors
1500 Value obj() const { return _obj; }
1501 int monitor_no() const { return _monitor_no; }
1502
1503 // generic
1504 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }
1505};
1506
1507
1508LEAF(MonitorEnter, AccessMonitor)
1509 public:
1510 // creation
1511 MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
1512 : AccessMonitor(obj, monitor_no, state_before)
1513 {
1514 ASSERT_VALUES
1515 }
1516
1517 // generic
1518 virtual bool can_trap() const { return true; }
1519};
1520
1521
1522LEAF(MonitorExit, AccessMonitor)
1523 public:
1524 // creation
1525 MonitorExit(Value obj, int monitor_no)
1526 : AccessMonitor(obj, monitor_no, NULL)
1527 {
1528 ASSERT_VALUES
1529 }
1530};
1531
1532
1533LEAF(Intrinsic, StateSplit)
1534 private:
1535 vmIntrinsics::ID _id;
1536 Values* _args;
1537 Value _recv;
1538 ArgsNonNullState _nonnull_state;
1539
1540 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic cannot
  // trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common subexpression elimination to be
  // performed across the Intrinsic. The default value is false.
1548 Intrinsic(ValueType* type,
1549 vmIntrinsics::ID id,
1550 Values* args,
1551 bool has_receiver,
1552 ValueStack* state_before,
1553 bool preserves_state,
1554 bool cantrap = true)
1555 : StateSplit(type, state_before)
1556 , _id(id)
1557 , _args(args)
1558 , _recv(NULL)
1559 {
1560 assert(args != NULL, "args must exist");
1561 ASSERT_VALUES
1562 set_flag(PreservesStateFlag, preserves_state);
1563 set_flag(CanTrapFlag, cantrap);
1564 if (has_receiver) {
1565 _recv = argument_at(0);
1566 }
1567 set_needs_null_check(has_receiver);
1568
1569 // some intrinsics can't trap, so don't force them to be pinned
1570 if (!can_trap() && !vmIntrinsics::should_be_pinned(_id)) {
1571 unpin(PinStateSplitConstructor);
1572 }
1573 }
1574
1575 // accessors
1576 vmIntrinsics::ID id() const { return _id; }
1577 int number_of_arguments() const { return _args->length(); }
1578 Value argument_at(int i) const { return _args->at(i); }
1579
1580 bool has_receiver() const { return (_recv != NULL); }
1581 Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; }
1582 bool preserves_state() const { return check_flag(PreservesStateFlag); }
1583
1584 bool arg_needs_null_check(int i) const {
1585 return _nonnull_state.arg_needs_null_check(i);
1586 }
1587
1588 void set_arg_needs_null_check(int i, bool check) {
1589 _nonnull_state.set_arg_needs_null_check(i, check);
1590 }
1591
1592 // generic
1593 virtual bool can_trap() const { return check_flag(CanTrapFlag); }
1594 virtual void input_values_do(ValueVisitor* f) {
1595 StateSplit::input_values_do(f);
1596 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
1597 }
1598};
1599
1600
1601class LIR_List;
1602
1603LEAF(BlockBegin, StateSplit)
1604 private:
1605 int _block_id; // the unique block id
1606 int _bci; // start-bci of block
1607 int _depth_first_number; // number of this block in a depth-first ordering
1608 int _linear_scan_number; // number of this block in linear-scan ordering
1609 int _dominator_depth;
1610 int _loop_depth; // the loop nesting level of this block
1611 int _loop_index; // number of the innermost loop of this block
1612 int _flags; // the flags associated with this block
1613
1614 // fields used by BlockListBuilder
1615 int _total_preds; // number of predecessors found by BlockListBuilder
1616 ResourceBitMap _stores_to_locals; // bit is set when a local variable is stored in the block
1617
1618 // SSA specific fields: (factor out later)
1619 BlockList _successors; // the successors of this block
1620 BlockList _predecessors; // the predecessors of this block
1621 BlockList _dominates; // list of blocks that are dominated by this block
1622 BlockBegin* _dominator; // the dominator of this block
1623 // SSA specific ends
1624 BlockEnd* _end; // the last instruction of this block
1625 BlockList _exception_handlers; // the exception handlers potentially invoked by this block
1626 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler
1627 int _exception_handler_pco; // if this block is the start of an exception handler,
1628 // this records the PC offset in the assembly code of the
1629 // first instruction in this block
1630 Label _label; // the label associated with this block
1631 LIR_List* _lir; // the low level intermediate representation for this block
1632
1633 ResourceBitMap _live_in; // set of live LIR_Opr registers at entry to this block
1634 ResourceBitMap _live_out; // set of live LIR_Opr registers at exit from this block
1635 ResourceBitMap _live_gen; // set of registers used before any redefinition in this block
1636 ResourceBitMap _live_kill; // set of registers defined in this block
1637
1638 ResourceBitMap _fpu_register_usage;
1639 intArray* _fpu_stack_state; // For x86 FPU code generation with UseLinearScan
1640 int _first_lir_instruction_id; // ID of first LIR instruction in this block
1641 int _last_lir_instruction_id; // ID of last LIR instruction in this block
1642
1643 void iterate_preorder (boolArray& mark, BlockClosure* closure);
1644 void iterate_postorder(boolArray& mark, BlockClosure* closure);
1645
1646 friend class SuxAndWeightAdjuster;
1647
1648 public:
1649 void* operator new(size_t size) throw() {
1650 Compilation* c = Compilation::current();
1651 void* res = c->arena()->Amalloc(size);
1652 ((BlockBegin*)res)->_id = c->get_next_id();
1653 ((BlockBegin*)res)->_block_id = c->get_next_block_id();
1654 return res;
1655 }
1656
1657 // initialization/counting
1658 static int number_of_blocks() {
1659 return Compilation::current()->number_of_blocks();
1660 }
1661
1662 // creation
1663 BlockBegin(int bci)
1664 : StateSplit(illegalType)
1665 , _bci(bci)
1666 , _depth_first_number(-1)
1667 , _linear_scan_number(-1)
1668 , _dominator_depth(-1)
1669 , _loop_depth(0)
1670 , _loop_index(-1)
1671 , _flags(0)
1672 , _total_preds(0)
1673 , _stores_to_locals()
1674 , _successors(2)
1675 , _predecessors(2)
1676 , _dominates(2)
1677 , _dominator(NULL)
1678 , _end(NULL)
1679 , _exception_handlers(1)
1680 , _exception_states(NULL)
1681 , _exception_handler_pco(-1)
1682 , _lir(NULL)
1683 , _live_in()
1684 , _live_out()
1685 , _live_gen()
1686 , _live_kill()
1687 , _fpu_register_usage()
1688 , _fpu_stack_state(NULL)
1689 , _first_lir_instruction_id(-1)
1690 , _last_lir_instruction_id(-1)
1691 {
1692 _block = this;
1693#ifndef PRODUCT
1694 set_printable_bci(bci);
1695#endif
1696 }
1697
1698 // accessors
1699 int block_id() const { return _block_id; }
1700 int bci() const { return _bci; }
1701 BlockList* successors() { return &_successors; }
1702 BlockList* dominates() { return &_dominates; }
1703 BlockBegin* dominator() const { return _dominator; }
1704 int loop_depth() const { return _loop_depth; }
1705 int dominator_depth() const { return _dominator_depth; }
1706 int depth_first_number() const { return _depth_first_number; }
1707 int linear_scan_number() const { return _linear_scan_number; }
1708 BlockEnd* end() const { return _end; }
1709 Label* label() { return &_label; }
1710 LIR_List* lir() const { return _lir; }
1711 int exception_handler_pco() const { return _exception_handler_pco; }
1712 ResourceBitMap& live_in() { return _live_in; }
1713 ResourceBitMap& live_out() { return _live_out; }
1714 ResourceBitMap& live_gen() { return _live_gen; }
1715 ResourceBitMap& live_kill() { return _live_kill; }
1716 ResourceBitMap& fpu_register_usage() { return _fpu_register_usage; }
1717 intArray* fpu_stack_state() const { return _fpu_stack_state; }
1718 int first_lir_instruction_id() const { return _first_lir_instruction_id; }
1719 int last_lir_instruction_id() const { return _last_lir_instruction_id; }
1720 int total_preds() const { return _total_preds; }
1721 BitMap& stores_to_locals() { return _stores_to_locals; }
1722
1723 // manipulation
1724 void set_dominator(BlockBegin* dom) { _dominator = dom; }
1725 void set_loop_depth(int d) { _loop_depth = d; }
1726 void set_dominator_depth(int d) { _dominator_depth = d; }
1727 void set_depth_first_number(int dfn) { _depth_first_number = dfn; }
1728 void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; }
1729 void set_end(BlockEnd* end);
1730 void clear_end();
1731 void disconnect_from_graph();
1732 static void disconnect_edge(BlockBegin* from, BlockBegin* to);
1733 BlockBegin* insert_block_between(BlockBegin* sux);
1734 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
1735 void set_lir(LIR_List* lir) { _lir = lir; }
1736 void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; }
1737 void set_live_in (const ResourceBitMap& map) { _live_in = map; }
1738 void set_live_out (const ResourceBitMap& map) { _live_out = map; }
1739 void set_live_gen (const ResourceBitMap& map) { _live_gen = map; }
1740 void set_live_kill(const ResourceBitMap& map) { _live_kill = map; }
1741 void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; }
1742 void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; }
1743 void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; }
1744 void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; }
1745 void increment_total_preds(int n = 1) { _total_preds += n; }
1746 void init_stores_to_locals(int locals_count) { _stores_to_locals.initialize(locals_count); }
1747
1748 // generic
1749 virtual void state_values_do(ValueVisitor* f);
1750
1751 // successors and predecessors
1752 int number_of_sux() const;
1753 BlockBegin* sux_at(int i) const;
1754 void add_successor(BlockBegin* sux);
1755   void remove_successor(BlockBegin* sux);
1756 bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); }
1757
1758 void add_predecessor(BlockBegin* pred);
1759 void remove_predecessor(BlockBegin* pred);
1760 bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); }
1761 int number_of_preds() const { return _predecessors.length(); }
1762 BlockBegin* pred_at(int i) const { return _predecessors.at(i); }
1763
1764 // exception handlers potentially invoked by this block
1765 void add_exception_handler(BlockBegin* b);
1766 bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
1767 int number_of_exception_handlers() const { return _exception_handlers.length(); }
1768 BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); }
1769
1770 // states of the instructions that have an edge to this exception handler
1771 int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
1772 ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
1773 int add_exception_state(ValueStack* state);
1774
1775 // flags
1776 enum Flag {
1777 no_flag = 0,
1778 std_entry_flag = 1 << 0,
1779 osr_entry_flag = 1 << 1,
1780 exception_entry_flag = 1 << 2,
1781 subroutine_entry_flag = 1 << 3,
1782 backward_branch_target_flag = 1 << 4,
1783 is_on_work_list_flag = 1 << 5,
1784 was_visited_flag = 1 << 6,
1785 parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions can not be created on demand
1786 critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split
1787 linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan
1788 linear_scan_loop_end_flag = 1 << 10, // set during loop-detection for LinearScan
1789       donot_eliminate_range_checks = 1 << 11 // set when range checks should not be eliminated in this block
1790 };
1791
1792 void set(Flag f) { _flags |= f; }
1793 void clear(Flag f) { _flags &= ~f; }
1794 bool is_set(Flag f) const { return (_flags & f) != 0; }
1795 bool is_entry_block() const {
1796 const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
1797 return (_flags & entry_mask) != 0;
1798 }
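  // Illustrative example (sketch; 'block' is a hypothetical BlockBegin*): flags form a
  // bit set, so one block can carry several properties at once, e.g. a parser loop
  // header that is also a backward branch target:
  //
  //   block->set(BlockBegin::backward_branch_target_flag);
  //   block->set(BlockBegin::parser_loop_header_flag);
  //   if (block->is_set(BlockBegin::backward_branch_target_flag)) { /* ... */ }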
1799
1800 // iteration
1801 void iterate_preorder (BlockClosure* closure);
1802 void iterate_postorder (BlockClosure* closure);
1803
1804 void block_values_do(ValueVisitor* f);
1805
1806 // loops
1807 void set_loop_index(int ix) { _loop_index = ix; }
1808 int loop_index() const { return _loop_index; }
1809
1810 // merging
1811 bool try_merge(ValueStack* state); // try to merge states at block begin
1812 void merge(ValueStack* state) { bool b = try_merge(state); assert(b, "merge failed"); }
1813
1814 // debugging
1815 void print_block() PRODUCT_RETURN;
1816 void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
1817};
1818
1819
1820BASE(BlockEnd, StateSplit)
1821 private:
1822 BlockList* _sux;
1823
1824 protected:
1825 BlockList* sux() const { return _sux; }
1826
1827 void set_sux(BlockList* sux) {
1828#ifdef ASSERT
1829 assert(sux != NULL, "sux must exist");
1830 for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist");
1831#endif
1832 _sux = sux;
1833 }
1834
1835 public:
1836 // creation
1837 BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
1838 : StateSplit(type, state_before)
1839 , _sux(NULL)
1840 {
1841 set_flag(IsSafepointFlag, is_safepoint);
1842 }
1843
1844 // accessors
1845 bool is_safepoint() const { return check_flag(IsSafepointFlag); }
1846   // For compatibility with old code; new code should use block()
1847 BlockBegin* begin() const { return _block; }
1848
1849 // manipulation
1850 void set_begin(BlockBegin* begin);
1851
1852 // successors
1853 int number_of_sux() const { return _sux != NULL ? _sux->length() : 0; }
1854 BlockBegin* sux_at(int i) const { return _sux->at(i); }
1855 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); }
1856 BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); }
1857 int sux_index(BlockBegin* sux) const { return _sux->find(sux); }
1858 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
1859};
1860
1861
1862LEAF(Goto, BlockEnd)
1863 public:
1864 enum Direction {
1865 none, // Just a regular goto
1866 taken, not_taken // Goto produced from If
1867 };
1868 private:
1869 ciMethod* _profiled_method;
1870 int _profiled_bci;
1871 Direction _direction;
1872 public:
1873 // creation
1874 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false)
1875 : BlockEnd(illegalType, state_before, is_safepoint)
1876 , _profiled_method(NULL)
1877 , _profiled_bci(0)
1878 , _direction(none) {
1879 BlockList* s = new BlockList(1);
1880 s->append(sux);
1881 set_sux(s);
1882 }
1883
1884 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint)
1885 , _profiled_method(NULL)
1886 , _profiled_bci(0)
1887 , _direction(none) {
1888 BlockList* s = new BlockList(1);
1889 s->append(sux);
1890 set_sux(s);
1891 }
1892
1893 bool should_profile() const { return check_flag(ProfileMDOFlag); }
1894 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches
1895 int profiled_bci() const { return _profiled_bci; }
1896 Direction direction() const { return _direction; }
1897
1898 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
1899 void set_profiled_method(ciMethod* method) { _profiled_method = method; }
1900 void set_profiled_bci(int bci) { _profiled_bci = bci; }
1901 void set_direction(Direction d) { _direction = d; }
1902};
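// Illustrative example (sketch; 'target' is a hypothetical BlockBegin*): a Goto is the
// simplest BlockEnd and carries exactly one successor, which is also its default:
//
//   Goto* g = new Goto(target, /*is_safepoint*/ false);
//   assert(g->number_of_sux() == 1 && g->default_sux() == target, "single successor");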
1903
1904#ifdef ASSERT
1905LEAF(Assert, Instruction)
1906 private:
1907 Value _x;
1908 Condition _cond;
1909 Value _y;
1910 char *_message;
1911
1912 public:
1913 // creation
1914 // unordered_is_true is valid for float/double compares only
1915 Assert(Value x, Condition cond, bool unordered_is_true, Value y);
1916
1917 // accessors
1918 Value x() const { return _x; }
1919 Condition cond() const { return _cond; }
1920 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); }
1921 Value y() const { return _y; }
1922 const char *message() const { return _message; }
1923
1924 // generic
1925 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); }
1926};
1927#endif
1928
1929LEAF(RangeCheckPredicate, StateSplit)
1930 private:
1931 Value _x;
1932 Condition _cond;
1933 Value _y;
1934
1935 void check_state();
1936
1937 public:
1938 // creation
1939 // unordered_is_true is valid for float/double compares only
1940 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType)
1941 , _x(x)
1942 , _cond(cond)
1943 , _y(y)
1944 {
1945 ASSERT_VALUES
1946 set_flag(UnorderedIsTrueFlag, unordered_is_true);
1947 assert(x->type()->tag() == y->type()->tag(), "types must match");
1948 this->set_state(state);
1949 check_state();
1950 }
1951
1952 // Always deoptimize
1953 RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType)
1954 {
1955 this->set_state(state);
1956 _x = _y = NULL;
1957 check_state();
1958 }
1959
1960 // accessors
1961 Value x() const { return _x; }
1962 Condition cond() const { return _cond; }
1963 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); }
1964 Value y() const { return _y; }
1965
1966 void always_fail() { _x = _y = NULL; }
1967
1968 // generic
1969 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); }
1970 HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond())
1971};
1972
1973LEAF(If, BlockEnd)
1974 private:
1975 Value _x;
1976 Condition _cond;
1977 Value _y;
1978 ciMethod* _profiled_method;
1979 int _profiled_bci; // Canonicalizer may alter bci of If node
1980 bool _swapped; // Is the order reversed with respect to the original If in the
1981 // bytecode stream?
1982 public:
1983 // creation
1984 // unordered_is_true is valid for float/double compares only
1985 If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
1986 : BlockEnd(illegalType, state_before, is_safepoint)
1987 , _x(x)
1988 , _cond(cond)
1989 , _y(y)
1990 , _profiled_method(NULL)
1991 , _profiled_bci(0)
1992 , _swapped(false)
1993 {
1994 ASSERT_VALUES
1995 set_flag(UnorderedIsTrueFlag, unordered_is_true);
1996 assert(x->type()->tag() == y->type()->tag(), "types must match");
1997 BlockList* s = new BlockList(2);
1998 s->append(tsux);
1999 s->append(fsux);
2000 set_sux(s);
2001 }
2002
2003 // accessors
2004 Value x() const { return _x; }
2005 Condition cond() const { return _cond; }
2006 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); }
2007 Value y() const { return _y; }
2008 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); }
2009 BlockBegin* tsux() const { return sux_for(true); }
2010 BlockBegin* fsux() const { return sux_for(false); }
2011 BlockBegin* usux() const { return sux_for(unordered_is_true()); }
2012 bool should_profile() const { return check_flag(ProfileMDOFlag); }
2013 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches
2014 int profiled_bci() const { return _profiled_bci; } // set for profiled branches and tiered
2015 bool is_swapped() const { return _swapped; }
2016
2017 // manipulation
2018 void swap_operands() {
2019 Value t = _x; _x = _y; _y = t;
2020 _cond = mirror(_cond);
2021 }
2022
2023 void swap_sux() {
2024 assert(number_of_sux() == 2, "wrong number of successors");
2025 BlockList* s = sux();
2026 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
2027 _cond = negate(_cond);
2028 set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag));
2029 }
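  // Note (sketch of the invariant): swap_sux() preserves the branch semantics. For
  // example, "if (x < y) goto T else goto F" becomes, after swap_sux(),
  // "if (x >= y) goto F else goto T": the condition is negated, the successor order is
  // exchanged, and the unordered bit is flipped so that float/double compares still
  // reach the same block when an operand is NaN.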
2030
2031 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
2032 void set_profiled_method(ciMethod* method) { _profiled_method = method; }
2033 void set_profiled_bci(int bci) { _profiled_bci = bci; }
2034 void set_swapped(bool value) { _swapped = value; }
2035 // generic
2036 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
2037};
2038
2039
2040LEAF(IfInstanceOf, BlockEnd)
2041 private:
2042 ciKlass* _klass;
2043 Value _obj;
2044 bool _test_is_instance; // jump if instance
2045 int _instanceof_bci;
2046
2047 public:
2048 IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux)
2049     : BlockEnd(illegalType, NULL, false) // temporarily set to false
2050 , _klass(klass)
2051 , _obj(obj)
2052 , _test_is_instance(test_is_instance)
2053 , _instanceof_bci(instanceof_bci)
2054 {
2055 ASSERT_VALUES
2056 assert(instanceof_bci >= 0, "illegal bci");
2057 BlockList* s = new BlockList(2);
2058 s->append(tsux);
2059 s->append(fsux);
2060 set_sux(s);
2061 }
2062
2063 // accessors
2064 //
2065 // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an
2066   //          instance of klass; otherwise it tests if it is *not* an instance
2067 // of klass.
2068 //
2069 // Note 2: IfInstanceOf instructions are created by combining an InstanceOf
2070 // and an If instruction. The IfInstanceOf bci() corresponds to the
2071 // bci that the If would have had; the (this->) instanceof_bci() is
2072 // the bci of the original InstanceOf instruction.
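  //
  // Example (illustrative sketch): the bytecode pattern
  //
  //            instanceof C        // the original InstanceOf, at instanceof_bci()
  //            ifne     L          // the original If, at this->bci()
  //
  // can be folded into IfInstanceOf(C, obj, /*test_is_instance*/ true, ...) whose
  // true successor is the block starting at L.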
2073 ciKlass* klass() const { return _klass; }
2074 Value obj() const { return _obj; }
2075 int instanceof_bci() const { return _instanceof_bci; }
2076 bool test_is_instance() const { return _test_is_instance; }
2077 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); }
2078 BlockBegin* tsux() const { return sux_for(true); }
2079 BlockBegin* fsux() const { return sux_for(false); }
2080
2081 // manipulation
2082 void swap_sux() {
2083 assert(number_of_sux() == 2, "wrong number of successors");
2084 BlockList* s = sux();
2085 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
2086 _test_is_instance = !_test_is_instance;
2087 }
2088
2089 // generic
2090 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); }
2091};
2092
2093
2094BASE(Switch, BlockEnd)
2095 private:
2096 Value _tag;
2097
2098 public:
2099 // creation
2100 Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
2101 : BlockEnd(illegalType, state_before, is_safepoint)
2102 , _tag(tag) {
2103 ASSERT_VALUES
2104 set_sux(sux);
2105 }
2106
2107 // accessors
2108 Value tag() const { return _tag; }
2109 int length() const { return number_of_sux() - 1; }
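  // Note (sketch): the successor list holds one block per key followed by the default
  // target (see BlockEnd::default_sux()), so number_of_sux() == length() + 1 and
  // length() counts only the keyed successors.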
2110
2111 virtual bool needs_exception_state() const { return false; }
2112
2113 // generic
2114 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); }
2115};
2116
2117
2118LEAF(TableSwitch, Switch)
2119 private:
2120 int _lo_key;
2121
2122 public:
2123 // creation
2124 TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
2125 : Switch(tag, sux, state_before, is_safepoint)
2126 , _lo_key(lo_key) { assert(_lo_key <= hi_key(), "integer overflow"); }
2127
2128 // accessors
2129 int lo_key() const { return _lo_key; }
2130 int hi_key() const { return _lo_key + (length() - 1); }
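  // Example (illustrative): a tableswitch covering keys 3..6 has length() == 4 keyed
  // successors plus the default, so number_of_sux() == 5, lo_key() == 3 and
  // hi_key() == 3 + (4 - 1) == 6.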
2131};
2132
2133
2134LEAF(LookupSwitch, Switch)
2135 private:
2136 intArray* _keys;
2137
2138 public:
2139 // creation
2140 LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
2141 : Switch(tag, sux, state_before, is_safepoint)
2142 , _keys(keys) {
2143 assert(keys != NULL, "keys must exist");
2144 assert(keys->length() == length(), "sux & keys have incompatible lengths");
2145 }
2146
2147 // accessors
2148 int key_at(int i) const { return _keys->at(i); }
2149};
2150
2151
2152LEAF(Return, BlockEnd)
2153 private:
2154 Value _result;
2155
2156 public:
2157 // creation
2158 Return(Value result) :
2159 BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
2160 _result(result) {}
2161
2162 // accessors
2163 Value result() const { return _result; }
2164 bool has_result() const { return result() != NULL; }
2165
2166 // generic
2167 virtual void input_values_do(ValueVisitor* f) {
2168 BlockEnd::input_values_do(f);
2169 if (has_result()) f->visit(&_result);
2170 }
2171};
2172
2173
2174LEAF(Throw, BlockEnd)
2175 private:
2176 Value _exception;
2177
2178 public:
2179 // creation
2180 Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
2181 ASSERT_VALUES
2182 }
2183
2184 // accessors
2185 Value exception() const { return _exception; }
2186
2187 // generic
2188 virtual bool can_trap() const { return true; }
2189 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); }
2190};
2191
2192
2193LEAF(Base, BlockEnd)
2194 public:
2195 // creation
2196 Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
2197 assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
2198 assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
2199 BlockList* s = new BlockList(2);
2200 if (osr_entry != NULL) s->append(osr_entry);
2201 s->append(std_entry); // must be default sux!
2202 set_sux(s);
2203 }
2204
2205 // accessors
2206 BlockBegin* std_entry() const { return default_sux(); }
2207 BlockBegin* osr_entry() const { return number_of_sux() < 2 ? NULL : sux_at(0); }
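  // Example (illustrative): with an OSR entry the successor list is
  // { osr_entry, std_entry } and std_entry is still the default (last) successor;
  // without one it is just { std_entry }, so osr_entry() returns NULL.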
2208};
2209
2210
2211LEAF(OsrEntry, Instruction)
2212 public:
2213 // creation
2214#ifdef _LP64
2215 OsrEntry() : Instruction(longType) { pin(); }
2216#else
2217 OsrEntry() : Instruction(intType) { pin(); }
2218#endif
2219
2220 // generic
2221 virtual void input_values_do(ValueVisitor* f) { }
2222};
2223
2224
2225// Models the incoming exception at a catch site
2226LEAF(ExceptionObject, Instruction)
2227 public:
2228 // creation
2229 ExceptionObject() : Instruction(objectType) {
2230 pin();
2231 }
2232
2233 // generic
2234 virtual void input_values_do(ValueVisitor* f) { }
2235};
2236
2237
2238// Models needed rounding for floating-point values on Intel.
2239// Currently only used to represent rounding of double-precision
2240// values stored into local variables, but could be used to model
2241// intermediate rounding of single-precision values as well.
2242LEAF(RoundFP, Instruction)
2243 private:
2244 Value _input; // floating-point value to be rounded
2245
2246 public:
2247 RoundFP(Value input)
2248 : Instruction(input->type()) // Note: should not be used for constants
2249 , _input(input)
2250 {
2251 ASSERT_VALUES
2252 }
2253
2254 // accessors
2255 Value input() const { return _input; }
2256
2257 // generic
2258 virtual void input_values_do(ValueVisitor* f) { f->visit(&_input); }
2259};
2260
2261
2262BASE(UnsafeOp, Instruction)
2263 private:
2264 BasicType _basic_type; // ValueType can not express byte-sized integers
2265
2266 protected:
2267 // creation
2268 UnsafeOp(BasicType basic_type, bool is_put)
2269 : Instruction(is_put ? voidType : as_ValueType(basic_type))
2270 , _basic_type(basic_type)
2271 {
2272     // Note: Unsafe ops are not guaranteed to throw NPE.
2273     // Conservatively, Unsafe operations must be pinned, though we could be
2274     // looser about this if we wanted to.
2275 pin();
2276 }
2277
2278 public:
2279 // accessors
2280 BasicType basic_type() { return _basic_type; }
2281
2282 // generic
2283 virtual void input_values_do(ValueVisitor* f) { }
2284};
2285
2286
2287BASE(UnsafeRawOp, UnsafeOp)
2288 private:
2289 Value _base; // Base address (a Java long)
2290 Value _index; // Index if computed by optimizer; initialized to NULL
2291 int _log2_scale; // Scale factor: 0, 1, 2, or 3.
2292 // Indicates log2 of number of bytes (1, 2, 4, or 8)
2293 // to scale index by.
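  //
  // Sketch of the addressing: the raw address touched by such an operation is
  //
  //   address = _base + (_index << _log2_scale)   // _index contributes 0 when NULL
  //
  // e.g. base 0x1000, index 5, log2_scale 2 addresses 0x1000 + 5*4 = 0x1014.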
2294
2295 protected:
2296 UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
2297 : UnsafeOp(basic_type, is_put)
2298 , _base(addr)
2299 , _index(NULL)
2300 , _log2_scale(0)
2301 {
2302 // Can not use ASSERT_VALUES because index may be NULL
2303 assert(addr != NULL && addr->type()->is_long(), "just checking");
2304 }
2305
2306 UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
2307 : UnsafeOp(basic_type, is_put)
2308 , _base(base)
2309 , _index(index)
2310 , _log2_scale(log2_scale)
2311 {
2312 }
2313
2314 public:
2315 // accessors
2316 Value base() { return _base; }
2317 Value index() { return _index; }
2318 bool has_index() { return (_index != NULL); }
2319 int log2_scale() { return _log2_scale; }
2320
2321 // setters
2322 void set_base (Value base) { _base = base; }
2323 void set_index(Value index) { _index = index; }
2324 void set_log2_scale(int log2_scale) { _log2_scale = log2_scale; }
2325
2326 // generic
2327 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
2328 f->visit(&_base);
2329 if (has_index()) f->visit(&_index); }
2330};
2331
2332
2333LEAF(UnsafeGetRaw, UnsafeRawOp)
2334 private:
2335 bool _may_be_unaligned, _is_wide; // For OSREntry
2336
2337 public:
2338 UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
2339 : UnsafeRawOp(basic_type, addr, false) {
2340 _may_be_unaligned = may_be_unaligned;
2341 _is_wide = is_wide;
2342 }
2343
2344 UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
2345 : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
2346 _may_be_unaligned = may_be_unaligned;
2347 _is_wide = is_wide;
2348 }
2349
2350 bool may_be_unaligned() { return _may_be_unaligned; }
2351 bool is_wide() { return _is_wide; }
2352};
2353
2354
2355LEAF(UnsafePutRaw, UnsafeRawOp)
2356 private:
2357 Value _value; // Value to be stored
2358
2359 public:
2360 UnsafePutRaw(BasicType basic_type, Value addr, Value value)
2361 : UnsafeRawOp(basic_type, addr, true)
2362 , _value(value)
2363 {
2364 assert(value != NULL, "just checking");
2365 ASSERT_VALUES
2366 }
2367
2368 UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
2369 : UnsafeRawOp(basic_type, base, index, log2_scale, true)
2370 , _value(value)
2371 {
2372 assert(value != NULL, "just checking");
2373 ASSERT_VALUES
2374 }
2375
2376 // accessors
2377 Value value() { return _value; }
2378
2379 // generic
2380 virtual void input_values_do(ValueVisitor* f) { UnsafeRawOp::input_values_do(f);
2381 f->visit(&_value); }
2382};
2383
2384
2385BASE(UnsafeObjectOp, UnsafeOp)
2386 private:
2387 Value _object; // Object to be fetched from or mutated
2388 Value _offset; // Offset within object
2389 bool _is_volatile; // true if volatile - dl/JSR166
2390 public:
2391 UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
2392 : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
2393 {
2394 }
2395
2396 // accessors
2397 Value object() { return _object; }
2398 Value offset() { return _offset; }
2399 bool is_volatile() { return _is_volatile; }
2400 // generic
2401 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
2402 f->visit(&_object);
2403 f->visit(&_offset); }
2404};
2405
2406
2407LEAF(UnsafeGetObject, UnsafeObjectOp)
2408 public:
2409 UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
2410 : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
2411 {
2412 ASSERT_VALUES
2413 }
2414};
2415
2416
2417LEAF(UnsafePutObject, UnsafeObjectOp)
2418 private:
2419 Value _value; // Value to be stored
2420 public:
2421 UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
2422 : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
2423 , _value(value)
2424 {
2425 ASSERT_VALUES
2426 }
2427
2428 // accessors
2429 Value value() { return _value; }
2430
2431 // generic
2432 virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f);
2433 f->visit(&_value); }
2434};
2435
2436LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
2437 private:
2438 Value _value; // Value to be stored
2439 bool _is_add;
2440 public:
2441 UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
2442 : UnsafeObjectOp(basic_type, object, offset, false, false)
2443 , _value(value)
2444 , _is_add(is_add)
2445 {
2446 ASSERT_VALUES
2447 }
2448
2449 // accessors
2450 bool is_add() const { return _is_add; }
2451 Value value() { return _value; }
2452
2453 // generic
2454 virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f);
2455 f->visit(&_value); }
2456};
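// Illustrative example (sketch; 'obj', 'off' and 'delta' are hypothetical Values): an
// atomic fetch-and-add of an int field could be modeled as
//
//   UnsafeGetAndSetObject* op = new UnsafeGetAndSetObject(T_INT, obj, off, delta, /*is_add*/ true);
//   // op->type() is intType; the result is assumed to be the previous value at (obj, off).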
2457
2458LEAF(ProfileCall, Instruction)
2459 private:
2460 ciMethod* _method;
2461 int _bci_of_invoke;
2462 ciMethod* _callee; // the method that is called at the given bci
2463 Value _recv;
2464 ciKlass* _known_holder;
2465 Values* _obj_args; // arguments for type profiling
2466 ArgsNonNullState _nonnull_state; // Do we know whether some arguments are never null?
2467 bool _inlined; // Are we profiling a call that is inlined
2468
2469 public:
2470 ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
2471 : Instruction(voidType)
2472 , _method(method)
2473 , _bci_of_invoke(bci)
2474 , _callee(callee)
2475 , _recv(recv)
2476 , _known_holder(known_holder)
2477 , _obj_args(obj_args)
2478 , _inlined(inlined)
2479 {
2480 // The ProfileCall has side-effects and must occur precisely where located
2481 pin();
2482 }
2483
2484 ciMethod* method() const { return _method; }
2485 int bci_of_invoke() const { return _bci_of_invoke; }
2486 ciMethod* callee() const { return _callee; }
2487 Value recv() const { return _recv; }
2488 ciKlass* known_holder() const { return _known_holder; }
2489 int nb_profiled_args() const { return _obj_args == NULL ? 0 : _obj_args->length(); }
2490 Value profiled_arg_at(int i) const { return _obj_args->at(i); }
2491 bool arg_needs_null_check(int i) const {
2492 return _nonnull_state.arg_needs_null_check(i);
2493 }
2494 bool inlined() const { return _inlined; }
2495
2496 void set_arg_needs_null_check(int i, bool check) {
2497 _nonnull_state.set_arg_needs_null_check(i, check);
2498 }
2499
2500 virtual void input_values_do(ValueVisitor* f) {
2501 if (_recv != NULL) {
2502 f->visit(&_recv);
2503 }
2504 for (int i = 0; i < nb_profiled_args(); i++) {
2505 f->visit(_obj_args->adr_at(i));
2506 }
2507 }
2508};
2509
2510LEAF(ProfileReturnType, Instruction)
2511 private:
2512 ciMethod* _method;
2513 ciMethod* _callee;
2514 int _bci_of_invoke;
2515 Value _ret;
2516
2517 public:
2518 ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret)
2519 : Instruction(voidType)
2520 , _method(method)
2521 , _callee(callee)
2522 , _bci_of_invoke(bci)
2523 , _ret(ret)
2524 {
2525 set_needs_null_check(true);
2526     // The ProfileReturnType has side-effects and must occur precisely where located
2527 pin();
2528 }
2529
2530 ciMethod* method() const { return _method; }
2531 ciMethod* callee() const { return _callee; }
2532 int bci_of_invoke() const { return _bci_of_invoke; }
2533 Value ret() const { return _ret; }
2534
2535 virtual void input_values_do(ValueVisitor* f) {
2536 if (_ret != NULL) {
2537 f->visit(&_ret);
2538 }
2539 }
2540};
2541
2542// Call some C runtime function that doesn't safepoint,
2543// optionally passing the current thread as the first argument.
2544LEAF(RuntimeCall, Instruction)
2545 private:
2546 const char* _entry_name;
2547 address _entry;
2548 Values* _args;
2549 bool _pass_thread; // Pass the JavaThread* as an implicit first argument
2550
2551 public:
2552 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
2553 : Instruction(type)
2554 , _entry_name(entry_name)
2555 , _entry(entry)
2556 , _args(args)
2557 , _pass_thread(pass_thread) {
2558 ASSERT_VALUES
2559 pin();
2560 }
2561
2562 const char* entry_name() const { return _entry_name; }
2563 address entry() const { return _entry; }
2564 int number_of_arguments() const { return _args->length(); }
2565 Value argument_at(int i) const { return _args->at(i); }
2566 bool pass_thread() const { return _pass_thread; }
2567
2568 virtual void input_values_do(ValueVisitor* f) {
2569 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
2570 }
2571};
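// Illustrative example (sketch; 'receiver' and 'trace_entry' are hypothetical): wrapping
// a leaf C entry point that takes the current thread plus one Java argument:
//
//   Values* args = new Values(1);
//   args->append(receiver);
//   RuntimeCall* call = new RuntimeCall(voidType, "trace_entry", trace_entry, args);
//   // pass_thread defaults to true, so the JavaThread* is prepended as the first C argument.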
2572
2573 // Used to trip the invocation counter of an inlined method
2574
2575LEAF(ProfileInvoke, Instruction)
2576 private:
2577 ciMethod* _inlinee;
2578 ValueStack* _state;
2579
2580 public:
2581 ProfileInvoke(ciMethod* inlinee, ValueStack* state)
2582 : Instruction(voidType)
2583 , _inlinee(inlinee)
2584 , _state(state)
2585 {
2586     // The ProfileInvoke has side-effects and must occur precisely where located
2587 pin();
2588 }
2589
2590 ciMethod* inlinee() { return _inlinee; }
2591 ValueStack* state() { return _state; }
2592 virtual void input_values_do(ValueVisitor*) {}
2593 virtual void state_values_do(ValueVisitor*);
2594};
2595
2596LEAF(MemBar, Instruction)
2597 private:
2598 LIR_Code _code;
2599
2600 public:
2601 MemBar(LIR_Code code)
2602 : Instruction(voidType)
2603 , _code(code)
2604 {
2605 pin();
2606 }
2607
2608 LIR_Code code() { return _code; }
2609
2610 virtual void input_values_do(ValueVisitor*) {}
2611};
2612
2613class BlockPair: public CompilationResourceObj {
2614 private:
2615 BlockBegin* _from;
2616 BlockBegin* _to;
2617 public:
2618 BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
2619 BlockBegin* from() const { return _from; }
2620 BlockBegin* to() const { return _to; }
2621 bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; }
2622 bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); }
2623 void set_to(BlockBegin* b) { _to = b; }
2624 void set_from(BlockBegin* b) { _from = b; }
2625};
2626
2627typedef GrowableArray<BlockPair*> BlockPairList;
2628
2629inline int BlockBegin::number_of_sux() const { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); }
2630inline BlockBegin* BlockBegin::sux_at(int i) const { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); }
2631inline void BlockBegin::add_successor(BlockBegin* sux) { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); }
2632
2633#undef ASSERT_VALUES
2634
2635#endif // SHARE_C1_C1_INSTRUCTION_HPP
2636