/*
** Common header for IR emitter and optimizations.
** Copyright (C) 2005-2021 Mike Pall. See Copyright Notice in luajit.h
*/

#ifndef _LJ_IROPT_H
#define _LJ_IROPT_H

#include <stdarg.h>

#include "lj_obj.h"
#include "lj_jit.h"

#if LJ_HASJIT
/* IR emitter. */
LJ_FUNC void LJ_FASTCALL lj_ir_growtop(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_ir_emit(jit_State *J);

/* Save current IR in J->fold.ins, but do not emit it (yet). */
static LJ_AINLINE void lj_ir_set_(jit_State *J, uint16_t ot, IRRef1 a, IRRef1 b)
{
  J->fold.ins.ot = ot; J->fold.ins.op1 = a; J->fold.ins.op2 = b;
}

#define lj_ir_set(J, ot, a, b) \
  lj_ir_set_(J, (uint16_t)(ot), (IRRef1)(a), (IRRef1)(b))
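
/* Typical usage (sketch): callers pair lj_ir_set() with lj_opt_fold() or
** lj_ir_emit() through a local emitir()-style macro. The macro name is a
** convention of the recorder sources, not part of this header:
**
**   #define emitir(ot, a, b)  (lj_ir_set(J, (ot), (a), (b)), lj_opt_fold(J))
**
** i.e. stage the instruction in J->fold.ins, then let the fold engine either
** constant-fold/CSE it away or emit it for real.
*/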

/* Get ref of next IR instruction and optionally grow IR.
** Note: this may invalidate all IRIns*!
*/
static LJ_AINLINE IRRef lj_ir_nextins(jit_State *J)
{
  IRRef ref = J->cur.nins;
  if (LJ_UNLIKELY(ref >= J->irtoplim)) lj_ir_growtop(J);
  J->cur.nins = ref + 1;
  return ref;
}
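
/* Because lj_ir_growtop() may reallocate the IR buffer, callers should hold
** on to IRRef indices and re-derive any IRIns pointer only after the call.
** Sketch of the emit path (the IR(ref) accessor is the convention used in
** the IR/opt sources, not defined here):
**
**   IRRef ref = lj_ir_nextins(J);   <- may grow/move the IR buffer
**   IRIns *ir = IR(ref);            <- fetch the pointer afterwards
**   ir->ot = J->fold.ins.ot;
**   ir->op1 = J->fold.ins.op1; ir->op2 = J->fold.ins.op2;
*/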

LJ_FUNC TRef lj_ir_ggfload(jit_State *J, IRType t, uintptr_t ofs);

/* Interning of constants. */
LJ_FUNC TRef LJ_FASTCALL lj_ir_kint(jit_State *J, int32_t k);
LJ_FUNC TRef lj_ir_k64(jit_State *J, IROp op, uint64_t u64);
LJ_FUNC TRef lj_ir_knum_u64(jit_State *J, uint64_t u64);
LJ_FUNC TRef lj_ir_knumint(jit_State *J, lua_Number n);
LJ_FUNC TRef lj_ir_kint64(jit_State *J, uint64_t u64);
LJ_FUNC TRef lj_ir_kgc(jit_State *J, GCobj *o, IRType t);
LJ_FUNC TRef lj_ir_kptr_(jit_State *J, IROp op, void *ptr);
LJ_FUNC TRef lj_ir_knull(jit_State *J, IRType t);
LJ_FUNC TRef lj_ir_kslot(jit_State *J, TRef key, IRRef slot);
LJ_FUNC TRef lj_ir_ktrace(jit_State *J);

#if LJ_64
#define lj_ir_kintp(J, k) lj_ir_kint64(J, (uint64_t)(k))
#else
#define lj_ir_kintp(J, k) lj_ir_kint(J, (int32_t)(k))
#endif

static LJ_AINLINE TRef lj_ir_knum(jit_State *J, lua_Number n)
{
  TValue tv;
  tv.n = n;
  return lj_ir_knum_u64(J, tv.u64);
}

#define lj_ir_kstr(J, str) lj_ir_kgc(J, obj2gco((str)), IRT_STR)
#define lj_ir_ktab(J, tab) lj_ir_kgc(J, obj2gco((tab)), IRT_TAB)
#define lj_ir_kfunc(J, func) lj_ir_kgc(J, obj2gco((func)), IRT_FUNC)
#define lj_ir_kptr(J, ptr) lj_ir_kptr_(J, IR_KPTR, (ptr))
#define lj_ir_kkptr(J, ptr) lj_ir_kptr_(J, IR_KKPTR, (ptr))
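
/* Constant refs are interned: requesting the same constant again yields the
** same TRef, so constants can be compared by ref. Usage sketch inside a
** recording context (J is the active jit_State; tab is assumed to be a
** GCtab * held by the recorder):
**
**   TRef kone  = lj_ir_kint(J, 1);               <- 32-bit integer constant
**   TRef ksize = lj_ir_kintp(J, sizeof(GCtab));  <- pointer-sized integer
**   TRef ktab  = lj_ir_ktab(J, tab);             <- GC object constant
*/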

/* Special FP constants. */
#define lj_ir_knum_zero(J) lj_ir_knum_u64(J, U64x(00000000,00000000))
#define lj_ir_knum_one(J) lj_ir_knum_u64(J, U64x(3ff00000,00000000))
#define lj_ir_knum_tobit(J) lj_ir_knum_u64(J, U64x(43380000,00000000))
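
/* The U64x() patterns above are raw IEEE-754 doubles: 0.0, 1.0 and
** 2^52+2^51, the latter being the bias constant used for the tobit-style
** number-to-integer conversion.
*/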

/* Special 128 bit SIMD constants. */
#define lj_ir_ksimd(J, idx) \
  lj_ir_ggfload(J, IRT_NUM, (uintptr_t)LJ_KSIMD(J, idx) - (uintptr_t)J2GG(J))

/* Access to constants. */
LJ_FUNC void lj_ir_kvalue(lua_State *L, TValue *tv, const IRIns *ir);

/* Convert IR operand types. */
LJ_FUNC TRef LJ_FASTCALL lj_ir_tonumber(jit_State *J, TRef tr);
LJ_FUNC TRef LJ_FASTCALL lj_ir_tonum(jit_State *J, TRef tr);
LJ_FUNC TRef LJ_FASTCALL lj_ir_tostr(jit_State *J, TRef tr);

/* Miscellaneous IR ops. */
LJ_FUNC int lj_ir_numcmp(lua_Number a, lua_Number b, IROp op);
LJ_FUNC int lj_ir_strcmp(GCstr *a, GCstr *b, IROp op);
LJ_FUNC void lj_ir_rollback(jit_State *J, IRRef ref);

/* Emit IR instructions with on-the-fly optimizations. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_fold(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_cse(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_cselim(jit_State *J, IRRef lim);

/* Special return values for the fold functions. */
enum {
  NEXTFOLD,   /* Couldn't fold, pass on. */
  RETRYFOLD,  /* Retry fold with modified fins. */
  KINTFOLD,   /* Return ref for int constant in fins->i. */
  FAILFOLD,   /* Guard would always fail. */
  DROPFOLD,   /* Guard eliminated. */
  MAX_FOLD
};

#define INTFOLD(k) ((J->fold.ins.i = (k)), (TRef)KINTFOLD)
#define INT64FOLD(k) (lj_ir_kint64(J, (k)))
#define CONDFOLD(cond) ((TRef)FAILFOLD + (TRef)(cond))
#define LEFTFOLD (J->fold.ins.op1)
#define RIGHTFOLD (J->fold.ins.op2)
#define CSEFOLD (lj_opt_cse(J))
#define EMITFOLD (lj_ir_emit(J))
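
/* Sketch of how a fold rule uses these values (the LJFOLD/LJFOLDF macros and
** the fins/fleft/fright shorthands are conventions of lj_opt_fold.c, shown
** here only for illustration):
**
**   LJFOLD(ADD KINT KINT)
**   LJFOLDF(kfold_intarith)
**   {
**     return INTFOLD(kfold_intop(fleft->i, fright->i, (IROp)fins->o));
**   }
**
** A handler returns a TRef result or one of the special values above:
** NEXTFOLD passes the instruction on to the next rule (and ultimately to
** CSE/emit), FAILFOLD means the guard would always fail, and DROPFOLD means
** the guard can be eliminated.
*/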

/* Load/store forwarding. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_aload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_hload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_uload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_fload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_xload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_alen(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_hrefk(jit_State *J);
LJ_FUNC int LJ_FASTCALL lj_opt_fwd_href_nokey(jit_State *J);
LJ_FUNC int LJ_FASTCALL lj_opt_fwd_tptr(jit_State *J, IRRef lim);
LJ_FUNC int lj_opt_fwd_wasnonnil(jit_State *J, IROpT loadop, IRRef xref);

/* Dead-store elimination. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_ahstore(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_ustore(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_fstore(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_xstore(jit_State *J);

/* Narrowing. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_convert(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_index(jit_State *J, TRef key);
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_toint(jit_State *J, TRef tr);
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_tobit(jit_State *J, TRef tr);
#if LJ_HASFFI
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_cindex(jit_State *J, TRef key);
#endif
LJ_FUNC TRef lj_opt_narrow_arith(jit_State *J, TRef rb, TRef rc,
                                 TValue *vb, TValue *vc, IROp op);
LJ_FUNC TRef lj_opt_narrow_unm(jit_State *J, TRef rc, TValue *vc);
LJ_FUNC TRef lj_opt_narrow_mod(jit_State *J, TRef rb, TRef rc, TValue *vb, TValue *vc);
LJ_FUNC TRef lj_opt_narrow_pow(jit_State *J, TRef rb, TRef rc, TValue *vb, TValue *vc);
LJ_FUNC IRType lj_opt_narrow_forl(jit_State *J, cTValue *forbase);

/* Optimization passes. */
LJ_FUNC void lj_opt_dce(jit_State *J);
LJ_FUNC int lj_opt_loop(jit_State *J);
#if LJ_SOFTFP32 || (LJ_32 && LJ_HASFFI)
LJ_FUNC void lj_opt_split(jit_State *J);
#else
#define lj_opt_split(J) UNUSED(J)
#endif
LJ_FUNC void lj_opt_sink(jit_State *J);

#endif

#endif