Ruby 3.1.4p223 (2023-03-30 revision HEAD)
vm_insnhelper.h
#ifndef RUBY_INSNHELPER_H
#define RUBY_INSNHELPER_H
/**********************************************************************

  insnhelper.h - helper macros to implement each instruction

  $Author$
  created at: 04/01/01 15:50:34 JST

  Copyright (C) 2004-2007 Koichi Sasada

**********************************************************************/

MJIT_SYMBOL_EXPORT_BEGIN

RUBY_EXTERN VALUE ruby_vm_const_missing_count;
RUBY_EXTERN rb_serial_t ruby_vm_global_constant_state;
RUBY_EXTERN rb_serial_t ruby_vm_class_serial;
RUBY_EXTERN rb_serial_t ruby_vm_global_cvar_state;

MJIT_SYMBOL_EXPORT_END

#if VM_COLLECT_USAGE_DETAILS
#define COLLECT_USAGE_INSN(insn) vm_collect_usage_insn(insn)
#define COLLECT_USAGE_OPERAND(insn, n, op) vm_collect_usage_operand((insn), (n), ((VALUE)(op)))

#define COLLECT_USAGE_REGISTER(reg, s) vm_collect_usage_register((reg), (s))
#elif YJIT_STATS
/* for --yjit-stats */
#define COLLECT_USAGE_INSN(insn) rb_yjit_collect_vm_usage_insn(insn)
#define COLLECT_USAGE_OPERAND(insn, n, op) /* none */
#define COLLECT_USAGE_REGISTER(reg, s) /* none */
#else
#define COLLECT_USAGE_INSN(insn) /* none */
#define COLLECT_USAGE_OPERAND(insn, n, op) /* none */
#define COLLECT_USAGE_REGISTER(reg, s) /* none */
#endif
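
/* Note: unless VM_COLLECT_USAGE_DETAILS or YJIT_STATS is enabled, the
 * COLLECT_USAGE_* hooks above expand to nothing, so the instrumentation
 * has no cost in a normal build. */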

/**********************************************************/
/* deal with stack */
/**********************************************************/

#define PUSH(x) (SET_SV(x), INC_SP(1))
#define TOPN(n) (*(GET_SP()-(n)-1))
#define POPN(n) (DEC_SP(n))
#define POP() (DEC_SP(1))
#define STACK_ADDR_FROM_TOP(n) (GET_SP()-(n))
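
/* Illustrative sketch (not part of the original header): an instruction
 * handler typically manipulates the value stack through these macros, e.g.
 *
 *     VALUE recv = TOPN(1);   // second value from the top
 *     VALUE arg  = TOPN(0);   // value on the top of the stack
 *     POPN(2);                // drop both operands
 *     PUSH(result);           // SET_SV(result), then INC_SP(1)
 *
 * Here recv, arg and result are hypothetical names used only for the
 * example; real handlers are generated from insns.def. */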

/**********************************************************/
/* deal with registers */
/**********************************************************/

#define VM_REG_CFP (reg_cfp)
#define VM_REG_PC (VM_REG_CFP->pc)
#define VM_REG_SP (VM_REG_CFP->sp)
#define VM_REG_EP (VM_REG_CFP->ep)

#define RESTORE_REGS() do { \
    VM_REG_CFP = ec->cfp; \
} while (0)
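
/* RESTORE_REGS() re-reads the cached control frame pointer from the
 * execution context. It is needed whenever an operation may have pushed or
 * popped frames (and therefore changed ec->cfp) while the local reg_cfp
 * copy went stale, e.g. after calling into another method. */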

#if VM_COLLECT_USAGE_DETAILS
enum vm_regan_regtype {
    VM_REGAN_PC = 0,
    VM_REGAN_SP = 1,
    VM_REGAN_EP = 2,
    VM_REGAN_CFP = 3,
    VM_REGAN_SELF = 4,
    VM_REGAN_ISEQ = 5
};
enum vm_regan_acttype {
    VM_REGAN_ACT_GET = 0,
    VM_REGAN_ACT_SET = 1
};

#define COLLECT_USAGE_REGISTER_HELPER(a, b, v) \
    (COLLECT_USAGE_REGISTER((VM_REGAN_##a), (VM_REGAN_ACT_##b)), (v))
#else
#define COLLECT_USAGE_REGISTER_HELPER(a, b, v) (v)
#endif

/* PC */
#define GET_PC() (COLLECT_USAGE_REGISTER_HELPER(PC, GET, VM_REG_PC))
#define SET_PC(x) (VM_REG_PC = (COLLECT_USAGE_REGISTER_HELPER(PC, SET, (x))))
#define GET_CURRENT_INSN() (*GET_PC())
#define GET_OPERAND(n) (GET_PC()[(n)])
#define ADD_PC(n) (SET_PC(VM_REG_PC + (n)))
#define JUMP(dst) (SET_PC(VM_REG_PC + (dst)))
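
/* Illustrative sketch (not part of the original header): generated handlers
 * read their operands relative to the PC and branch by adjusting it,
 * roughly like
 *
 *     VALUE val = TOPN(0);                       // condition on the stack
 *     rb_snum_t dst = (rb_snum_t)GET_OPERAND(1); // relative branch offset
 *     POPN(1);
 *     if (RTEST(val)) JUMP(dst);                 // SET_PC(VM_REG_PC + dst)
 *
 * The operand index and variable names are assumptions for the example; the
 * real handlers live in insns.def and the generated vm.inc. */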

/* frame pointer, environment pointer */
#define GET_CFP() (COLLECT_USAGE_REGISTER_HELPER(CFP, GET, VM_REG_CFP))
#define GET_EP() (COLLECT_USAGE_REGISTER_HELPER(EP, GET, VM_REG_EP))
#define SET_EP(x) (VM_REG_EP = (COLLECT_USAGE_REGISTER_HELPER(EP, SET, (x))))
#define GET_LEP() (VM_EP_LEP(GET_EP()))

/* SP */
#define GET_SP() (COLLECT_USAGE_REGISTER_HELPER(SP, GET, VM_REG_SP))
#define SET_SP(x) (VM_REG_SP = (COLLECT_USAGE_REGISTER_HELPER(SP, SET, (x))))
#define INC_SP(x) (VM_REG_SP += (COLLECT_USAGE_REGISTER_HELPER(SP, SET, (x))))
#define DEC_SP(x) (VM_REG_SP -= (COLLECT_USAGE_REGISTER_HELPER(SP, SET, (x))))
#define SET_SV(x) (*GET_SP() = rb_ractor_confirm_belonging(x))
    /* set current stack value to x */
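
/* Note the ordering in PUSH(): SET_SV() writes the value into the slot at
 * the current SP (one above the live top of stack) and INC_SP(1) then makes
 * that slot the new top. Every value pushed this way also passes through
 * rb_ractor_confirm_belonging(), a Ractor ownership check. */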

/* instruction sequence C struct */
#define GET_ISEQ() (GET_CFP()->iseq)

/**********************************************************/
/* deal with variables */
/**********************************************************/

#define GET_PREV_EP(ep) ((VALUE *)((ep)[VM_ENV_DATA_INDEX_SPECVAL] & ~0x03))
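
/* The VM_ENV_DATA_INDEX_SPECVAL slot holds either a block handler or a
 * tagged pointer to the parent environment; masking with ~0x03 strips the
 * low tag bits and recovers the plain parent EP. */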

/**********************************************************/
/* deal with values */
/**********************************************************/

#define GET_SELF() (COLLECT_USAGE_REGISTER_HELPER(SELF, GET, GET_CFP()->self))

/**********************************************************/
/* deal with control flow 2: method/iterator */
/**********************************************************/

/* Set the fastpath only when the cached method is *NOT* protected,
 * because the inline method cache does not take the receiver into account.
 */

static inline void
CC_SET_FASTPATH(const struct rb_callcache *cc, vm_call_handler func, bool enabled)
{
    if (LIKELY(enabled)) {
        vm_cc_call_set(cc, func);
    }
}
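
/* Illustrative sketch (not part of the original header): a call handler can
 * install a specialized handler into the call cache once the call site has
 * been verified, e.g.
 *
 *     CC_SET_FASTPATH(cc, vm_call_specialized_handler, cacheable_p);
 *
 * where vm_call_specialized_handler and cacheable_p are placeholders; the
 * third argument merely gates whether the cache is updated at all. */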

#define GET_BLOCK_HANDLER() (GET_LEP()[VM_ENV_DATA_INDEX_SPECVAL])

/**********************************************************/
/* deal with control flow 3: exception */
/**********************************************************/


/**********************************************************/
/* deal with stack canary */
/**********************************************************/

#if VM_CHECK_MODE > 0
#define SETUP_CANARY(cond) \
    VALUE *canary = 0; \
    if (cond) { \
        canary = GET_SP(); \
        SET_SV(vm_stack_canary); \
    } \
    else { \
        SET_SV(Qfalse); /* cleanup */ \
    }
#define CHECK_CANARY(cond, insn) \
    if (cond) { \
        if (*canary == vm_stack_canary) { \
            *canary = Qfalse; /* cleanup */ \
        } \
        else { \
            rb_vm_canary_is_found_dead(insn, *canary); \
        } \
    }
#else
#define SETUP_CANARY(cond) if (cond) {} else {}
#define CHECK_CANARY(cond, insn) if (cond) {(void)(insn);}
#endif
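
/* SETUP_CANARY() plants vm_stack_canary in the slot just above the live
 * stack top; CHECK_CANARY() later verifies that the canary is still intact
 * and calls rb_vm_canary_is_found_dead() if something overwrote it,
 * typically meaning an instruction touched a stack slot it was not supposed
 * to. With VM_CHECK_MODE disabled, both macros compile down to trivial
 * statements. */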

/**********************************************************/
/* others */
/**********************************************************/

#ifndef MJIT_HEADER
#define CALL_SIMPLE_METHOD() do { \
    rb_snum_t x = leaf ? INSN_ATTR(width) : 0; \
    rb_snum_t y = attr_width_opt_send_without_block(0); \
    rb_snum_t z = x - y; \
    ADD_PC(z); \
    DISPATCH_ORIGINAL_INSN(opt_send_without_block); \
} while (0)
#endif
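
/* CALL_SIMPLE_METHOD() is the fallback used by specialized opt_* instructions
 * when their fast path does not apply: it adjusts the PC by the width
 * difference between the current instruction and opt_send_without_block
 * (the leaf flag decides whether the current instruction's own width must be
 * compensated for), then re-enters the generic opt_send_without_block
 * handler via DISPATCH_ORIGINAL_INSN so the call goes the ordinary way. */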

#define PREV_CLASS_SERIAL() (ruby_vm_class_serial)
#define NEXT_CLASS_SERIAL() (++ruby_vm_class_serial)
#define GET_GLOBAL_CONSTANT_STATE() (ruby_vm_global_constant_state)
#define INC_GLOBAL_CONSTANT_STATE() (++ruby_vm_global_constant_state)
#define GET_GLOBAL_CVAR_STATE() (ruby_vm_global_cvar_state)
#define INC_GLOBAL_CVAR_STATE() (++ruby_vm_global_cvar_state)

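/* vm_throw_data is the imemo payload that carries a non-local transfer
 * (a VM-level "throw", e.g. break or return) from the instruction that
 * initiates it to the frame that catches it: the thrown value (throw_obj),
 * the catching control frame (catch_frame), and the throw state such as
 * TAG_BREAK (throw_state). */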
static inline struct vm_throw_data *
THROW_DATA_NEW(VALUE val, const rb_control_frame_t *cf, int st)
{
    struct vm_throw_data *obj = (struct vm_throw_data *)rb_imemo_new(imemo_throw_data, val, (VALUE)cf, 0, 0);
    obj->throw_state = st;
    return obj;
}

static inline VALUE
THROW_DATA_VAL(const struct vm_throw_data *obj)
{
    VM_ASSERT(THROW_DATA_P(obj));
    return obj->throw_obj;
}

static inline const rb_control_frame_t *
THROW_DATA_CATCH_FRAME(const struct vm_throw_data *obj)
{
    VM_ASSERT(THROW_DATA_P(obj));
    return obj->catch_frame;
}

static inline int
THROW_DATA_STATE(const struct vm_throw_data *obj)
{
    VM_ASSERT(THROW_DATA_P(obj));
    return obj->throw_state;
}

static inline int
THROW_DATA_CONSUMED_P(const struct vm_throw_data *obj)
{
    VM_ASSERT(THROW_DATA_P(obj));
    return obj->flags & THROW_DATA_CONSUMED;
}

static inline void
THROW_DATA_CATCH_FRAME_SET(struct vm_throw_data *obj, const rb_control_frame_t *cfp)
{
    VM_ASSERT(THROW_DATA_P(obj));
    obj->catch_frame = cfp;
}

static inline void
THROW_DATA_STATE_SET(struct vm_throw_data *obj, int st)
{
    VM_ASSERT(THROW_DATA_P(obj));
    obj->throw_state = st;
}

static inline void
THROW_DATA_CONSUMED_SET(struct vm_throw_data *obj)
{
    if (THROW_DATA_P(obj) &&
        THROW_DATA_STATE(obj) == TAG_BREAK) {
        obj->flags |= THROW_DATA_CONSUMED;
    }
}

#define IS_ARGS_SPLAT(ci) (vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT)
#define IS_ARGS_KEYWORD(ci) (vm_ci_flag(ci) & VM_CALL_KWARG)
#define IS_ARGS_KW_SPLAT(ci) (vm_ci_flag(ci) & VM_CALL_KW_SPLAT)
#define IS_ARGS_KW_OR_KW_SPLAT(ci) (vm_ci_flag(ci) & (VM_CALL_KWARG | VM_CALL_KW_SPLAT))
#define IS_ARGS_KW_SPLAT_MUT(ci) (vm_ci_flag(ci) & VM_CALL_KW_SPLAT_MUT)

/* If this returns true, an optimized function returned by `vm_call_iseq_setup_func`
   can be used as a fastpath. */
static inline bool
vm_call_iseq_optimizable_p(const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    return !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
        METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc));
}
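
/* Splat and keyword call sites are excluded from the fastpath above because
 * they need extra argument processing on each call, and
 * METHOD_ENTRY_CACHEABLE() additionally rejects method entries that are
 * flagged as unsafe to cache. */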

#endif /* RUBY_INSNHELPER_H */