11 #include "ruby/internal/config.h"
16 #include "debug_counter.h"
18 #include "internal/class.h"
19 #include "internal/compar.h"
20 #include "internal/hash.h"
21 #include "internal/numeric.h"
22 #include "internal/proc.h"
23 #include "internal/random.h"
24 #include "internal/variable.h"
25 #include "internal/struct.h"
31 #include "insns_info.inc"
38 int argc,
const VALUE *argv,
int priv);
50 ruby_vm_special_exception_copy(
VALUE exc)
53 rb_obj_copy_ivar(e, exc);
61 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
62 ec->raised_flag = RAISED_STACKOVERFLOW;
64 VALUE at = rb_ec_backtrace_object(ec);
65 mesg = ruby_vm_special_exception_copy(mesg);
70 EC_JUMP_TAG(ec, TAG_RAISE);
/* Forward declaration: vm_stackoverflow raises a stack-overflow error and
 * never returns (NORETURN). */
73 NORETURN(
static void vm_stackoverflow(
void));
/* Keep the overflow path out of line and marked cold: it is an exceptional,
 * rarely executed path and should not bloat callers. */
75 NOINLINE(
static COLDFUNC
void vm_stackoverflow(
void));
79 vm_stackoverflow(
void)
81 ec_stack_overflow(GET_EC(), TRUE);
89 rb_bug(
"system stack overflow during GC. Faulty native extension?");
92 ec->raised_flag = RAISED_STACKOVERFLOW;
93 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
94 EC_JUMP_TAG(ec, TAG_RAISE);
96 #ifdef USE_SIGALTSTACK
97 ec_stack_overflow(ec, TRUE);
99 ec_stack_overflow(ec, FALSE);
104 #if VM_CHECK_MODE > 0
106 callable_class_p(
VALUE klass)
108 #if VM_CHECK_MODE >= 2
109 if (!klass)
return FALSE;
137 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment));
139 if (callable_class_p(cme->defined_class)) {
149 vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
151 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
152 enum imemo_type cref_or_me_type = imemo_env;
155 cref_or_me_type = imemo_type(cref_or_me);
157 if (
type & VM_FRAME_FLAG_BMETHOD) {
161 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
162 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
164 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
165 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
169 if (cref_or_me_type != imemo_ment) {
170 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
174 if (req_cref && cref_or_me_type != imemo_cref) {
175 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
178 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
179 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
183 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
189 if (cref_or_me_type == imemo_ment) {
192 if (!callable_method_entry_p(me)) {
193 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
197 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
198 VM_ASSERT(iseq == NULL ||
199 RUBY_VM_NORMAL_ISEQ_P(iseq) );
202 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
212 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
215 #define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
217 vm_check_frame_detail(type, req_block, req_me, req_cref, \
218 specval, cref_or_me, is_cframe, iseq); \
220 switch (given_magic) {
222 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
223 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
224 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
225 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
226 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
227 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
228 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
229 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
230 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
232 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
/* Sentinel value planted on the VM stack to detect stray writes.  The
 * "was_born" flag records whether the canary has been initialized yet, so
 * canary checks can bail out before setup has happened. */
237 static VALUE vm_stack_canary;
238 static bool vm_stack_canary_was_born =
false;
241 MJIT_FUNC_EXPORTED
void
247 if (! LIKELY(vm_stack_canary_was_born)) {
250 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
254 else if (! (iseq = GET_ISEQ())) {
257 else if (LIKELY(sp[0] != vm_stack_canary)) {
266 const VALUE *orig = rb_iseq_original_iseq(iseq);
267 const VALUE *encoded = iseq->body->iseq_encoded;
268 const ptrdiff_t pos = GET_PC() - encoded;
269 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
270 const char *name = insn_name(insn);
271 const VALUE iseqw = rb_iseqw_new(iseq);
273 const char *stri = rb_str_to_cstr(inspection);
274 const VALUE disasm = rb_iseq_disasm(iseq);
275 const char *strd = rb_str_to_cstr(disasm);
281 "We are killing the stack canary set by %s, "
282 "at %s@pc=%"PRIdPTR
"\n"
283 "watch out the C stack trace.\n"
285 name, stri, pos, strd);
289 #define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
/* Non-checking configuration: frame and canary validation compile to nothing. */
292 #define vm_check_canary(ec, sp)
293 #define vm_check_frame(a, b, c, d)
296 #if USE_DEBUG_COUNTER
298 vm_push_frame_debug_counter_inc(
305 RB_DEBUG_COUNTER_INC(frame_push);
307 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
308 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
309 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
312 RB_DEBUG_COUNTER_INC(frame_R2R);
315 RB_DEBUG_COUNTER_INC(frame_R2C);
320 RB_DEBUG_COUNTER_INC(frame_C2R);
323 RB_DEBUG_COUNTER_INC(frame_C2C);
328 switch (
type & VM_FRAME_MAGIC_MASK) {
329 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
330 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
331 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
332 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
333 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
334 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
335 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
336 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
337 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
343 #define vm_push_frame_debug_counter_inc(ec, cfp, t)
/* Environment data layout relative to the EP: ep[-2] holds the method
 * entry / CREF slot, ep[-1] holds specval, ep[0] holds the frame flags. */
346 STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
347 STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
348 STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
364 vm_check_frame(
type, specval, cref_or_me, iseq);
365 VM_ASSERT(local_size >= 0);
368 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
369 vm_check_canary(ec, sp);
374 for (
int i=0; i < local_size; i++) {
392 #if VM_DEBUG_BP_CHECK
403 vm_push_frame_debug_counter_inc(ec, cfp,
type);
410 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
412 if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
413 if (VMDEBUG == 2) SDR();
415 RUBY_VM_CHECK_INTS(ec);
416 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
418 return flags & VM_FRAME_FLAG_FINISH;
424 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
429 rb_arity_error_new(
int argc,
int min,
int max)
433 err_mess =
rb_sprintf(
"wrong number of arguments (given %d, expected %d)", argc, min);
436 err_mess =
rb_sprintf(
"wrong number of arguments (given %d, expected %d+)", argc, min);
439 err_mess =
rb_sprintf(
"wrong number of arguments (given %d, expected %d..%d)", argc, min, max);
445 rb_error_arity(
int argc,
int min,
int max)
/* Slow path for env-slot writes (taken when the write barrier is required);
 * kept out of line so the fast path in vm_env_write stays compact. */
452 NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
455 vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
458 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
459 VM_FORCE_WRITE(&ep[index], v);
460 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
461 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
465 vm_env_write(
const VALUE *ep,
int index,
VALUE v)
467 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
468 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
469 VM_STACK_ENV_WRITE(ep, index, v);
472 vm_env_write_slowpath(ep, index, v);
479 if (block_handler == VM_BLOCK_HANDLER_NONE) {
483 switch (vm_block_handler_type(block_handler)) {
484 case block_handler_type_iseq:
485 case block_handler_type_ifunc:
486 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
487 case block_handler_type_symbol:
488 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
489 case block_handler_type_proc:
490 return VM_BH_TO_PROC(block_handler);
492 VM_UNREACHABLE(rb_vm_bh_to_procval);
499 #if VM_CHECK_MODE > 0
501 vm_svar_valid_p(
VALUE svar)
504 switch (imemo_type(svar)) {
513 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
523 if (lep && (ec == NULL || ec->root_lep != lep)) {
524 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
527 svar = ec->root_svar;
530 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
538 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
540 if (lep && (ec == NULL || ec->root_lep != lep)) {
541 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
544 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->
self, &ec->root_svar, svar);
551 const struct vm_svar *svar = lep_svar(ec, lep);
556 case VM_SVAR_LASTLINE:
557 return svar->lastline;
558 case VM_SVAR_BACKREF:
559 return svar->backref;
561 const VALUE ary = svar->others;
582 struct vm_svar *svar = lep_svar(ec, lep);
585 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
589 case VM_SVAR_LASTLINE:
592 case VM_SVAR_BACKREF:
596 VALUE ary = svar->others;
612 val = lep_svar_get(ec, lep, key);
615 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
632 rb_bug(
"unexpected back-ref");
644 check_method_entry(
VALUE obj,
int can_be_svar)
646 if (obj ==
Qfalse)
return NULL;
648 #if VM_CHECK_MODE > 0
652 switch (imemo_type(obj)) {
662 #if VM_CHECK_MODE > 0
663 rb_bug(
"check_method_entry: svar should not be there:");
672 const VALUE *ep = cfp->ep;
675 while (!VM_ENV_LOCAL_P(ep)) {
676 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
677 ep = VM_ENV_PREV_EP(ep);
680 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
686 switch (me->def->type) {
687 case VM_METHOD_TYPE_ISEQ:
688 return me->def->body.iseq.
iseqptr;
697 switch (me->def->type) {
698 case VM_METHOD_TYPE_ISEQ:
699 return me->def->body.iseq.
cref;
705 #if VM_CHECK_MODE == 0
709 check_cref(
VALUE obj,
int can_be_svar)
711 if (obj ==
Qfalse)
return NULL;
713 #if VM_CHECK_MODE > 0
717 switch (imemo_type(obj)) {
727 #if VM_CHECK_MODE > 0
728 rb_bug(
"check_method_entry: svar should not be there:");
735 vm_env_cref(
const VALUE *ep)
739 while (!VM_ENV_LOCAL_P(ep)) {
740 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
741 ep = VM_ENV_PREV_EP(ep);
744 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
748 is_cref(
const VALUE v,
int can_be_svar)
751 switch (imemo_type(v)) {
764 vm_env_cref_by_cref(
const VALUE *ep)
766 while (!VM_ENV_LOCAL_P(ep)) {
767 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
768 ep = VM_ENV_PREV_EP(ep);
770 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
774 cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
776 const VALUE v = *vptr;
780 switch (imemo_type(v)) {
783 new_cref = vm_cref_dup(cref);
788 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
793 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
797 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
806 vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
808 if (vm_env_cref_by_cref(ep)) {
812 while (!VM_ENV_LOCAL_P(ep)) {
813 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
814 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
817 ep = VM_ENV_PREV_EP(ep);
819 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
820 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
823 rb_bug(
"vm_cref_dup: unreachable");
828 vm_get_cref(
const VALUE *ep)
836 rb_bug(
"vm_get_cref: unreachable");
841 rb_vm_get_cref(
const VALUE *ep)
843 return vm_get_cref(ep);
854 return vm_get_cref(cfp->ep);
858 vm_get_const_key_cref(
const VALUE *ep)
865 FL_TEST(CREF_CLASS(cref), RCLASS_CLONED)) {
868 cref = CREF_NEXT(cref);
881 if (CREF_CLASS(cref) == old_klass) {
882 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
883 *new_cref_ptr = new_cref;
886 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
887 cref = CREF_NEXT(cref);
888 *new_cref_ptr = new_cref;
889 new_cref_ptr = &new_cref->next;
891 *new_cref_ptr = NULL;
900 prev_cref = vm_env_cref(ep);
906 prev_cref = vm_env_cref(cfp->ep);
910 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
914 vm_get_cbase(
const VALUE *ep)
918 return CREF_CLASS_FOR_DEFINITION(cref);
922 vm_get_const_base(
const VALUE *ep)
927 if (!CREF_PUSHED_BY_EVAL(cref)) {
928 return CREF_CLASS_FOR_DEFINITION(cref);
930 cref = CREF_NEXT(cref);
937 vm_check_if_namespace(
VALUE klass)
945 vm_ensure_not_refinement_module(
VALUE self)
948 rb_warn(
"not defined at the refinement, but at the outer class/module");
964 if (
NIL_P(orig_klass) && allow_nil) {
966 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
970 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
971 root_cref = CREF_NEXT(root_cref);
974 while (cref && CREF_NEXT(cref)) {
975 if (CREF_PUSHED_BY_EVAL(cref)) {
979 klass = CREF_CLASS(cref);
981 cref = CREF_NEXT(cref);
987 if ((ce = rb_const_lookup(klass,
id))) {
988 rb_const_warn_if_deprecated(ce, klass,
id);
991 if (am == klass)
break;
993 if (is_defined)
return 1;
994 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
996 goto search_continue;
1003 if (UNLIKELY(!rb_ractor_main_p())) {
1006 "can not access non-shareable objects in constant %"PRIsVALUE
"::%s by non-main ractor.",
rb_class_path(klass),
rb_id2name(
id));
1017 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1018 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1032 vm_check_if_namespace(orig_klass);
1034 return rb_public_const_defined_from(orig_klass,
id);
1037 return rb_public_const_get_from(orig_klass,
id);
1048 rb_bug(
"vm_get_cvar_base: no cref");
1051 while (CREF_NEXT(cref) &&
1053 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1054 cref = CREF_NEXT(cref);
1056 if (top_level_raise && !CREF_NEXT(cref)) {
1060 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1069 vm_search_const_defined_class(
const VALUE cbase,
ID id)
1072 if (cbase == rb_cObject) {
1088 if (iv_index_tbl == NULL)
return false;
1092 found = st_lookup(iv_index_tbl, (st_data_t)
id, &ent_data);
1097 return found ? true :
false;
1111 vm_cc_attr_index_set(cc, (
int)ent->index + 1);
1125 else if (LIKELY(is_attr ?
1126 RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_unset, vm_cc_attr_index(cc) > 0) :
1127 RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_serial,
1128 ic->entry && ic->entry->class_serial == RCLASS_SERIAL(
RBASIC(obj)->klass)))) {
1129 uint32_t index = !is_attr ? ic->entry->index : (vm_cc_attr_index(cc) - 1);
1131 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1140 val = rb_ivar_generic_lookup_with_index(obj,
id, index);
1149 struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
1151 if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl,
id, &ent)) {
1152 fill_ivar_cache(iseq, ic, cc, is_attr, ent);
1165 if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl,
id, &ent)) {
1166 fill_ivar_cache(iseq, ic, cc, is_attr, ent);
1167 val = rb_ivar_generic_lookup_with_index(obj,
id, ent->index);
1176 if (LIKELY(val !=
Qundef)) {
1185 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1206 struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
1209 if (iv_index_tbl_lookup(iv_index_tbl,
id, &ent)) {
1214 else if (ent->index >= INT_MAX) {
1218 vm_cc_attr_index_set(cc, (
int)(ent->index + 1));
1221 uint32_t index = ent->index;
1224 rb_init_iv_list(obj);
1228 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
1234 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1241 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1247 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1260 (!is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_serial, ic->entry && ic->entry->class_serial == RCLASS_SERIAL(
RBASIC(obj)->klass))) ||
1261 ( is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_unset, vm_cc_attr_index(cc) > 0)))) {
1262 uint32_t index = !is_attr ? ic->entry->index : vm_cc_attr_index(cc)-1;
1265 rb_init_iv_list(obj);
1269 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1274 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1278 return vm_setivar_slowpath_attr(obj,
id, val, cc);
1281 return vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1288 VALUE defined_class = 0;
1292 defined_class =
RBASIC(defined_class)->klass;
1295 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1297 rb_bug(
"the cvc table should be set");
1301 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1302 rb_bug(
"should have cvar cache entry");
1306 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1319 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE()) {
1321 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1323 if (st_lookup(RCLASS_IV_TBL(ic->entry->class_value), (st_data_t)
id, &v) &&
1324 LIKELY(rb_ractor_main_p())) {
1330 cref = vm_get_cref(GET_EP());
1331 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1333 return update_classvariable_cache(iseq, klass,
id, ic);
1339 return vm_getclassvariable(iseq, cfp,
id, ic);
1347 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE()) {
1348 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1350 rb_class_ivar_set(ic->entry->class_value,
id, val);
1354 cref = vm_get_cref(GET_EP());
1355 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1359 update_classvariable_cache(iseq, klass,
id, ic);
1365 vm_setclassvariable(iseq, cfp,
id, val, ic);
1371 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE);
1377 vm_setivar(obj,
id, val, iseq, ic, 0, 0);
1383 vm_setinstancevariable(iseq, obj,
id, val, ic);
1391 rb_vm_set_ivar_idx(
VALUE obj, uint32_t index,
VALUE val)
1400 rb_init_iv_list(obj);
1414 ec->tag->state =
FIX2INT(err);
1417 ec->tag->state = TAG_THROW;
1419 else if (THROW_DATA_P(err)) {
1420 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1423 ec->tag->state = TAG_RAISE;
1430 const int flag,
const VALUE throwobj)
1438 else if (state == TAG_BREAK) {
1440 const VALUE *ep = GET_EP();
1441 const rb_iseq_t *base_iseq = GET_ISEQ();
1442 escape_cfp = reg_cfp;
1444 while (base_iseq->body->type != ISEQ_TYPE_BLOCK) {
1445 if (escape_cfp->iseq->body->type == ISEQ_TYPE_CLASS) {
1446 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1447 ep = escape_cfp->ep;
1448 base_iseq = escape_cfp->iseq;
1451 ep = VM_ENV_PREV_EP(ep);
1452 base_iseq = base_iseq->body->parent_iseq;
1453 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1454 VM_ASSERT(escape_cfp->iseq == base_iseq);
1458 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1464 ep = VM_ENV_PREV_EP(ep);
1466 while (escape_cfp < eocfp) {
1467 if (escape_cfp->ep == ep) {
1468 const rb_iseq_t *
const iseq = escape_cfp->iseq;
1469 const VALUE epc = escape_cfp->pc - iseq->body->iseq_encoded;
1474 for (i=0; i < ct->size; i++) {
1476 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1478 if (entry->type == CATCH_TYPE_BREAK &&
1479 entry->iseq == base_iseq &&
1480 entry->start < epc && entry->end >= epc) {
1481 if (entry->cont == epc) {
1490 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1495 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1498 else if (state == TAG_RETRY) {
1499 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1501 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1503 else if (state == TAG_RETURN) {
1504 const VALUE *current_ep = GET_EP();
1505 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1506 int in_class_frame = 0;
1508 escape_cfp = reg_cfp;
1511 while (!VM_ENV_LOCAL_P(ep)) {
1512 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1515 ep = VM_ENV_PREV_EP(ep);
1519 while (escape_cfp < eocfp) {
1520 const VALUE *lep = VM_CF_LEP(escape_cfp);
1526 if (lep == target_lep &&
1527 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1528 escape_cfp->iseq->body->type == ISEQ_TYPE_CLASS) {
1533 if (lep == target_lep) {
1534 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1536 if (in_class_frame) {
1541 const VALUE *tep = current_ep;
1543 while (target_lep != tep) {
1544 if (escape_cfp->ep == tep) {
1546 if (tep == target_ep) {
1550 goto unexpected_return;
1553 tep = VM_ENV_PREV_EP(tep);
1557 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1558 switch (escape_cfp->iseq->body->type) {
1560 case ISEQ_TYPE_MAIN:
1562 if (in_class_frame)
goto unexpected_return;
1563 if (target_ep == NULL) {
1567 goto unexpected_return;
1571 case ISEQ_TYPE_EVAL:
1572 case ISEQ_TYPE_CLASS:
1581 if (escape_cfp->ep == target_lep && escape_cfp->iseq->body->type == ISEQ_TYPE_METHOD) {
1582 if (target_ep == NULL) {
1586 goto unexpected_return;
1590 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1593 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1599 rb_bug(
"isns(throw): unsupported throw type");
1602 ec->tag->state = state;
1603 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1608 rb_num_t throw_state,
VALUE throwobj)
1610 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1611 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1614 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1617 return vm_throw_continue(ec, throwobj);
1622 vm_expandarray(
VALUE *sp,
VALUE ary, rb_num_t num,
int flag)
1624 int is_splat = flag & 0x01;
1625 rb_num_t space_size = num + is_splat;
1626 VALUE *base = sp - 1;
1629 const VALUE obj = ary;
1641 if (space_size == 0) {
1644 else if (flag & 0x02) {
1649 for (i=0; i<num-len; i++) {
1653 for (j=0; i<num; i++, j++) {
1654 VALUE v = ptr[len - j - 1];
1664 VALUE *bptr = &base[space_size - 1];
1666 for (i=0; i<num; i++) {
1668 for (; i<num; i++) {
1695 #if VM_CHECK_MODE > 0
1696 ccs->debug_sig = ~(
VALUE)ccs;
1702 ccs->entries = NULL;
1709 if (! vm_cc_markable(cc)) {
1712 else if (! vm_ci_markable(ci)) {
1716 if (UNLIKELY(ccs->len == ccs->capa)) {
1717 if (ccs->capa == 0) {
1719 ccs->entries =
ALLOC_N(
struct rb_class_cc_entries_entry, ccs->capa);
1723 REALLOC_N(ccs->entries,
struct rb_class_cc_entries_entry, ccs->capa);
1726 VM_ASSERT(ccs->len < ccs->capa);
1728 const int pos = ccs->len++;
1732 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
1738 #if VM_CHECK_MODE > 0
1742 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
1743 for (
int i=0; i<ccs->len; i++) {
1744 vm_ci_dump(ccs->entries[i].ci);
1745 rp(ccs->entries[i].cc);
1752 VM_ASSERT(vm_ccs_p(ccs));
1753 VM_ASSERT(ccs->len <= ccs->capa);
1755 for (
int i=0; i<ccs->len; i++) {
1756 const struct rb_callinfo *ci = ccs->entries[i].ci;
1759 VM_ASSERT(vm_ci_p(ci));
1760 VM_ASSERT(vm_ci_mid(ci) == mid);
1761 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
1762 VM_ASSERT(vm_cc_class_check(cc, klass));
1763 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
1776 const ID mid = vm_ci_mid(ci);
1777 struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
1782 if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
1784 const int ccs_len = ccs->len;
1786 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
1787 rb_vm_ccs_free(ccs);
1788 rb_id_table_delete(cc_tbl, mid);
1792 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
1794 for (
int i=0; i<ccs_len; i++) {
1795 const struct rb_callinfo *ccs_ci = ccs->entries[i].ci;
1796 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
1798 VM_ASSERT(vm_ci_p(ccs_ci));
1799 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
1802 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
1804 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
1805 VM_ASSERT(ccs_cc->klass == klass);
1806 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
1815 cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
1818 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
1824 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
1826 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
1829 cme = rb_callable_method_entry(klass, mid);
1832 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
1836 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
1837 return &vm_empty_cc;
1840 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
1845 VM_ASSERT(cc_tbl != NULL);
1847 if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
1853 ccs = vm_ccs_create(klass, cme);
1854 rb_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
1858 cme = check_overloaded_cme(cme, ci);
1860 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general);
1861 vm_ccs_push(klass, ccs, ci, cc);
1863 VM_ASSERT(vm_cc_cme(cc) != NULL);
1864 VM_ASSERT(cme->called_id == mid);
1865 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
1879 cc = vm_search_cc(klass, ci);
1882 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
1883 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
1884 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
1885 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
1886 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
1897 #if USE_DEBUG_COUNTER
1901 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
1903 #if OPT_INLINE_METHOD_CACHE
1914 #if USE_DEBUG_COUNTER
1915 if (old_cc == &empty_cc) {
1917 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
1919 else if (old_cc == cc) {
1920 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
1922 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
1923 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
1925 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
1926 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
1927 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
1930 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
1935 VM_ASSERT(vm_cc_cme(cc) == NULL ||
1936 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
1949 #if OPT_INLINE_METHOD_CACHE
1950 if (LIKELY(vm_cc_class_check(cc, klass))) {
1951 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
1952 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
1953 RB_DEBUG_COUNTER_INC(mc_inline_hit);
1954 VM_ASSERT(vm_cc_cme(cc) == NULL ||
1955 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
1956 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
1960 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
1963 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
1967 return vm_search_method_slowpath0(cd_owner, cd, klass);
1974 VM_ASSERT(klass !=
Qfalse);
1977 return vm_search_method_fastpath(cd_owner, cd, klass);
1987 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
1988 VM_ASSERT(callable_method_entry_p(me));
1990 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
1994 return me->def->body.cfunc.func == func;
2002 VM_ASSERT(iseq != NULL);
2004 return check_cfunc(vm_cc_cme(cc), func);
2007 #define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2039 opt_equality_specialized(
VALUE recv,
VALUE obj)
2041 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2042 goto compare_by_identity;
2044 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2045 goto compare_by_identity;
2048 goto compare_by_identity;
2057 #if MSC_VERSION_BEFORE(1300)
2061 else if (isnan(b)) {
2066 return RBOOL(a == b);
2073 return rb_str_eql_internal(obj, recv);
2078 compare_by_identity:
2079 return RBOOL(recv == obj);
2085 VM_ASSERT(cd_owner != NULL);
2087 VALUE val = opt_equality_specialized(recv, obj);
2088 if (val !=
Qundef)
return val;
2090 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2094 return RBOOL(recv == obj);
2098 #undef EQ_UNREDEFINED_P
/* Out-of-line slow path for equality dispatch by method id
 * (used below for idEq and idEqlP). */
2103 NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2106 opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2108 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);
2110 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2111 return RBOOL(recv == obj);
2121 VALUE val = opt_equality_specialized(recv, obj);
2126 return opt_equality_by_mid_slowpath(recv, obj, mid);
2133 return opt_equality_by_mid(obj1, obj2, idEq);
2139 return opt_equality_by_mid(obj1, obj2, idEqlP);
2151 case VM_CHECKMATCH_TYPE_WHEN:
2153 case VM_CHECKMATCH_TYPE_RESCUE:
2158 case VM_CHECKMATCH_TYPE_CASE: {
2159 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2162 rb_bug(
"check_match: unreachable");
/* Old MSVC (before VC7 / _MSC_VER 1300): guard floating-point comparisons
 * against NaN explicitly by returning Qfalse early. */
2167 #if MSC_VERSION_BEFORE(1300)
2168 #define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2170 #define CHECK_CMP_NAN(a, b)
2174 double_cmp_lt(
double a,
double b)
2176 CHECK_CMP_NAN(a, b);
2177 return RBOOL(a < b);
2181 double_cmp_le(
double a,
double b)
2183 CHECK_CMP_NAN(a, b);
2184 return RBOOL(a <= b);
2188 double_cmp_gt(
double a,
double b)
2190 CHECK_CMP_NAN(a, b);
2191 return RBOOL(a > b);
2195 double_cmp_ge(
double a,
double b)
2197 CHECK_CMP_NAN(a, b);
2198 return RBOOL(a >= b);
2201 static inline VALUE *
2207 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2208 VALUE *bp = prev_cfp->sp + cfp->iseq->body->local_table_size + VM_ENV_DATA_SIZE;
2209 if (cfp->iseq->body->type == ISEQ_TYPE_METHOD) {
2213 #if VM_DEBUG_BP_CHECK
2214 if (bp != cfp->bp_check) {
2215 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2216 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2217 (
long)(bp - GET_EC()->vm_stack));
2218 rb_bug(
"vm_base_ptr: unreachable");
2233 #include "vm_args.c"
/* Forward declaration: by its signature, maps (call info, parameter size,
 * local size) to a vm_call_handler. */
2243 static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2248 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2250 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2256 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2259 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2260 int param = iseq->body->
param.size;
2261 int local = iseq->body->local_table_size;
2262 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2268 return iseq->body->
param.flags.has_opt == FALSE &&
2269 iseq->body->
param.flags.has_rest == FALSE &&
2270 iseq->body->
param.flags.has_post == FALSE &&
2271 iseq->body->
param.flags.has_kw == FALSE &&
2272 iseq->body->
param.flags.has_kwrest == FALSE &&
2273 iseq->body->
param.flags.accepts_no_kwarg == FALSE &&
2274 iseq->body->
param.flags.has_block == FALSE;
2277 MJIT_FUNC_EXPORTED
bool
2278 rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2280 return iseq->body->
param.flags.has_opt == TRUE &&
2281 iseq->body->
param.flags.has_rest == FALSE &&
2282 iseq->body->
param.flags.has_post == FALSE &&
2283 iseq->body->
param.flags.has_kw == FALSE &&
2284 iseq->body->
param.flags.has_kwrest == FALSE &&
2285 iseq->body->
param.flags.accepts_no_kwarg == FALSE &&
2286 iseq->body->
param.flags.has_block == FALSE;
2289 MJIT_FUNC_EXPORTED
bool
2290 rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2292 return iseq->body->
param.flags.has_opt == FALSE &&
2293 iseq->body->
param.flags.has_rest == FALSE &&
2294 iseq->body->
param.flags.has_post == FALSE &&
2295 iseq->body->
param.flags.has_kw == TRUE &&
2296 iseq->body->
param.flags.has_kwrest == FALSE &&
2297 iseq->body->
param.flags.has_block == FALSE;
2302 rb_splat_or_kwargs_p(
const struct rb_callinfo *restrict ci)
2304 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
2313 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2318 vm_caller_setup_arg_splat(cfp, calling);
2319 if (!IS_ARGS_KW_OR_KW_SPLAT(ci) &&
2320 calling->argc > 0 &&
2322 (((
struct RHash *)final_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2324 calling->kw_splat = 1;
2327 if (UNLIKELY(IS_ARGS_KW_OR_KW_SPLAT(ci))) {
2328 if (IS_ARGS_KEYWORD(ci)) {
2333 vm_caller_setup_arg_kw(cfp, calling, ci);
2336 VALUE keyword_hash = cfp->sp[-1];
2339 cfp->sp[-1] =
rb_hash_dup(rb_to_hash_type(keyword_hash));
2341 else if (!IS_ARGS_KW_SPLAT_MUT(ci)) {
2356 if (UNLIKELY(calling->kw_splat)) {
2363 calling->kw_splat = 0;
2368 #define USE_OPT_HIST 0
/* Histogram of observed opt_pc values; the extra final slot counts values
 * >= OPT_HIST_MAX (see the opt_hist[OPT_HIST_MAX]++ overflow bumps below). */
2371 #define OPT_HIST_MAX 64
2372 static int opt_hist[OPT_HIST_MAX+1];
2374 __attribute__((destructor))
2376 opt_hist_show_results_at_exit(
void)
2378 for (
int i=0; i<OPT_HIST_MAX; i++) {
2379 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
2389 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2390 const int lead_num = iseq->body->
param.lead_num;
2391 const int opt = calling->argc - lead_num;
2392 const int opt_num = iseq->body->
param.opt_num;
2393 const int opt_pc = (int)iseq->body->
param.opt_table[opt];
2394 const int param = iseq->body->
param.size;
2395 const int local = iseq->body->local_table_size;
2396 const int delta = opt_num - opt;
2398 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2401 if (opt_pc < OPT_HIST_MAX) {
2405 opt_hist[OPT_HIST_MAX]++;
2409 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2417 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2418 const int lead_num = iseq->body->
param.lead_num;
2419 const int opt = calling->argc - lead_num;
2420 const int opt_pc = (int)iseq->body->
param.opt_table[opt];
2422 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2425 if (opt_pc < OPT_HIST_MAX) {
2429 opt_hist[OPT_HIST_MAX]++;
2433 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2438 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
2439 VALUE *
const locals);
2448 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
2449 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2451 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2452 const struct rb_iseq_param_keyword *kw_param = iseq->body->
param.keyword;
2454 const int ci_kw_len = kw_arg->keyword_len;
2455 const VALUE *
const ci_keywords = kw_arg->keywords;
2456 VALUE *argv = cfp->sp - calling->argc;
2457 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2458 const int lead_num = iseq->body->
param.lead_num;
2460 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2461 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2463 int param = iseq->body->
param.size;
2464 int local = iseq->body->local_table_size;
2465 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2472 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->ci;
2475 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
2476 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2478 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2479 const struct rb_iseq_param_keyword *kw_param = iseq->body->
param.keyword;
2480 VALUE *
const argv = cfp->sp - calling->argc;
2481 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2484 for (i=0; i<kw_param->num; i++) {
2485 klocals[i] = kw_param->default_values[i];
2492 int param = iseq->body->
param.size;
2493 int local = iseq->body->local_table_size;
2494 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2499 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
2503 bool cacheable_ci = vm_ci_markable(ci);
2505 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
2506 if (LIKELY(rb_simple_iseq_p(iseq))) {
2508 CALLER_SETUP_ARG(cfp, calling, ci);
2509 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
2511 if (calling->argc != iseq->body->
param.lead_num) {
2512 argument_arity_error(ec, iseq, calling->argc, iseq->body->
param.lead_num, iseq->body->
param.lead_num);
2515 VM_ASSERT(ci == calling->ci);
2516 VM_ASSERT(cc == calling->cc);
2517 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), cacheable_ci && vm_call_iseq_optimizable_p(ci, cc));
2520 else if (rb_iseq_only_optparam_p(iseq)) {
2522 CALLER_SETUP_ARG(cfp, calling, ci);
2523 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
2525 const int lead_num = iseq->body->
param.lead_num;
2526 const int opt_num = iseq->body->
param.opt_num;
2527 const int argc = calling->argc;
2528 const int opt = argc - lead_num;
2530 if (opt < 0 || opt > opt_num) {
2531 argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
2534 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
2535 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
2536 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
2537 cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
2540 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
2541 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
2542 cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
2546 VM_ASSERT((
int)iseq->body->
param.size == lead_num + opt_num);
2547 for (
int i=argc; i<lead_num + opt_num; i++) {
2550 return (
int)iseq->body->
param.opt_table[opt];
2552 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
2553 const int lead_num = iseq->body->
param.lead_num;
2554 const int argc = calling->argc;
2555 const struct rb_iseq_param_keyword *kw_param = iseq->body->
param.keyword;
2557 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
2560 if (argc - kw_arg->keyword_len == lead_num) {
2561 const int ci_kw_len = kw_arg->keyword_len;
2562 const VALUE *
const ci_keywords = kw_arg->keywords;
2564 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2566 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2567 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2569 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
2570 cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
2575 else if (argc == lead_num) {
2577 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2578 args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);
2580 if (klocals[kw_param->num] ==
INT2FIX(0)) {
2582 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
2583 cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
2591 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
2597 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
2600 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2601 const int param_size = iseq->body->
param.size;
2602 const int local_size = iseq->body->local_table_size;
2603 const int opt_pc = vm_callee_setup_arg(ec, calling, def_iseq_ptr(vm_cc_cme(cc)->def), cfp->sp - calling->argc, param_size, local_size);
2604 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
2609 int opt_pc,
int param_size,
int local_size)
2614 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
2615 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
2618 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2624 int opt_pc,
int param_size,
int local_size)
2626 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
2627 VALUE *argv = cfp->sp - calling->argc;
2628 VALUE *sp = argv + param_size;
2629 cfp->sp = argv - 1 ;
2631 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
2632 calling->block_handler, (
VALUE)me,
2633 iseq->body->iseq_encoded + opt_pc, sp,
2634 local_size - param_size,
2635 iseq->body->stack_max);
2644 VALUE *argv = cfp->sp - calling->argc;
2646 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
2647 VALUE *src_argv = argv;
2648 VALUE *sp_orig, *sp;
2649 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
2651 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
2652 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
2653 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
2654 dst_captured->code.val = src_captured->code.val;
2655 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
2656 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
2659 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
2663 vm_pop_frame(ec, cfp, cfp->ep);
2666 sp_orig = sp = cfp->sp;
2669 sp[0] = calling->recv;
2673 for (i=0; i < iseq->body->
param.size; i++) {
2674 *sp++ = src_argv[i];
2677 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
2678 calling->recv, calling->block_handler, (
VALUE)me,
2679 iseq->body->iseq_encoded + opt_pc, sp,
2680 iseq->body->local_table_size - iseq->body->
param.size,
2681 iseq->body->stack_max);
2689 ractor_unsafe_check(
void)
2691 if (!rb_ractor_main_p()) {
2692 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
2699 ractor_unsafe_check();
2706 ractor_unsafe_check();
2707 return (*func)(argc, argv, recv);
2713 ractor_unsafe_check();
2721 ractor_unsafe_check();
2723 return (*f)(recv, argv[0]);
2729 ractor_unsafe_check();
2731 return (*f)(recv, argv[0], argv[1]);
2737 ractor_unsafe_check();
2739 return (*f)(recv, argv[0], argv[1], argv[2]);
2745 ractor_unsafe_check();
2747 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
2753 ractor_unsafe_check();
2754 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2755 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
2761 ractor_unsafe_check();
2762 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2763 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
2769 ractor_unsafe_check();
2770 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2771 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
2777 ractor_unsafe_check();
2778 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2779 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
2785 ractor_unsafe_check();
2786 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2787 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
2793 ractor_unsafe_check();
2794 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2795 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
2801 ractor_unsafe_check();
2802 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2803 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
2809 ractor_unsafe_check();
2810 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2811 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
2817 ractor_unsafe_check();
2818 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2819 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
2825 ractor_unsafe_check();
2826 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2827 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
2833 ractor_unsafe_check();
2834 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2835 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
2847 return (*func)(argc, argv, recv);
2861 return (*f)(recv, argv[0]);
2868 return (*f)(recv, argv[0], argv[1]);
2875 return (*f)(recv, argv[0], argv[1], argv[2]);
2882 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
2888 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2889 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
2895 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2896 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
2902 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2903 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
2909 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2910 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
2916 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2917 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
2923 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2924 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
2930 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2931 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
2937 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2938 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
2944 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2945 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
2951 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2952 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
2958 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2959 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
2965 const int ov_flags = RAISED_STACKOVERFLOW;
2966 if (LIKELY(reg_cfp == ec->cfp + 1))
return TRUE;
2967 if (rb_ec_raised_p(ec, ov_flags)) {
2968 rb_ec_raised_reset(ec, ov_flags);
2974 #define CHECK_CFP_CONSISTENCY(func) \
2975 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
2976 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
2982 #if VM_DEBUG_VERIFY_METHOD_CACHE
2983 switch (me->def->type) {
2984 case VM_METHOD_TYPE_CFUNC:
2985 case VM_METHOD_TYPE_NOTIMPLEMENTED:
2987 # define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
2989 METHOD_BUG(ATTRSET);
2991 METHOD_BUG(BMETHOD);
2994 METHOD_BUG(OPTIMIZED);
2995 METHOD_BUG(MISSING);
2996 METHOD_BUG(REFINED);
3000 rb_bug(
"wrong method type: %d", me->def->type);
3003 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3009 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3015 int len = cfunc->argc;
3017 VALUE recv = calling->recv;
3018 VALUE block_handler = calling->block_handler;
3019 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3020 int argc = calling->argc;
3021 int orig_argc = argc;
3023 if (UNLIKELY(calling->kw_splat)) {
3024 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3027 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3030 vm_push_frame(ec, NULL, frame_type, recv,
3031 block_handler, (
VALUE)me,
3032 0, ec->cfp->sp, 0, 0);
3036 reg_cfp->sp -= orig_argc + 1;
3037 val = (*cfunc->invoker)(recv, argc, reg_cfp->sp + 1, cfunc->func);
3039 CHECK_CFP_CONSISTENCY(
"vm_call_cfunc");
3041 rb_vm_pop_frame(ec);
3043 EXEC_EVENT_HOOK(ec,
RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3044 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
3053 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3055 CALLER_SETUP_ARG(reg_cfp, calling, ci);
3056 CALLER_REMOVE_EMPTY_KW_SPLAT(reg_cfp, calling, ci);
3057 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat);
3058 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3065 RB_DEBUG_COUNTER_INC(ccf_ivar);
3067 return vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE);
3074 RB_DEBUG_COUNTER_INC(ccf_attrset);
3075 VALUE val = *(cfp->sp - 1);
3077 return vm_setivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, val, NULL, NULL, cc, 1);
3081 rb_vm_call_ivar_attrset_p(
const vm_call_handler ch)
3083 return (ch == vm_call_ivar || ch == vm_call_attrset);
3093 VALUE procv = cme->def->body.bmethod.proc;
3096 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
3101 GetProcPtr(procv, proc);
3102 val = rb_vm_invoke_bmethod(ec, proc, calling->recv, calling->argc, argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
3110 RB_DEBUG_COUNTER_INC(ccf_bmethod);
3116 CALLER_SETUP_ARG(cfp, calling, ci);
3117 argc = calling->argc;
3120 cfp->sp += - argc - 1;
3122 return vm_call_bmethod_body(ec, calling, argv);
3125 MJIT_FUNC_EXPORTED
VALUE
3126 rb_find_defined_class_by_owner(
VALUE current_class,
VALUE target_owner)
3128 VALUE klass = current_class;
3136 while (
RTEST(klass)) {
3138 if (owner == target_owner) {
3144 return current_class;
3153 if (orig_me->defined_class == 0) {
3154 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
3156 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
3158 if (me->def->alias_count + me->def->complemented_count == 0) {
3159 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
3163 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
3171 VM_ASSERT(callable_method_entry_p(cme));
3178 return aliased_callable_method_entry(me);
3184 calling->cc = &VM_CC_ON_STACK(
Qundef,
3187 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
3189 return vm_call_method_each_type(ec, cfp, calling);
3192 static enum method_missing_reason
3195 enum method_missing_reason stat = MISSING_NOENTRY;
3196 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
3197 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
3198 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
3206 ASSUME(calling->argc >= 0);
3209 enum method_missing_reason missing_reason = MISSING_NOENTRY;
3210 int argc = calling->argc;
3211 VALUE recv = calling->recv;
3214 int flags = VM_CALL_FCALL |
3216 (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
3218 if (UNLIKELY(! mid)) {
3219 mid = idMethodMissing;
3220 missing_reason = ci_missing_reason(ci);
3221 ec->method_missing_reason = missing_reason;
3237 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
3240 argc = ++calling->argc;
3245 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
3246 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
3247 VALUE exc = rb_make_no_method_exception(
3257 calling->ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci));
3258 calling->cc = &VM_CC_ON_STACK(klass,
3260 { .method_missing_reason = missing_reason },
3261 rb_callable_method_entry_with_refinements(klass, mid, NULL));
3263 return vm_call_method(ec, reg_cfp, calling);
3269 RB_DEBUG_COUNTER_INC(ccf_opt_send);
3274 CALLER_SETUP_ARG(reg_cfp, calling, calling->ci);
3276 i = calling->argc - 1;
3278 if (calling->argc == 0) {
3303 return vm_call_symbol(ec, reg_cfp, calling, calling->ci, sym);
3309 const struct rb_callinfo *orig_ci,
enum method_missing_reason reason)
3311 RB_DEBUG_COUNTER_INC(ccf_method_missing);
3313 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
3316 CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
3317 argc = calling->argc + 1;
3319 unsigned int flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
3320 calling->argc = argc;
3323 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
3324 vm_check_canary(ec, reg_cfp->sp);
3328 argv[0] =
ID2SYM(vm_ci_mid(orig_ci));
3331 ec->method_missing_reason = reason;
3332 calling->ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci));
3333 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, { 0 },
3334 rb_callable_method_entry_without_refinements(
CLASS_OF(calling->recv), idMethodMissing, NULL));
3335 return vm_call_method(ec, reg_cfp, calling);
3341 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->ci, vm_cc_cmethod_missing_reason(calling->cc));
3352 return vm_call_method_nome(ec, cfp, calling);
3354 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
3355 cme->def->body.refined.orig_me) {
3356 cme = refined_method_callable_without_refinement(cme);
3359 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, { 0 }, cme);
3361 return vm_call_method_each_type(ec, cfp, calling);
3365 find_refinement(
VALUE refinements,
VALUE klass)
3367 if (
NIL_P(refinements)) {
3379 if (cfp->iseq && cfp->iseq->body->type == ISEQ_TYPE_BLOCK) {
3380 const rb_iseq_t *local_iseq = cfp->iseq->body->local_iseq;
3383 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
3384 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
3388 }
while (cfp->iseq != local_iseq);
3399 if (orig_me->defined_class == 0) {
3407 VM_ASSERT(callable_method_entry_p(cme));
3409 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
3419 ID mid = vm_ci_mid(calling->ci);
3420 const rb_cref_t *cref = vm_get_cref(cfp->ep);
3424 for (; cref; cref = CREF_NEXT(cref)) {
3425 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
3426 if (
NIL_P(refinement))
continue;
3429 rb_callable_method_entry(refinement, mid);
3432 if (vm_cc_call(cc) == vm_call_super_method) {
3435 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
3440 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
3441 cme->def != ref_me->def) {
3444 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
3453 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
3454 return refined_method_callable_without_refinement(vm_cc_cme(cc));
3467 search_refined_method(ec, cfp, calling));
3469 if (vm_cc_cme(ref_cc)) {
3470 calling->cc= ref_cc;
3471 return vm_call_method(ec, cfp, calling);
3474 return vm_call_method_nome(ec, cfp, calling);
3480 NOINLINE(
static VALUE
3488 int argc = calling->argc;
3491 if (argc > 0)
MEMMOVE(&TOPN(argc), &TOPN(argc-1),
VALUE, argc);
3494 return vm_invoke_block(ec, reg_cfp, calling, ci,
false, block_handler);
3500 RB_DEBUG_COUNTER_INC(ccf_opt_call);
3503 VALUE procval = calling->recv;
3504 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
3510 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
3512 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
3515 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
3516 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
3519 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
3520 calling->cc = rb_vm_search_method_slowpath(ci,
CLASS_OF(calling->recv));
3521 return vm_call_general(ec, reg_cfp, calling);
3528 VALUE recv = calling->recv;
3531 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
3532 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
3534 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
3535 return internal_RSTRUCT_GET(recv, off);
3541 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
3543 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
3551 VALUE recv = calling->recv;
3554 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
3555 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
3559 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
3560 internal_RSTRUCT_SET(recv, off, val);
3568 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
3570 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
3582 switch (vm_cc_cme(cc)->def->body.optimized.type) {
3583 case OPTIMIZED_METHOD_TYPE_SEND:
3584 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
3585 return vm_call_opt_send(ec, cfp, calling);
3586 case OPTIMIZED_METHOD_TYPE_CALL:
3587 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
3588 return vm_call_opt_call(ec, cfp, calling);
3589 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
3590 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
3591 return vm_call_opt_block_call(ec, cfp, calling);
3592 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
3593 CALLER_SETUP_ARG(cfp, calling, ci);
3594 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3596 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
3597 return vm_call_opt_struct_aref(ec, cfp, calling);
3599 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
3600 CALLER_SETUP_ARG(cfp, calling, ci);
3601 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3603 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
3604 return vm_call_opt_struct_aset(ec, cfp, calling);
3606 rb_bug(
"vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
3610 #define VM_CALL_METHOD_ATTR(var, func, nohook) \
3611 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
3612 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
3613 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
3615 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
3616 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
3631 switch (cme->def->type) {
3632 case VM_METHOD_TYPE_ISEQ:
3633 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
3634 return vm_call_iseq_setup(ec, cfp, calling);
3636 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3637 case VM_METHOD_TYPE_CFUNC:
3638 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
3639 return vm_call_cfunc(ec, cfp, calling);
3641 case VM_METHOD_TYPE_ATTRSET:
3642 CALLER_SETUP_ARG(cfp, calling, ci);
3643 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3646 vm_cc_attr_index_set(cc, 0);
3647 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);
3648 VM_CALL_METHOD_ATTR(v,
3649 vm_call_attrset(ec, cfp, calling),
3650 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
3653 case VM_METHOD_TYPE_IVAR:
3654 CALLER_SETUP_ARG(cfp, calling, ci);
3655 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3657 vm_cc_attr_index_set(cc, 0);
3658 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
3659 VM_CALL_METHOD_ATTR(v,
3660 vm_call_ivar(ec, cfp, calling),
3661 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
3664 case VM_METHOD_TYPE_MISSING:
3665 vm_cc_method_missing_reason_set(cc, 0);
3666 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
3667 return vm_call_method_missing(ec, cfp, calling);
3669 case VM_METHOD_TYPE_BMETHOD:
3670 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
3671 return vm_call_bmethod(ec, cfp, calling);
3673 case VM_METHOD_TYPE_ALIAS:
3674 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
3675 return vm_call_alias(ec, cfp, calling);
3677 case VM_METHOD_TYPE_OPTIMIZED:
3678 return vm_call_optimized(ec, cfp, calling, ci, cc);
3680 case VM_METHOD_TYPE_UNDEF:
3683 case VM_METHOD_TYPE_ZSUPER:
3684 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
3686 case VM_METHOD_TYPE_REFINED:
3689 return vm_call_refined(ec, cfp, calling);
3692 rb_bug(
"vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
3702 const int stat = ci_missing_reason(ci);
3704 if (vm_ci_mid(ci) == idMethodMissing) {
3706 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
3707 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
3710 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
3720 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
3722 if (vm_cc_cme(cc) != NULL) {
3723 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
3724 case METHOD_VISI_PUBLIC:
3725 return vm_call_method_each_type(ec, cfp, calling);
3727 case METHOD_VISI_PRIVATE:
3728 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
3729 enum method_missing_reason stat = MISSING_PRIVATE;
3730 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
3732 vm_cc_method_missing_reason_set(cc, stat);
3733 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
3734 return vm_call_method_missing(ec, cfp, calling);
3736 return vm_call_method_each_type(ec, cfp, calling);
3738 case METHOD_VISI_PROTECTED:
3739 if (!(vm_ci_flag(ci) & VM_CALL_OPT_SEND)) {
3741 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
3742 return vm_call_method_missing(ec, cfp, calling);
3746 VM_ASSERT(vm_cc_cme(cc) != NULL);
3749 calling->cc = &cc_on_stack;
3750 return vm_call_method_each_type(ec, cfp, calling);
3753 return vm_call_method_each_type(ec, cfp, calling);
3760 return vm_call_method_nome(ec, cfp, calling);
3767 RB_DEBUG_COUNTER_INC(ccf_general);
3768 return vm_call_method(ec, reg_cfp, calling);
3774 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
3775 VM_ASSERT(cc != vm_cc_empty());
3777 *(vm_call_handler *)&cc->call_ = vm_call_general;
3783 RB_DEBUG_COUNTER_INC(ccf_super_method);
3788 if (ec == NULL)
rb_bug(
"unreachable");
3791 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
3792 return vm_call_method(ec, reg_cfp, calling);
3798 vm_search_normal_superclass(
VALUE klass)
3802 klass =
RBASIC(klass)->klass;
3804 klass = RCLASS_ORIGIN(klass);
3808 NORETURN(
static void vm_super_outside(
void));
3811 vm_super_outside(
void)
3817 empty_cc_for_super(
void)
3820 return rb_vm_empty_cc_for_super();
3822 return &vm_empty_cc_for_super;
3829 VALUE current_defined_class;
3836 current_defined_class = me->defined_class;
3838 if (!
NIL_P(RCLASS_REFINED_CLASS(current_defined_class))) {
3839 current_defined_class = RCLASS_REFINED_CLASS(current_defined_class);
3843 !
FL_TEST_RAW(current_defined_class, RMODULE_INCLUDED_INTO_REFINEMENT) &&
3844 reg_cfp->iseq != method_entry_iseqptr(me) &&
3847 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
3851 "self has wrong type to call super in this context: "
3852 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
3857 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
3859 "implicit argument passing of super from method defined"
3860 " by define_method() is not supported."
3861 " Specify all arguments explicitly.");
3864 ID mid = me->def->original_id;
3867 cd->ci = vm_ci_new_runtime(mid,
3870 vm_ci_kwarg(cd->ci));
3876 VALUE klass = vm_search_normal_superclass(me->defined_class);
3880 cc = vm_cc_new(klass, NULL, vm_call_method_missing);
3884 cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd, klass);
3888 if (cached_cme == NULL) {
3890 cd->cc = empty_cc_for_super();
3892 else if (cached_cme->called_id != mid) {
3895 cc = vm_cc_new(klass, cme, vm_call_super_method);
3899 cd->cc = cc = empty_cc_for_super();
3903 switch (cached_cme->def->type) {
3905 case VM_METHOD_TYPE_REFINED:
3907 case VM_METHOD_TYPE_ATTRSET:
3908 case VM_METHOD_TYPE_IVAR:
3909 vm_cc_call_set(cc, vm_call_super_method);
3917 VM_ASSERT((vm_cc_cme(cc),
true));
3925 block_proc_is_lambda(
const VALUE procval)
3930 GetProcPtr(procval, proc);
3931 return proc->is_lambda;
3941 VALUE self,
int argc,
const VALUE *argv,
int kw_splat,
VALUE block_handler,
3944 int is_lambda = FALSE;
3945 VALUE val, arg, blockarg;
3947 const struct vm_ifunc *ifunc = captured->code.ifunc;
3952 else if (argc == 0) {
3959 blockarg = rb_vm_bh_to_procval(ec, block_handler);
3961 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
3963 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
3966 vm_push_frame(ec, (
const rb_iseq_t *)captured->code.ifunc,
3969 VM_GUARDED_PREV_EP(captured->ep),
3971 0, ec->cfp->sp, 0, 0);
3972 val = (*ifunc->func)(arg, (
VALUE)ifunc->data, argc, argv, blockarg);
3973 rb_vm_pop_frame(ec);
3981 return rb_sym_proc_call(
SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
3990 CHECK_VM_STACK_OVERFLOW(cfp, iseq->body->
param.lead_num);
3992 for (i=0; i<len && i<iseq->body->
param.lead_num; i++) {
4000 vm_callee_setup_block_arg_arg0_check(
VALUE *argv)
4002 VALUE ary, arg0 = argv[0];
4007 VM_ASSERT(argv[0] == arg0);
4015 if (rb_simple_iseq_p(iseq)) {
4019 CALLER_SETUP_ARG(cfp, calling, ci);
4020 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
4022 if (arg_setup_type == arg_setup_block &&
4023 calling->argc == 1 &&
4024 iseq->body->
param.flags.has_lead &&
4025 !iseq->body->
param.flags.ambiguous_param0 &&
4026 !
NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
4027 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
4030 if (calling->argc != iseq->body->
param.lead_num) {
4031 if (arg_setup_type == arg_setup_block) {
4032 if (calling->argc < iseq->body->
param.lead_num) {
4034 CHECK_VM_STACK_OVERFLOW(cfp, iseq->body->
param.lead_num);
4035 for (i=calling->argc; i<iseq->body->param.lead_num; i++) argv[i] =
Qnil;
4036 calling->argc = iseq->body->
param.lead_num;
4038 else if (calling->argc > iseq->body->
param.lead_num) {
4039 calling->argc = iseq->body->
param.lead_num;
4043 argument_arity_error(ec, iseq, calling->argc, iseq->body->
param.lead_num, iseq->body->
param.lead_num);
4050 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
4059 calling = &calling_entry;
4060 calling->argc = argc;
4061 calling->block_handler = block_handler;
4062 calling->kw_splat = kw_splat;
4064 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, (kw_splat ? VM_CALL_KW_SPLAT : 0), 0, 0);
4066 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
4074 bool is_lambda,
VALUE block_handler)
4077 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4078 const int arg_size = iseq->body->
param.size;
4079 VALUE *
const rsp = GET_SP() - calling->argc;
4080 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, rsp, is_lambda ? arg_setup_method : arg_setup_block);
4084 vm_push_frame(ec, iseq,
4085 VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
4087 VM_GUARDED_PREV_EP(captured->ep), 0,
4088 iseq->body->iseq_encoded + opt_pc,
4090 iseq->body->local_table_size - arg_size, iseq->body->stack_max);
4098 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
4100 if (calling->argc < 1) {
4104 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
4105 CALLER_SETUP_ARG(reg_cfp, calling, ci);
4106 calling->recv = TOPN(--calling->argc);
4107 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol);
4114 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
4119 CALLER_SETUP_ARG(ec->cfp, calling, ci);
4120 CALLER_REMOVE_EMPTY_KW_SPLAT(ec->cfp, calling, ci);
4121 argc = calling->argc;
4122 val = vm_yield_with_cfunc(ec, captured, captured->self, argc, STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
4128 vm_proc_to_block_handler(
VALUE procval)
4130 const struct rb_block *block = vm_proc_block(procval);
4132 switch (vm_block_type(block)) {
4133 case block_type_iseq:
4134 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
4135 case block_type_ifunc:
4136 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
4137 case block_type_symbol:
4138 return VM_BH_FROM_SYMBOL(block->as.symbol);
4139 case block_type_proc:
4140 return VM_BH_FROM_PROC(block->as.proc);
4142 VM_UNREACHABLE(vm_yield_with_proc);
4149 bool is_lambda,
VALUE block_handler)
4151 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
4152 VALUE proc = VM_BH_TO_PROC(block_handler);
4153 is_lambda = block_proc_is_lambda(proc);
4154 block_handler = vm_proc_to_block_handler(proc);
4157 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
4163 bool is_lambda,
VALUE block_handler)
4167 bool is_lambda,
VALUE block_handler);
4169 switch (vm_block_handler_type(block_handler)) {
4170 case block_handler_type_iseq: func = vm_invoke_iseq_block;
break;
4171 case block_handler_type_ifunc: func = vm_invoke_ifunc_block;
break;
4172 case block_handler_type_proc: func = vm_invoke_proc_block;
break;
4173 case block_handler_type_symbol: func = vm_invoke_symbol_block;
break;
4174 default:
rb_bug(
"vm_invoke_block: unreachable");
4177 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
4181 vm_make_proc_with_iseq(
const rb_iseq_t *blockiseq)
4188 rb_bug(
"vm_make_proc_with_iseq: unreachable");
4191 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
4192 captured->code.iseq = blockiseq;
4194 return rb_vm_make_proc(ec, captured,
rb_cProc);
4198 vm_once_exec(
VALUE iseq)
4205 vm_once_clear(
VALUE data)
4208 is->once.running_thread = NULL;
4220 args[0] = obj; args[1] =
Qfalse;
4234 enum defined_type
type = (
enum defined_type)op_type;
4241 return rb_gvar_defined(
SYM2ID(obj));
4243 case DEFINED_CVAR: {
4244 const rb_cref_t *cref = vm_get_cref(GET_EP());
4245 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
4250 case DEFINED_CONST_FROM: {
4251 bool allow_nil =
type == DEFINED_CONST;
4253 return vm_get_ev_const(ec, klass,
SYM2ID(obj), allow_nil,
true);
4258 return rb_ec_obj_respond_to(ec, v,
SYM2ID(obj), TRUE);
4260 case DEFINED_METHOD:{
4265 switch (METHOD_ENTRY_VISI(me)) {
4266 case METHOD_VISI_PRIVATE:
4268 case METHOD_VISI_PROTECTED:
4272 case METHOD_VISI_PUBLIC:
4276 rb_bug(
"vm_defined: unreachable: %u", (
unsigned int)METHOD_ENTRY_VISI(me));
4280 return check_respond_to_missing(obj, v);
4285 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
4289 case DEFINED_ZSUPER:
4294 VALUE klass = vm_search_normal_superclass(me->defined_class);
4295 ID id = me->def->original_id;
4306 rb_bug(
"unimplemented defined? type (VM)");
4316 return vm_defined(ec, reg_cfp, op_type, obj, v);
4319 static const VALUE *
4320 vm_get_ep(
const VALUE *
const reg_ep, rb_num_t lv)
4323 const VALUE *ep = reg_ep;
4324 for (i = 0; i < lv; i++) {
4325 ep = GET_PREV_EP(ep);
4331 vm_get_special_object(
const VALUE *
const reg_ep,
4332 enum vm_special_object_type
type)
4335 case VM_SPECIAL_OBJECT_VMCORE:
4336 return rb_mRubyVMFrozenCore;
4337 case VM_SPECIAL_OBJECT_CBASE:
4338 return vm_get_cbase(reg_ep);
4339 case VM_SPECIAL_OBJECT_CONST_BASE:
4340 return vm_get_const_base(reg_ep);
4342 rb_bug(
"putspecialobject insn: unknown value_type %d",
type);
4349 const VALUE ary2 = ary2st;
4350 VALUE tmp1 = rb_check_to_array(ary1);
4351 VALUE tmp2 = rb_check_to_array(ary2);
4370 VALUE tmp = rb_check_to_array(ary);
4374 else if (
RTEST(flag)) {
4385 return vm_splat_array(flag, ary);
4391 enum vm_check_match_type
type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
4393 if (flag & VM_CHECKMATCH_ARRAY) {
4397 for (i = 0; i < n; i++) {
4399 VALUE c = check_match(ec, v, target,
type);
4408 return check_match(ec, pattern, target,
type);
4413 vm_check_keyword(lindex_t bits, lindex_t idx,
const VALUE *ep)
4415 const VALUE kw_bits = *(ep - bits);
4418 unsigned int b = (
unsigned int)
FIX2ULONG(kw_bits);
4419 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
4432 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
4433 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
4434 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
4435 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
4439 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
4442 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
4445 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
4448 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
4455 vm_const_get_under(
ID id, rb_num_t flags,
VALUE cbase)
4459 if ((ns = vm_search_const_defined_class(cbase,
id)) == 0) {
4462 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
4463 return rb_public_const_get_at(ns,
id);
4471 vm_check_if_class(
ID id, rb_num_t flags,
VALUE super,
VALUE klass)
4476 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
4481 "superclass mismatch for class %"PRIsVALUE
"",
4494 vm_check_if_module(
ID id,
VALUE mod)
4513 vm_declare_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
4516 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
4524 vm_declare_module(
ID id,
VALUE cbase)
4530 NORETURN(
static void unmatched_redefinition(
const char *
type,
VALUE cbase,
ID id,
VALUE old));
4537 VALUE location = rb_const_source_location_at(cbase,
id);
4538 if (!
NIL_P(location)) {
4539 rb_str_catf(message,
"\n%"PRIsVALUE
":%"PRIsVALUE
":"
4540 " previous definition of %"PRIsVALUE
" was here",
4547 vm_define_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
4551 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !
RB_TYPE_P(super,
T_CLASS)) {
4553 "superclass must be an instance of Class (given an instance of %"PRIsVALUE
")",
4557 vm_check_if_namespace(cbase);
4561 if ((klass = vm_const_get_under(
id, flags, cbase)) != 0) {
4562 if (!vm_check_if_class(
id, flags, super, klass))
4563 unmatched_redefinition(
"class", cbase,
id, klass);
4567 return vm_declare_class(
id, flags, cbase, super);
4572 vm_define_module(
ID id, rb_num_t flags,
VALUE cbase)
4576 vm_check_if_namespace(cbase);
4577 if ((mod = vm_const_get_under(
id, flags, cbase)) != 0) {
4578 if (!vm_check_if_module(
id, mod))
4579 unmatched_redefinition(
"module", cbase,
id, mod);
4583 return vm_declare_module(
id, cbase);
4588 vm_find_or_create_class_by_id(
ID id,
4593 rb_vm_defineclass_type_t
type = VM_DEFINECLASS_TYPE(flags);
4596 case VM_DEFINECLASS_TYPE_CLASS:
4598 return vm_define_class(
id, flags, cbase, super);
4600 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
4604 case VM_DEFINECLASS_TYPE_MODULE:
4606 return vm_define_module(
id, flags, cbase);
4609 rb_bug(
"unknown defineclass type: %d", (
int)
type);
4613 static rb_method_visibility_t
4618 if (!vm_env_cref_by_cref(cfp->ep)) {
4619 return METHOD_VISI_PUBLIC;
4622 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
4631 if (!vm_env_cref_by_cref(cfp->ep)) {
4635 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
4643 rb_method_visibility_t visi;
4648 visi = METHOD_VISI_PUBLIC;
4651 klass = CREF_CLASS_FOR_DEFINITION(cref);
4652 visi = vm_scope_visibility_get(ec);
4659 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, visi);
4661 if (!is_singleton && vm_scope_module_func_check(ec)) {
4663 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
4673 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
4675 if (block_handler == VM_BLOCK_HANDLER_NONE) {
4676 rb_vm_localjump_error(
"no block given (yield)",
Qnil, 0);
4679 return vm_invoke_block(ec, GET_CFP(), calling, ci,
false, block_handler);
4687 return vm_search_method((
VALUE)reg_cfp->iseq, cd, recv);
4694 .flags =
T_IMEMO | (imemo_callcache <<
FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
4697 .call_ = vm_invokeblock_i,
4703 # define mexp_search_method vm_search_method_wrap
4704 # define mexp_search_super vm_search_super_method
4705 # define mexp_search_invokeblock vm_search_invokeblock
4707 enum method_explorer_type {
4709 mexp_search_invokeblock,
4723 VALUE block_handler,
4727 enum method_explorer_type method_explorer
4733 int argc = vm_ci_argc(ci);
4734 VALUE recv = TOPN(argc);
4736 .block_handler = block_handler,
4737 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
4745 calling.cc = cc = method_explorer(GET_CFP(), cd, recv);
4746 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
4748 switch (method_explorer) {
4749 case mexp_search_method:
4750 calling.cc = cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd,
CLASS_OF(recv));
4751 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
4753 case mexp_search_super:
4754 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
4755 calling.ci = cd->ci;
4756 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
4758 case mexp_search_invokeblock:
4759 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
4778 if (GET_ISEQ()->body->catch_except_p) {
4779 VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
4780 return vm_exec(ec,
true);
4782 else if ((val = mjit_exec(ec)) ==
Qundef) {
4783 VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
4784 return vm_exec(ec,
false);
4793 return mjit_exec(ec);
4813 switch (
TYPE(recv)) {
4826 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
4832 val = rb_mod_to_s(recv);
4838 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
4839 return rb_nil_to_s(recv);
4843 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
4844 return rb_true_to_s(recv);
4848 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
4849 return rb_false_to_s(recv);
4853 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
4854 return rb_fix_to_s(recv);
4862 vm_opt_str_freeze(
VALUE str,
int bop,
ID id)
4864 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
4873 #define id_cmp idCmp
4878 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
4884 VALUE result = *ptr;
4885 rb_snum_t i = num - 1;
4887 const VALUE v = *++ptr;
4888 if (OPTIMIZED_CMP(v, result, cmp_opt) > 0) {
4903 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
4909 VALUE result = *ptr;
4910 rb_snum_t i = num - 1;
4912 const VALUE v = *++ptr;
4913 if (OPTIMIZED_CMP(v, result, cmp_opt) < 0) {
4927 #define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0
4931 vm_inlined_ic_hit_p(
VALUE flags,
VALUE value,
const rb_cref_t *ic_cref, rb_serial_t ic_serial,
const VALUE *reg_ep)
4933 if (ic_serial == GET_GLOBAL_CONSTANT_STATE() &&
4934 ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p())) {
4938 return (ic_cref == NULL ||
4939 ic_cref == vm_get_cref(reg_ep));
4947 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
4948 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, GET_IC_SERIAL(ice), reg_ep);
4953 rb_vm_ic_hit_p(
IC ic,
const VALUE *reg_ep)
4955 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
4964 ice->ic_cref = vm_get_const_key_cref(reg_ep);
4965 SET_IC_SERIAL(ice, GET_GLOBAL_CONSTANT_STATE() - ruby_vm_const_missing_count);
4967 ruby_vm_const_missing_count = 0;
4972 rb_yjit_constant_ic_update(iseq, ic);
4983 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
4984 return is->once.value;
4986 else if (is->once.running_thread == NULL) {
4988 is->once.running_thread = th;
4992 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
4995 else if (is->once.running_thread == th) {
4997 return vm_once_exec((
VALUE)iseq);
5001 RUBY_VM_CHECK_INTS(ec);
5008 vm_case_dispatch(CDHASH hash, OFFSET else_offset,
VALUE key)
5010 switch (OBJ_BUILTIN_TYPE(key)) {
5016 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
5017 SYMBOL_REDEFINED_OP_FLAG |
5018 INTEGER_REDEFINED_OP_FLAG |
5019 FLOAT_REDEFINED_OP_FLAG |
5020 NIL_REDEFINED_OP_FLAG |
5021 TRUE_REDEFINED_OP_FLAG |
5022 FALSE_REDEFINED_OP_FLAG |
5023 STRING_REDEFINED_OP_FLAG)) {
5027 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
5031 if (rb_hash_stlike_lookup(hash, key, &val)) {
5042 NORETURN(
static void
5051 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
5052 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
5053 static const char stack_consistency_error[] =
5054 "Stack consistency error (sp: %"PRIdPTRDIFF
", bp: %"PRIdPTRDIFF
")";
5055 #if defined RUBY_DEVEL
5061 rb_bug(stack_consistency_error, nsp, nbp);
5068 if (FIXNUM_2_P(recv, obj) &&
5069 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
5070 return rb_fix_plus_fix(recv, obj);
5072 else if (FLONUM_2_P(recv, obj) &&
5073 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
5081 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
5086 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
5087 return rb_str_opt_plus(recv, obj);
5091 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
5102 if (FIXNUM_2_P(recv, obj) &&
5103 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
5104 return rb_fix_minus_fix(recv, obj);
5106 else if (FLONUM_2_P(recv, obj) &&
5107 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
5115 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
5126 if (FIXNUM_2_P(recv, obj) &&
5127 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
5128 return rb_fix_mul_fix(recv, obj);
5130 else if (FLONUM_2_P(recv, obj) &&
5131 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
5139 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
5150 if (FIXNUM_2_P(recv, obj) &&
5151 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
5152 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_div_fix(recv, obj);
5154 else if (FLONUM_2_P(recv, obj) &&
5155 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
5156 return rb_flo_div_flo(recv, obj);
5163 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
5164 return rb_flo_div_flo(recv, obj);
5174 if (FIXNUM_2_P(recv, obj) &&
5175 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
5176 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_mod_fix(recv, obj);
5178 else if (FLONUM_2_P(recv, obj) &&
5179 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
5187 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
5198 return vm_opt_mod(recv, obj);
5204 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
5205 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
5218 if (FIXNUM_2_P(recv, obj) &&
5219 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
5222 else if (FLONUM_2_P(recv, obj) &&
5223 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
5231 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
5243 if (FIXNUM_2_P(recv, obj) &&
5244 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
5247 else if (FLONUM_2_P(recv, obj) &&
5248 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
5256 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
5268 if (FIXNUM_2_P(recv, obj) &&
5269 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
5272 else if (FLONUM_2_P(recv, obj) &&
5273 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5281 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5293 if (FIXNUM_2_P(recv, obj) &&
5294 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
5297 else if (FLONUM_2_P(recv, obj) &&
5298 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5306 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5323 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
5327 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
5338 if (FIXNUM_2_P(recv, obj) &&
5339 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
5340 return (recv & obj) | 1;
5350 if (FIXNUM_2_P(recv, obj) &&
5351 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
5363 if (FIXNUM_2_P(recv, obj) &&
5364 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
5365 return rb_fix_aref(recv, obj);
5370 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
5372 return rb_ary_entry_internal(recv,
FIX2LONG(obj));
5375 return rb_ary_aref1(recv, obj);
5379 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
5394 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
5400 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
5413 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
5414 rb_hash_compare_by_id_p(recv) ==
Qfalse) {
5426 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
5427 rb_hash_compare_by_id_p(recv) ==
Qfalse) {
5436 vm_opt_length(
VALUE recv,
int bop)
5442 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5443 if (bop == BOP_EMPTY_P) {
5451 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
5455 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
5464 vm_opt_empty_p(
VALUE recv)
5466 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
5479 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
5482 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
5498 case RSHIFT(~0UL, 1):
5501 return
rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
5519 vm_opt_succ(
VALUE recv)
5522 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
5523 return fix_succ(recv);
5529 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
5540 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
5556 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
5560 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
5578 VALUE self = GET_SELF();
5580 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
5582 if (event & global_hooks->events) {
5585 vm_dtrace(event, ec);
5586 rb_exec_event_hook_orig(ec, global_hooks, event,
self, 0, 0, 0 , val, 0);
5590 if (local_hooks != NULL) {
5591 if (event & local_hooks->events) {
5594 rb_exec_event_hook_orig(ec, local_hooks, event,
self, 0, 0, 0 , val, 0);
5606 return check_cfunc(vm_cc_cme(cc), rb_obj_equal);
5607 case BIN(opt_nil_p):
5608 return check_cfunc(vm_cc_cme(cc), rb_false);
5610 return check_cfunc(vm_cc_cme(cc), rb_obj_not);
5616 #define VM_TRACE_HOOK(target_event, val) do { \
5617 if ((pc_events & (target_event)) & enabled_flags) { \
5618 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks, (val)); \
5625 const VALUE *pc = reg_cfp->pc;
5626 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
5629 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
5634 size_t pos = pc - iseq->body->iseq_encoded;
5637 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
5640 bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
5641 enabled_flags |= iseq_local_events;
5643 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
5645 if (bmethod_frame) {
5647 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
5648 bmethod_local_hooks = me->def->body.bmethod.hooks;
5649 if (bmethod_local_hooks) {
5650 bmethod_local_events = bmethod_local_hooks->events;
5655 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
5659 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
5667 else if (ec->trace_arg != NULL) {
5675 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
5678 ruby_debug_printf(
"vm_trace>>%4d (%4x) - %s:%d %s\n",
5682 (
int)rb_iseq_line_no(iseq, pos),
5685 VM_ASSERT(reg_cfp->pc == pc);
5686 VM_ASSERT(pc_events != 0);
5695 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,
Qundef);
5696 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,
Qundef);
5705 #undef VM_TRACE_HOOK
5707 #if VM_CHECK_MODE > 0
5708 NORETURN( NOINLINE( COLDFUNC
5709 void rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)));
5712 Init_vm_stack_canary(
void)
5715 int n = ruby_fill_random_bytes(&vm_stack_canary,
sizeof vm_stack_canary,
false);
5716 vm_stack_canary |= 0x01;
5718 vm_stack_canary_was_born =
true;
5723 MJIT_FUNC_EXPORTED
void
5724 rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)
5728 const char *insn = rb_insns_name(i);
5732 rb_bug(
"dead canary found at %s: %s", insn, str);
5737 void Init_vm_stack_canary(
void) { }
5769 return (*(rb_invoke_funcptr0_t)funcptr)(ec,
self);
5776 return (*(rb_invoke_funcptr1_t)funcptr)(ec,
self, argv[0]);
5783 return (*(rb_invoke_funcptr2_t)funcptr)(ec,
self, argv[0], argv[1]);
5790 return (*(rb_invoke_funcptr3_t)funcptr)(ec,
self, argv[0], argv[1], argv[2]);
5797 return (*(rb_invoke_funcptr4_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3]);
5804 return (*(rb_invoke_funcptr5_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4]);
5811 return (*(rb_invoke_funcptr6_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
5818 return (*(rb_invoke_funcptr7_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
5825 return (*(rb_invoke_funcptr8_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
5831 typedef VALUE (*rb_invoke_funcptr9_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9);
5832 return (*(rb_invoke_funcptr9_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
5838 typedef VALUE (*rb_invoke_funcptr10_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10);
5839 return (*(rb_invoke_funcptr10_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
5845 typedef VALUE (*rb_invoke_funcptr11_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11);
5846 return (*(rb_invoke_funcptr11_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
5852 typedef VALUE (*rb_invoke_funcptr12_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12);
5853 return (*(rb_invoke_funcptr12_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
5859 typedef VALUE (*rb_invoke_funcptr13_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13);
5860 return (*(rb_invoke_funcptr13_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
5866 typedef VALUE (*rb_invoke_funcptr14_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14);
5867 return (*(rb_invoke_funcptr14_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
5873 typedef VALUE (*rb_invoke_funcptr15_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14,
VALUE v15);
5874 return (*(rb_invoke_funcptr15_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
5879 static builtin_invoker
5880 lookup_builtin_invoker(
int argc)
5882 static const builtin_invoker invokers[] = {
5901 return invokers[argc];
5907 const bool canary_p = reg_cfp->iseq->body->builtin_inline_p;
5908 SETUP_CANARY(canary_p);
5909 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, (rb_insn_func_t)bf->func_ptr);
5910 CHECK_CANARY(canary_p, BIN(invokebuiltin));
5917 return invoke_bf(ec, cfp, bf, argv);
5924 fputs(
"vm_invoke_builtin_delegate: passing -> ", stderr);
5925 for (
int i=0; i<bf->argc; i++) {
5926 ruby_debug_printf(
":%s ",
rb_id2name(cfp->iseq->body->local_table[i+start_index]));
5928 ruby_debug_printf(
"\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
5931 if (bf->argc == 0) {
5932 return invoke_bf(ec, cfp, bf, NULL);
5935 const VALUE *argv = cfp->ep - cfp->iseq->body->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
5936 return invoke_bf(ec, cfp, bf, argv);
5946 return cfp->ep[index];
#define RUBY_ASSERT(expr)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered an yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
static VALUE RB_OBJ_FROZEN_RAW(VALUE obj)
This is an implenentation detail of RB_OBJ_FROZEN().
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
VALUE rb_module_new(void)
Creates a new, anonymous module.
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class::inherited.
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
#define TYPE(_)
Old name of rb_type.
#define FL_SINGLETON
Old name of RUBY_FL_SINGLETON.
#define FL_EXIVAR
Old name of RUBY_FL_EXIVAR.
#define REALLOC_N
Old name of RB_REALLOC_N.
#define ALLOC
Old name of RB_ALLOC.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define T_NIL
Old name of RUBY_T_NIL.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define T_IMEMO
Old name of RUBY_T_IMEMO.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
#define SYM2ID
Old name of RB_SYM2ID.
#define CLASS_OF
Old name of rb_class_of.
#define rb_ary_new4
Old name of rb_ary_new_from_values.
#define FIXABLE
Old name of RB_FIXABLE.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define T_MODULE
Old name of RUBY_T_MODULE.
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define FIX2ULONG
Old name of RB_FIX2ULONG.
#define T_TRUE
Old name of RUBY_T_TRUE.
#define T_ICLASS
Old name of RUBY_T_ICLASS.
#define T_HASH
Old name of RUBY_T_HASH.
#define ALLOC_N
Old name of RB_ALLOC_N.
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
#define rb_ary_new3
Old name of rb_ary_new_from_args.
#define LONG2NUM
Old name of RB_LONG2NUM.
#define rb_exc_new3
Old name of rb_exc_new_str.
#define T_FALSE
Old name of RUBY_T_FALSE.
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define T_CLASS
Old name of RUBY_T_CLASS.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define FL_TEST
Old name of RB_FL_TEST.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
void rb_notimplement(void)
void rb_raise(VALUE exc, const char *fmt,...)
Exception entry point.
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
void rb_bug(const char *fmt,...)
Interpreter panic switch.
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eFatal
fatal exception.
VALUE rb_eNoMethodError
NoMethodError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports always regardless of runtime -W flag.
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby string instead of a C string.
VALUE rb_eArgError
ArgumentError exception.
VALUE rb_ensure(VALUE(*b_proc)(VALUE), VALUE data1, VALUE(*e_proc)(VALUE), VALUE data2)
An equivalent to ensure clause.
VALUE rb_cClass
Class class.
VALUE rb_cArray
Array class.
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
VALUE rb_cRegexp
Regexp class.
VALUE rb_cHash
Hash class.
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cBasicObject
BasicObject class.
VALUE rb_cModule
Module class.
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_cFloat
Float class.
VALUE rb_cProc
Proc class.
VALUE rb_cString
String class.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
VALUE rb_ary_concat(VALUE lhs, VALUE rhs)
Destructively appends the contents of latter into the end of former.
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
VALUE rb_ary_plus(VALUE lhs, VALUE rhs)
Creates a new array, concatenating the former to the latter.
VALUE rb_check_array_type(VALUE obj)
Try converting an object to its array representation using its to_ary method, if any.
VALUE rb_ary_new(void)
Allocates a new, empty array.
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
VALUE rb_dbl2big(double d)
Converts a C's double into a bignum.
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
#define rb_check_frozen
Just another name of rb_check_frozen.
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
#define rb_check_frozen_internal(obj)
int rb_during_gc(void)
Queries if the GC is busy.
VALUE rb_hash_aref(VALUE hash, VALUE key)
Queries the given key in the given hash table.
VALUE rb_hash_aset(VALUE hash, VALUE key, VALUE val)
Inserts or replaces ("upsert"s) the objects into the given hash table.
VALUE rb_hash_lookup(VALUE hash, VALUE key)
Identical to rb_hash_aref(), except it always returns RUBY_Qnil for misshits.
VALUE rb_hash_dup(VALUE hash)
Duplicates a hash.
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_sym_to_s(VALUE sym)
This is an rb_sym2str() + rb_str_dup() combo.
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
VALUE rb_str_cat_cstr(VALUE dst, const char *src)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
void rb_thread_schedule(void)
Tries to switch to another thread.
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
VALUE rb_attr_get(VALUE obj, ID name)
Identical to rb_ivar_get()
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as a Ruby string instead of a C string.
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
int rb_method_basic_definition_p(VALUE klass, ID mid)
Well...
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
const char * rb_id2name(ID id)
Retrieves the name mapped to the given id.
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
VALUE rb_id2str(ID id)
Identical to rb_id2name(), except it returns a Ruby String instead of a C string.
VALUE rb_sprintf(const char *fmt,...)
Ruby's extended sprintf(3).
VALUE rb_str_catf(VALUE dst, const char *fmt,...)
Identical to rb_sprintf(), except it renders the output to the specified object rather than creating ...
VALUE rb_uint2big(uintptr_t i)
Converts a C's uintptr_t into an instance of rb_cInteger.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
VALUE type(ANYARGS)
ANYARGS-ed function type.
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
#define RARRAY_LEN
Just another name of rb_array_len.
#define RARRAY_CONST_PTR_TRANSIENT
Just another name of rb_array_const_ptr_transient.
#define RARRAY_AREF(a, i)
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RBASIC(obj)
Convenient casting macro.
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
#define RHASH_SIZE(h)
Queries the size of the hash.
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
static uint32_t ROBJECT_NUMIV(VALUE obj)
Queries the number of instance variables.
static char * RSTRING_PTR(VALUE str)
Queries the contents pointer of the string.
static long RSTRING_LEN(VALUE str)
Queries the length of the string.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
#define RB_NO_KEYWORDS
Do not pass keywords.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
struct rb_iseq_constant_body::@152 param
parameter information
rb_cref_t * cref
class reference, should be marked
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
IFUNC (Internal FUNCtion)
const VALUE cref_or_me
class reference or rb_method_entry_t
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
uintptr_t VALUE
Type that represents a Ruby object.
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.