4 #include "internal/compile.h"
5 #include "internal/class.h"
6 #include "internal/hash.h"
7 #include "internal/object.h"
8 #include "internal/sanitizers.h"
9 #include "internal/string.h"
10 #include "internal/struct.h"
11 #include "internal/variable.h"
12 #include "internal/re.h"
14 #include "probes_helper.h"
16 #include "yjit_iface.h"
17 #include "yjit_core.h"
18 #include "yjit_codegen.h"
22 static codegen_fn gen_fns[VM_INSTRUCTION_SIZE] = { NULL };
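// Map from method serial numbers to specialized code generation functions for known core C methods.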
25 static st_table *yjit_method_codegen_table = NULL;
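// Code used when control returns from JIT code to the interpreter after a `leave`.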
28 static void *leave_exit_code;
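// Position in the outlined codeblock of the code that performs a full (traced) C function return.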
31 static uint32_t outline_full_cfunc_return_pos;
35 uint32_t inline_patch_pos;
36 uint32_t outlined_target_pos;
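// Positions in the generated code that get patched with jumps to outlined exits when all JIT code is invalidated.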
41 static patch_array_t global_inval_patches = NULL;
46 jit_print_loc(jitstate_t *jit, const char *msg)
50 VALUE path = rb_iseq_path(jit->iseq);
52 fprintf(stderr, "%s %.*s:%u\n", msg, (int)len, ptr, rb_iseq_line_no(jit->iseq, jit->insn_idx));
60 mov(cb, C_ARG_REGS[0], opnd);
61 call_ptr(cb, REG0, (void *)rb_obj_info_dump);
76 return jit->insn_idx + insn_len(jit_get_opcode(jit));
83 RUBY_ASSERT(arg_idx + 1 < (size_t)insn_len(jit_get_opcode(jit)));
84 return *(jit->pc + arg_idx + 1);
91 RUBY_ASSERT(reg.type == OPND_REG && reg.num_bits == 64);
94 mov(cb, reg, const_ptr_opnd((void*)ptr));
97 uint32_t ptr_offset = cb->write_pos - sizeof(VALUE);
100 if (!rb_darray_append(&jit->block->gc_object_offsets, ptr_offset)) {
101 rb_bug("allocation failed");
111 const VALUE *ec_pc = jit->ec->cfp->pc;
112 return (ec_pc == jit->pc);
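// Peek at the nth topmost value on the Ruby stack; only valid while jit_at_current_insn() is true.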
126 VALUE *sp = jit->ec->cfp->sp;
128 return *(sp - 1 - n);
134 return jit->ec->cfp->self;
143 int32_t local_table_size = jit->iseq->body->local_table_size;
144 RUBY_ASSERT(n < (int)jit->iseq->body->local_table_size);
146 const VALUE *ep = jit->ec->cfp->ep;
147 return ep[-VM_ENV_DATA_SIZE - local_table_size + n + 1];
156 mov(cb, scratch_reg, const_ptr_opnd(jit->pc + insn_len(jit->opcode)));
167 if (ctx->sp_offset != 0) {
168 x86opnd_t stack_pointer = ctx_sp_opnd(ctx, 0);
170 lea(cb, REG_SP, stack_pointer);
184 jit->record_boundary_patch_point = true;
185 jit_save_pc(jit, scratch_reg);
186 jit_save_sp(jit, ctx);
192 record_global_inval_patch(const codeblock_t *cb, uint32_t outline_block_target_pos)
194 struct codepage_patch patch_point = { cb->write_pos, outline_block_target_pos };
195 if (!rb_darray_append(&global_inval_patches, patch_point)) rb_bug("allocation failed");
204 _add_comment(codeblock_t *cb, const char *comment_str)
211 size_t num_comments = rb_darray_size(yjit_code_comments);
212 if (num_comments > 0) {
213 struct yjit_comment last_comment = rb_darray_get(yjit_code_comments, num_comments - 1);
214 if (last_comment.offset == cb->write_pos && strcmp(last_comment.comment, comment_str) == 0) {
219 struct yjit_comment new_comment = (struct yjit_comment){ cb->write_pos, comment_str };
220 rb_darray_append(&yjit_code_comments, new_comment);
224 #define ADD_COMMENT(cb, comment) _add_comment((cb), (comment))
234 VALUE self_val = jit_peek_at_self(jit, ctx);
235 if (type_diff(yjit_type_of_value(self_val), ctx->self_type) == INT_MAX) {
236 rb_bug("verify_ctx: ctx type (%s) incompatible with actual value of self: %s", yjit_type_name(ctx->self_type), rb_obj_info(self_val));
239 for (int i = 0; i < ctx->stack_size && i < MAX_TEMP_TYPES; i++) {
241 VALUE val = jit_peek_at_stack(jit, ctx, i);
242 val_type_t detected = yjit_type_of_value(val);
244 if (learned.mapping.kind == TEMP_SELF) {
245 if (self_val != val) {
246 rb_bug("verify_ctx: stack value was mapped to self, but values did not match: %s", rb_obj_info(self_val));
254 if (learned.mapping.kind == TEMP_LOCAL) {
255 int local_idx = learned.mapping.idx;
256 VALUE local_val = jit_peek_at_local(jit, ctx, local_idx);
257 if (local_val != val) {
258 rb_bug("verify_ctx: stack value was mapped to local, but values did not match: %s", rb_obj_info(local_val));
267 if (type_diff(detected, learned.type) == INT_MAX) {
268 rb_bug("verify_ctx: ctx type (%s) incompatible with actual value on stack: %s", yjit_type_name(learned.type), rb_obj_info(val));
272 int32_t local_table_size = jit->iseq->body->local_table_size;
273 for (int i = 0; i < local_table_size && i < MAX_TEMP_TYPES; i++) {
275 VALUE val = jit_peek_at_local(jit, ctx, i);
276 val_type_t detected = yjit_type_of_value(val);
278 if (type_diff(detected, learned) == INT_MAX) {
279 rb_bug("verify_ctx: ctx type (%s) incompatible with actual value of local: %s", yjit_type_name(learned), rb_obj_info(val));
286 #define ADD_COMMENT(cb, comment) ((void)0)
287 #define verify_ctx(jit, ctx) ((void)0)
294 #define GEN_COUNTER_INC(cb, counter_name) _gen_counter_inc(cb, &(yjit_runtime_counters . counter_name))
296 _gen_counter_inc(codeblock_t *cb, int64_t *counter)
298 if (!rb_yjit_opts.gen_stats) return;
301 mov(cb, REG1, const_ptr_opnd(counter));
302 cb_write_lock_prefix(cb);
303 add(cb, mem_opnd(64, REG1, 0), imm_opnd(1));
307 #define COUNTED_EXIT(jit, side_exit, counter_name) _counted_side_exit(jit, side_exit, &(yjit_runtime_counters . counter_name))
309 _counted_side_exit(jitstate_t *jit, uint8_t *existing_side_exit, int64_t *counter)
311 if (!rb_yjit_opts.gen_stats) return existing_side_exit;
313 uint8_t *start = cb_get_ptr(jit->ocb, jit->ocb->write_pos);
314 _gen_counter_inc(jit->ocb, counter);
315 jmp_ptr(jit->ocb, existing_side_exit);
321 #define GEN_COUNTER_INC(cb, counter_name) ((void)0)
322 #define COUNTED_EXIT(jit, side_exit, counter_name) side_exit
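// Generate an exit to the interpreter at exit_pc: restore the interpreter SP and PC in the CFP and return Qundef so the VM re-executes the instruction.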
330 const uint32_t code_pos = cb->write_pos;
332 ADD_COMMENT(cb, "exit to interpreter");
336 if (ctx->sp_offset != 0) {
337 x86opnd_t stack_pointer = ctx_sp_opnd(ctx, 0);
338 lea(cb, REG_SP, stack_pointer);
343 mov(cb, RAX, const_ptr_opnd(exit_pc));
348 if (rb_yjit_opts.gen_stats) {
349 mov(cb, RDI, const_ptr_opnd(exit_pc));
350 call_ptr(cb, RSI, (void *)&yjit_count_side_exit_op);
358 mov(cb, RAX, imm_opnd(Qundef));
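// Shared exit used after `leave` returns control from JIT code to the interpreter (counted as leave_interp_return).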
368 uint8_t *code_ptr = cb_get_ptr(cb, cb->write_pos);
374 GEN_COUNTER_INC(cb, leave_interp_return);
390 gen_code_for_exit_from_stub(void)
393 code_for_exit_from_stub = cb_get_ptr(cb, cb->write_pos);
395 GEN_COUNTER_INC(cb, exit_from_branch_stub);
401 mov(cb, RAX, imm_opnd(Qundef));
420 if (!jit->side_exit_for_pc) {
422 uint32_t pos = yjit_gen_exit(jit->pc, ctx, ocb);
423 jit->side_exit_for_pc = cb_get_ptr(ocb, pos);
426 return jit->side_exit_for_pc;
435 if (block->entry_exit) return;
437 if (jit->insn_idx == block->blockid.idx) {
440 block->entry_exit = yjit_side_exit(jit, &block->ctx);
443 VALUE *pc = yjit_iseq_pc_at_idx(block->blockid.iseq, block->blockid.idx);
444 uint32_t pos = yjit_gen_exit(pc, &block->ctx, ocb);
445 block->entry_exit = cb_get_ptr(ocb, pos);
460 mov(cb, REG1, const_ptr_opnd(iseq->body->iseq_encoded));
464 uint32_t pc_is_zero = cb_new_label(cb, "pc_is_zero");
465 jz_label(cb, pc_is_zero);
468 GEN_COUNTER_INC(cb, leave_start_pc_non_zero);
474 mov(cb, RAX, imm_opnd(Qundef));
478 cb_write_label(cb, pc_is_zero);
500 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, cfp->self, me->def->original_id, me->called_id, me->owner, return_value);
504 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
508 ec->cfp->sp[0] = return_value;
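// Outlined code for a full C function return: calls full_cfunc_return() so c_return events fire, then exits to the interpreter.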
514 gen_full_cfunc_return(void)
517 outline_full_cfunc_return_pos = ocb->write_pos;
523 mov(cb, C_ARG_REGS[0], REG_EC);
524 mov(cb, C_ARG_REGS[1], RAX);
525 call_ptr(cb, REG0, (void *)full_cfunc_return);
528 GEN_COUNTER_INC(cb, traced_cfunc_return);
535 mov(cb, RAX, imm_opnd(Qundef));
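// Interpreter entry prologue: aligned landing code that loads EC and CFP into their dedicated registers and points the frame's JIT return address at leave_exit_code.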
548 enum { MAX_PROLOGUE_SIZE = 1024 };
551 if (cb->write_pos + MAX_PROLOGUE_SIZE >= cb->mem_size) {
555 const uint32_t old_write_pos = cb->write_pos;
558 cb_align_pos(cb, 64);
560 uint8_t *code_ptr = cb_get_ptr(cb, cb->write_pos);
561 ADD_COMMENT(cb, "yjit entry");
568 mov(cb, REG_EC, C_ARG_REGS[0]);
569 mov(cb, REG_CFP, C_ARG_REGS[1]);
576 mov(cb, REG0, const_ptr_opnd(leave_exit_code));
585 if (iseq->body->param.flags.has_opt) {
586 yjit_pc_guard(cb, iseq);
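// Check for pending interrupts (RUBY_VM_CHECK_INTS) and take the side exit if any are set.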
598 yjit_check_ints(codeblock_t *cb, uint8_t *side_exit)
602 ADD_COMMENT(cb, "RUBY_VM_CHECK_INTS(ec)");
606 jnz_ptr(cb, side_exit);
616 ctx_t reset_depth = *current_context;
617 reset_depth.chain_depth = 0;
619 blockid_t jump_block = { jit->iseq, jit_next_insn_idx(jit) };
622 if (jit->record_boundary_patch_point) {
623 uint32_t exit_pos = yjit_gen_exit(jit->pc + insn_len(jit->opcode), &reset_depth, jit->ocb);
624 record_global_inval_patch(jit->cb, exit_pos);
625 jit->record_boundary_patch_point = false;
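// Compile one block version: walk the iseq from blockid.idx, dispatching each instruction to its codegen function until one ends the block or cannot be compiled.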
642 verify_blockid(blockid);
651 ctx_t ctx_copy = *start_ctx;
652 ctx_t *ctx = &ctx_copy;
655 *ctx = limit_block_versions(blockid, ctx);
658 block->blockid = blockid;
661 RUBY_ASSERT(!(blockid.idx == 0 && start_ctx->stack_size > 0));
663 const rb_iseq_t *iseq = block->blockid.iseq;
664 const unsigned int iseq_size = iseq->body->iseq_size;
665 uint32_t insn_idx = block->blockid.idx;
666 const uint32_t starting_insn_idx = insn_idx;
678 block->start_addr = cb_get_write_ptr(cb);
681 while (insn_idx < iseq_size) {
683 VALUE *pc = yjit_iseq_pc_at_idx(iseq, insn_idx);
684 int opcode = yjit_opcode_at_pc(iseq, pc);
685 RUBY_ASSERT(opcode >= 0 && opcode < VM_INSTRUCTION_SIZE);
689 if (opcode == BIN(opt_getinlinecache) && insn_idx > starting_insn_idx) {
690 jit_jump_to_next_insn(&jit, ctx);
695 jit.insn_idx = insn_idx;
698 jit.side_exit_for_pc = NULL;
701 if (jit.record_boundary_patch_point) {
703 uint32_t exit_pos = yjit_gen_exit(jit.pc, ctx, ocb);
704 record_global_inval_patch(cb, exit_pos);
705 jit.record_boundary_patch_point = false;
709 if (jit_at_current_insn(&jit)) {
710 verify_ctx(&jit, ctx);
714 codegen_fn gen_fn = gen_fns[opcode];
715 codegen_status_t status = YJIT_CANT_COMPILE;
718 fprintf(stderr, "compiling %d: %s\n", insn_idx, insn_name(opcode));
719 print_str(cb, insn_name(opcode));
725 GEN_COUNTER_INC(cb, exec_instruction);
728 ADD_COMMENT(cb, insn_name(opcode));
731 status = gen_fn(&jit, ctx, cb);
736 if (status == YJIT_CANT_COMPILE) {
740 uint32_t exit_off = yjit_gen_exit(jit.pc, ctx, cb);
744 if (insn_idx == block->blockid.idx) {
745 block->entry_exit = cb_get_ptr(cb, exit_off);
752 ctx->chain_depth = 0;
755 insn_idx += insn_len(opcode);
758 if (status == YJIT_END_BLOCK) {
764 block->end_addr = cb_get_write_ptr(cb);
767 block->end_idx = insn_idx;
774 if (cb->dropped_bytes || ocb->dropped_bytes) {
775 yjit_free_block(block);
779 if (YJIT_DUMP_MODE >= 2) {
781 fprintf(stderr, "Compiled the following for iseq=%p:\n", (void *)iseq);
782 for (uint32_t idx = block->blockid.idx; idx < insn_idx; ) {
783 int opcode = yjit_opcode_at_pc(iseq, yjit_iseq_pc_at_idx(iseq, idx));
784 fprintf(stderr, " %04d %s\n", idx, insn_name(opcode));
785 idx += insn_len(opcode);
794 static codegen_status_t
798 return YJIT_KEEP_COMPILING;
801 static codegen_status_t
805 x86opnd_t dup_val = ctx_stack_pop(ctx, 0);
809 x86opnd_t loc0 = ctx_stack_push_mapping(ctx, mapping);
810 mov(cb, REG0, dup_val);
813 return YJIT_KEEP_COMPILING;
817 static codegen_status_t
820 rb_num_t n = (rb_num_t)jit_get_arg(jit, 0);
824 return YJIT_CANT_COMPILE;
827 x86opnd_t opnd1 = ctx_stack_opnd(ctx, 1);
828 x86opnd_t opnd0 = ctx_stack_opnd(ctx, 0);
832 x86opnd_t dst1 = ctx_stack_push_mapping(ctx, mapping1);
833 mov(cb, REG0, opnd1);
836 x86opnd_t dst0 = ctx_stack_push_mapping(ctx, mapping0);
837 mov(cb, REG0, opnd0);
840 return YJIT_KEEP_COMPILING;
846 x86opnd_t opnd0 = ctx_stack_opnd(ctx, offset0);
847 x86opnd_t opnd1 = ctx_stack_opnd(ctx, offset1);
852 mov(cb, reg0, opnd0);
853 mov(cb, reg1, opnd1);
854 mov(cb, opnd0, reg1);
855 mov(cb, opnd1, reg0);
857 ctx_set_opnd_mapping(ctx, OPND_STACK(offset0), mapping1);
858 ctx_set_opnd_mapping(ctx, OPND_STACK(offset1), mapping0);
862 static codegen_status_t
865 stack_swap(ctx, cb, 0, 1, REG0, REG1);
866 return YJIT_KEEP_COMPILING;
870 static codegen_status_t
873 rb_num_t n = (rb_num_t)jit_get_arg(jit, 0);
876 x86opnd_t top_val = ctx_stack_pop(ctx, 0);
877 x86opnd_t dst_opnd = ctx_stack_opnd(ctx, (int32_t)n);
878 mov(cb, REG0, top_val);
879 mov(cb, dst_opnd, REG0);
882 ctx_set_opnd_mapping(ctx, OPND_STACK(n), mapping);
884 return YJIT_KEEP_COMPILING;
888 static codegen_status_t
891 int32_t n = (int32_t)jit_get_arg(jit, 0);
894 x86opnd_t top_n_val = ctx_stack_opnd(ctx, n);
897 x86opnd_t loc0 = ctx_stack_push_mapping(ctx, mapping);
898 mov(cb, REG0, top_n_val);
901 return YJIT_KEEP_COMPILING;
904 static codegen_status_t
908 ctx_stack_pop(ctx, 1);
909 return YJIT_KEEP_COMPILING;
913 static codegen_status_t
916 rb_num_t n = (rb_num_t)jit_get_arg(jit, 0);
917 ctx_stack_pop(ctx, n);
918 return YJIT_KEEP_COMPILING;
922 static codegen_status_t
925 rb_num_t n = (rb_num_t)jit_get_arg(jit, 0);
928 jit_prepare_routine_call(jit, ctx, REG0);
930 x86opnd_t values_ptr = ctx_sp_opnd(ctx, -(sizeof(VALUE) * (uint32_t)n));
933 mov(cb, C_ARG_REGS[0], REG_EC);
934 mov(cb, C_ARG_REGS[1], imm_opnd(n));
935 lea(cb, C_ARG_REGS[2], values_ptr);
936 call_ptr(cb, REG0, (void *)rb_ec_ary_new_from_values);
938 ctx_stack_pop(ctx, n);
939 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_ARRAY);
940 mov(cb, stack_ret, RAX);
942 return YJIT_KEEP_COMPILING;
946 static codegen_status_t
949 VALUE ary = jit_get_arg(jit, 0);
952 jit_prepare_routine_call(jit, ctx, REG0);
955 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[0], ary);
958 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_ARRAY);
959 mov(cb, stack_ret, RAX);
961 return YJIT_KEEP_COMPILING;
965 static codegen_status_t
968 VALUE hash = jit_get_arg(jit, 0);
971 jit_prepare_routine_call(jit, ctx, REG0);
974 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[0], hash);
975 call_ptr(cb, REG0, (void *)rb_hash_resurrect);
977 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_HASH);
978 mov(cb, stack_ret, RAX);
980 return YJIT_KEEP_COMPILING;
986 static codegen_status_t
993 jit_prepare_routine_call(jit, ctx, REG0);
996 x86opnd_t ary_opnd = ctx_stack_pop(ctx, 1);
999 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[0], flag);
1000 mov(cb, C_ARG_REGS[1], ary_opnd);
1001 call_ptr(cb, REG1, (void *)rb_vm_splat_array);
1003 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_ARRAY);
1004 mov(cb, stack_ret, RAX);
1006 return YJIT_KEEP_COMPILING;
1010 static codegen_status_t
1013 rb_num_t flag = (rb_num_t)jit_get_arg(jit, 0);
1016 jit_prepare_routine_call(jit, ctx, REG0);
1019 mov(cb, C_ARG_REGS[0], ctx_stack_opnd(ctx, 1));
1020 mov(cb, C_ARG_REGS[1], ctx_stack_opnd(ctx, 0));
1021 mov(cb, C_ARG_REGS[2], imm_opnd(flag));
1024 ctx_stack_pop(ctx, 2);
1025 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_HEAP);
1026 mov(cb, stack_ret, RAX);
1028 return YJIT_KEEP_COMPILING;
1034 ADD_COMMENT(cb, "guard object is heap");
1038 jnz_ptr(cb, side_exit);
1041 cmp(cb, object_opnd, imm_opnd(Qnil));
1043 jbe_ptr(cb, side_exit);
1049 ADD_COMMENT(cb, "guard object is array");
1052 mov(cb, flags_opnd, member_opnd(object_opnd, struct RBasic, flags));
1056 cmp(cb, flags_opnd, imm_opnd(T_ARRAY));
1057 jne_ptr(cb, side_exit);
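// expandarray: pop the array on the stack top and push `num` of its elements, side-exiting if the value is not a heap Array or has too few elements; a nil receiver just pushes nils.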
1061 static codegen_status_t
1064 int flag = (int) jit_get_arg(jit, 1);
1068 GEN_COUNTER_INC(cb, expandarray_splat);
1069 return YJIT_CANT_COMPILE;
1074 GEN_COUNTER_INC(cb, expandarray_postarg);
1075 return YJIT_CANT_COMPILE;
1078 uint8_t *side_exit = yjit_side_exit(jit, ctx);
1082 int num = (int)jit_get_arg(jit, 0);
1083 val_type_t array_type = ctx_get_opnd_type(ctx, OPND_STACK(0));
1084 x86opnd_t array_opnd = ctx_stack_pop(ctx, 1);
1086 if (array_type.type == ETYPE_NIL) {
1089 for (int i = 0; i < num; i++) {
1090 x86opnd_t push = ctx_stack_push(ctx, TYPE_NIL);
1091 mov(cb, push, imm_opnd(Qnil));
1093 return YJIT_KEEP_COMPILING;
1097 mov(cb, REG0, array_opnd);
1098 guard_object_is_heap(cb, REG0, ctx, COUNTED_EXIT(jit, side_exit, expandarray_not_array));
1099 guard_object_is_array(cb, REG0, REG1, ctx, COUNTED_EXIT(jit, side_exit, expandarray_not_array));
1103 return YJIT_KEEP_COMPILING;
1108 mov(cb, REG1, flags_opnd);
1111 and(cb, REG1, imm_opnd(RARRAY_EMBED_LEN_MASK));
1115 test(cb, flags_opnd, imm_opnd(RARRAY_EMBED_FLAG));
1116 cmovz(cb, REG1, member_opnd(REG0, struct RArray, as.heap.len));
1120 cmp(cb, REG1, imm_opnd(num));
1121 jl_ptr(cb, COUNTED_EXIT(jit, side_exit, expandarray_rhs_too_small));
1125 lea(cb, REG1, member_opnd(REG0, struct RArray, as.ary));
1129 test(cb, flags_opnd, imm_opnd(RARRAY_EMBED_FLAG));
1130 cmovz(cb, REG1, member_opnd(REG0, struct RArray, as.heap.ptr));
1133 for (int32_t i = (int32_t) num - 1; i >= 0; i--) {
1134 x86opnd_t top = ctx_stack_push(ctx, TYPE_UNKNOWN);
1139 return YJIT_KEEP_COMPILING;
1143 static codegen_status_t
1146 int32_t num = (int32_t)jit_get_arg(jit, 0);
1149 jit_prepare_routine_call(jit, ctx, REG0);
1153 mov(cb, C_ARG_REGS[0], imm_opnd(num / 2));
1154 call_ptr(cb, REG0, (void *)rb_hash_new_with_size);
1161 mov(cb, C_ARG_REGS[0], imm_opnd(num));
1162 lea(cb, C_ARG_REGS[1], ctx_stack_opnd(ctx, num - 1));
1163 mov(cb, C_ARG_REGS[2], RAX);
1169 ctx_stack_pop(ctx, num);
1170 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_HASH);
1171 mov(cb, stack_ret, RAX);
1177 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_HASH);
1178 mov(cb, stack_ret, RAX);
1181 return YJIT_KEEP_COMPILING;
1189 val_type_t val_type = yjit_type_of_value(arg);
1190 x86opnd_t stack_top = ctx_stack_push(ctx, val_type);
1202 if (imm.num_bits <= 32) {
1203 mov(cb, stack_top, imm);
1207 mov(cb, stack_top, REG0);
1213 jit_mov_gc_ptr(jit, cb, REG0, arg);
1216 mov(cb, stack_top, REG0);
1220 static codegen_status_t
1223 jit_putobject(jit, ctx, Qnil);
1224 return YJIT_KEEP_COMPILING;
1227 static codegen_status_t
1230 VALUE arg = jit_get_arg(jit, 0);
1232 jit_putobject(jit, ctx, arg);
1233 return YJIT_KEEP_COMPILING;
1236 static codegen_status_t
1239 VALUE put_val = jit_get_arg(jit, 0);
1242 jit_prepare_routine_call(jit, ctx, REG0);
1244 mov(cb, C_ARG_REGS[0], REG_EC);
1245 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[1], put_val);
1246 call_ptr(cb, REG0, (void *)rb_ec_str_resurrect);
1248 x86opnd_t stack_top = ctx_stack_push(ctx, TYPE_STRING);
1249 mov(cb, stack_top, RAX);
1251 return YJIT_KEEP_COMPILING;
1254 static codegen_status_t
1257 int opcode = jit_get_opcode(jit);
1258 int cst_val = (opcode == BIN(putobject_INT2FIX_0_))? 0:1;
1260 jit_putobject(jit, ctx, INT2FIX(cst_val));
1261 return YJIT_KEEP_COMPILING;
1264 static codegen_status_t
1271 x86opnd_t stack_top = ctx_stack_push_self(ctx);
1272 mov(cb, stack_top, REG0);
1274 return YJIT_KEEP_COMPILING;
1277 static codegen_status_t
1280 enum vm_special_object_type type = (enum vm_special_object_type)jit_get_arg(jit, 0);
1282 if (type == VM_SPECIAL_OBJECT_VMCORE) {
1283 x86opnd_t stack_top = ctx_stack_push(ctx, TYPE_HEAP);
1284 jit_mov_gc_ptr(jit, cb, REG0, rb_mRubyVMFrozenCore);
1285 mov(cb, stack_top, REG0);
1286 return YJIT_KEEP_COMPILING;
1291 return YJIT_CANT_COMPILE;
1306 mov(cb, reg, mem_opnd(64, REG0, SIZEOF_VALUE * VM_ENV_DATA_INDEX_SPECVAL));
1307 and(cb, reg, imm_opnd(~0x03));
1313 slot_to_local_idx(const rb_iseq_t *iseq, int32_t slot_idx)
1316 int32_t local_table_size = iseq->body->local_table_size;
1317 int32_t op = slot_idx - VM_ENV_DATA_SIZE;
1318 int32_t local_idx = local_table_size - op - 1;
1319 RUBY_ASSERT(local_idx >= 0 && local_idx < local_table_size);
1320 return (uint32_t)local_idx;
1323 static codegen_status_t
1327 int32_t slot_idx = (int32_t)jit_get_arg(jit, 0);
1329 uint32_t local_idx = slot_to_local_idx(jit->iseq, slot_idx);
1332 gen_get_ep(cb, REG0, 0);
1335 mov(cb, REG0, mem_opnd(64, REG0, offs));
1338 x86opnd_t stack_top = ctx_stack_push_local(ctx, local_idx);
1339 mov(cb, stack_top, REG0);
1341 return YJIT_KEEP_COMPILING;
1344 static codegen_status_t
1345 gen_getlocal_generic(ctx_t *ctx, uint32_t local_idx, uint32_t level)
1347 gen_get_ep(cb, REG0, level);
1352 mov(cb, REG0, mem_opnd(64, REG0, offs));
1355 x86opnd_t stack_top = ctx_stack_push(ctx, TYPE_UNKNOWN);
1356 mov(cb, stack_top, REG0);
1358 return YJIT_KEEP_COMPILING;
1361 static codegen_status_t
1364 int32_t idx = (int32_t)jit_get_arg(jit, 0);
1365 int32_t level = (int32_t)jit_get_arg(jit, 1);
1366 return gen_getlocal_generic(ctx, idx, level);
1369 static codegen_status_t
1372 int32_t idx = (int32_t)jit_get_arg(jit, 0);
1373 return gen_getlocal_generic(ctx, idx, 1);
1376 static codegen_status_t
1392 int32_t slot_idx = (int32_t)jit_get_arg(jit, 0);
1393 uint32_t local_idx = slot_to_local_idx(jit->iseq, slot_idx);
1396 gen_get_ep(cb, REG0, 0);
1399 x86opnd_t flags_opnd = mem_opnd(64, REG0, sizeof(VALUE) * VM_ENV_DATA_INDEX_FLAGS);
1400 test(cb, flags_opnd, imm_opnd(VM_ENV_FLAG_WB_REQUIRED));
1403 uint8_t *side_exit = yjit_side_exit(jit, ctx);
1406 jnz_ptr(cb, side_exit);
1409 val_type_t temp_type = ctx_get_opnd_type(ctx, OPND_STACK(0));
1410 ctx_set_local_type(ctx, local_idx, temp_type);
1413 x86opnd_t stack_top = ctx_stack_pop(ctx, 1);
1414 mov(cb, REG1, stack_top);
1417 const int32_t offs = -8 * slot_idx;
1418 mov(cb, mem_opnd(64, REG0, offs), REG1);
1420 return YJIT_KEEP_COMPILING;
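// checkkeyword: test one bit of the unspecified-keywords bitmap local and push Qtrue/Qfalse indicating whether the keyword argument was supplied by the caller.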
1425 static codegen_status_t
1430 if (jit->iseq->body->param.keyword->num >= 32) {
1431 return YJIT_CANT_COMPILE;
1435 int32_t bits_offset = (int32_t)jit_get_arg(jit, 0);
1438 int32_t index = (int32_t)jit_get_arg(jit, 1);
1441 gen_get_ep(cb, REG0, 0);
1444 x86opnd_t bits_opnd = mem_opnd(64, REG0, sizeof(VALUE) * -bits_offset);
1450 int64_t bit_test = 0x01 << (index + 1);
1451 test(cb, bits_opnd, imm_opnd(bit_test));
1452 mov(cb, REG0, imm_opnd(Qfalse));
1453 mov(cb, REG1, imm_opnd(Qtrue));
1454 cmovz(cb, REG0, REG1);
1456 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_IMM);
1457 mov(cb, stack_ret, REG0);
1459 return YJIT_KEEP_COMPILING;
1462 static codegen_status_t
1463 gen_setlocal_generic(jitstate_t *jit, ctx_t *ctx, uint32_t local_idx, uint32_t level)
1466 gen_get_ep(cb, REG0, level);
1469 x86opnd_t flags_opnd = mem_opnd(64, REG0, sizeof(VALUE) * VM_ENV_DATA_INDEX_FLAGS);
1470 test(cb, flags_opnd, imm_opnd(VM_ENV_FLAG_WB_REQUIRED));
1470 test(cb, flags_opnd, imm_opnd(VM_ENV_FLAG_WB_REQUIRED));
1473 uint8_t *side_exit = yjit_side_exit(jit, ctx);
1476 jnz_ptr(cb, side_exit);
1479 x86opnd_t stack_top = ctx_stack_pop(ctx, 1);
1480 mov(cb, REG1, stack_top);
1484 mov(cb, mem_opnd(64, REG0, offs), REG1);
1486 return YJIT_KEEP_COMPILING;
1489 static codegen_status_t
1492 int32_t idx = (int32_t)jit_get_arg(jit, 0);
1493 int32_t level = (int32_t)jit_get_arg(jit, 1);
1494 return gen_setlocal_generic(jit, ctx, idx, level);
1497 static codegen_status_t
1500 int32_t idx = (int32_t)jit_get_arg(jit, 0);
1501 return gen_setlocal_generic(jit, ctx, idx, 1);
1505 gen_jnz_to_target0(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
1514 jnz_ptr(cb, target0);
1520 gen_jz_to_target0(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
1529 jz_ptr(cb, target0);
1535 gen_jbe_to_target0(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
1544 jbe_ptr(cb, target0);
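// Chain guard: while chain_depth is below depth_limit, a failed guard branches to a stub that recompiles this instruction with a deeper, more specialized context; past the limit it jumps straight to the side exit.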
1561 jit_chain_guard(enum jcc_kinds jcc, jitstate_t *jit, const ctx_t *ctx, uint8_t depth_limit, uint8_t *side_exit)
1563 branchgen_fn target0_gen_fn;
1568 target0_gen_fn = gen_jnz_to_target0;
1572 target0_gen_fn = gen_jz_to_target0;
1576 target0_gen_fn = gen_jbe_to_target0;
1579 rb_bug("yjit: unimplemented jump kind");
1583 if (ctx->chain_depth < depth_limit) {
1584 ctx_t deeper = *ctx;
1585 deeper.chain_depth++;
1590 (blockid_t) { jit->iseq, jit->insn_idx },
1598 target0_gen_fn(cb, side_exit, NULL, SHAPE_DEFAULT);
1603 GETIVAR_MAX_DEPTH = 10,
1604 OPT_AREF_MAX_CHAIN_DEPTH = 2,
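// Set an instance variable on the receiver by index, by calling rb_vm_set_ivar_idx().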
1615 static codegen_status_t
1620 jit_prepare_routine_call(jit, ctx, REG0);
1623 x86opnd_t val_opnd = ctx_stack_pop(ctx, 1);
1624 x86opnd_t recv_opnd = ctx_stack_pop(ctx, 1);
1626 uint32_t ivar_index = rb_obj_ensure_iv_index_mapping(recv, ivar_name);
1629 mov(cb, C_ARG_REGS[0], recv_opnd);
1630 mov(cb, C_ARG_REGS[1], imm_opnd(ivar_index));
1631 mov(cb, C_ARG_REGS[2], val_opnd);
1632 call_ptr(cb, REG0, (void *)rb_vm_set_ivar_idx);
1634 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_UNKNOWN);
1635 mov(cb, out_opnd, RAX);
1637 return YJIT_KEEP_COMPILING;
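// Codegen for reading an instance variable: guard that the receiver is laid out (embedded or extended) like the object seen at compile time, then load the ivar slot directly; otherwise fall back to calling rb_ivar_get().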
1645 static codegen_status_t
1649 const ctx_t starting_context = *ctx;
1659 ADD_COMMENT(cb, "call rb_ivar_get()");
1662 jit_prepare_routine_call(jit, ctx, REG1);
1664 mov(cb, C_ARG_REGS[0], REG0);
1665 mov(cb, C_ARG_REGS[1], imm_opnd((int64_t)ivar_name));
1668 if (!reg0_opnd.is_self) {
1669 (void)ctx_stack_pop(ctx, 1);
1672 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_UNKNOWN);
1673 mov(cb, out_opnd, RAX);
1676 jit_jump_to_next_insn(jit, ctx);
1677 return YJIT_END_BLOCK;
1702 uint32_t ivar_index = rb_obj_ensure_iv_index_mapping(comptime_receiver, ivar_name);
1705 if (!reg0_opnd.is_self) {
1706 (void)ctx_stack_pop(ctx, 1);
1715 ADD_COMMENT(cb, "guard embedded getivar");
1717 test(cb, flags_opnd, imm_opnd(ROBJECT_EMBED));
1718 jit_chain_guard(JCC_JZ, jit, &starting_context, max_chain_depth, COUNTED_EXIT(jit, side_exit, getivar_megamorphic));
1722 mov(cb, REG1, ivar_opnd);
1725 cmp(cb, REG1, imm_opnd(Qundef));
1726 mov(cb, REG0, imm_opnd(Qnil));
1727 cmove(cb, REG1, REG0);
1730 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_UNKNOWN);
1731 mov(cb, out_opnd, REG1);
1738 ADD_COMMENT(cb, "guard extended getivar");
1740 test(cb, flags_opnd, imm_opnd(ROBJECT_EMBED));
1741 jit_chain_guard(JCC_JNZ, jit, &starting_context, max_chain_depth, COUNTED_EXIT(jit, side_exit, getivar_megamorphic));
1747 cmp(cb, num_slots, imm_opnd(ivar_index));
1748 jle_ptr(cb, COUNTED_EXIT(jit, side_exit, getivar_idx_out_of_range));
1753 mov(cb, REG0, tbl_opnd);
1756 x86opnd_t ivar_opnd = mem_opnd(64, REG0, sizeof(VALUE) * ivar_index);
1757 mov(cb, REG0, ivar_opnd);
1760 cmp(cb, REG0, imm_opnd(Qundef));
1761 mov(cb, REG1, imm_opnd(Qnil));
1762 cmove(cb, REG0, REG1);
1765 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_UNKNOWN);
1766 mov(cb, out_opnd, REG0);
1770 jit_jump_to_next_insn(jit, ctx);
1771 return YJIT_END_BLOCK;
1774 static codegen_status_t
1778 if (!jit_at_current_insn(jit)) {
1779 defer_compilation(jit, ctx);
1780 return YJIT_END_BLOCK;
1783 ID ivar_name = (ID)jit_get_arg(jit, 0);
1785 VALUE comptime_val = jit_peek_at_self(jit, ctx);
1789 uint8_t *side_exit = yjit_side_exit(jit, ctx);
1794 jit_guard_known_klass(jit, ctx, comptime_val_klass, OPND_SELF, comptime_val, GETIVAR_MAX_DEPTH, side_exit);
1796 return gen_get_ivar(jit, ctx, GETIVAR_MAX_DEPTH, comptime_val, ivar_name, OPND_SELF, side_exit);
1801 static codegen_status_t
1804 ID id = (ID)jit_get_arg(jit, 0);
1805 IVC ic = (IVC)jit_get_arg(jit, 1);
1809 jit_prepare_routine_call(jit, ctx, REG0);
1812 x86opnd_t val_opnd = ctx_stack_pop(ctx, 1);
1816 mov(cb, C_ARG_REGS[3], val_opnd);
1817 mov(cb, C_ARG_REGS[2], imm_opnd(id));
1818 mov(cb, C_ARG_REGS[4], const_ptr_opnd(ic));
1819 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[0], (VALUE)jit->iseq);
1820 call_ptr(cb, REG0, (void *)rb_vm_setinstancevariable);
1822 return YJIT_KEEP_COMPILING;
1827 static codegen_status_t
1830 rb_num_t op_type = (rb_num_t)jit_get_arg(jit, 0);
1836 jit_prepare_routine_call(jit, ctx, REG0);
1839 x86opnd_t v_opnd = ctx_stack_pop(ctx, 1);
1842 mov(cb, C_ARG_REGS[0], REG_EC);
1843 mov(cb, C_ARG_REGS[1], REG_CFP);
1844 mov(cb, C_ARG_REGS[2], imm_opnd(op_type));
1845 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[3], (VALUE)obj);
1846 mov(cb, C_ARG_REGS[4], v_opnd);
1847 call_ptr(cb, REG0, (void *)rb_vm_defined);
1852 jit_mov_gc_ptr(jit, cb, REG1, (VALUE)pushval);
1853 cmp(cb, AL, imm_opnd(0));
1854 mov(cb, RAX, imm_opnd(Qnil));
1855 cmovnz(cb, RAX, REG1);
1859 x86opnd_t stack_ret = ctx_stack_push(ctx, out_type);
1860 mov(cb, stack_ret, RAX);
1862 return YJIT_KEEP_COMPILING;
1865 static codegen_status_t
1871 val_type_t val_type = ctx_get_opnd_type(ctx, OPND_STACK(0));
1877 if ((type_val == T_STRING && val_type.type == ETYPE_STRING) ||
1878 (type_val == T_ARRAY && val_type.type == ETYPE_ARRAY) ||
1879 (type_val == T_HASH && val_type.type == ETYPE_HASH)) {
1881 stack_ret = ctx_stack_push(ctx, TYPE_TRUE);
1882 mov(cb, stack_ret, imm_opnd(Qtrue));
1883 return YJIT_KEEP_COMPILING;
1885 else if (val_type.is_imm || val_type.type != ETYPE_UNKNOWN) {
1887 stack_ret = ctx_stack_push(ctx, TYPE_FALSE);
1888 mov(cb, stack_ret, imm_opnd(Qfalse));
1889 return YJIT_KEEP_COMPILING;
1893 mov(cb, REG1, imm_opnd(Qfalse));
1895 uint32_t ret = cb_new_label(cb, "ret");
1897 if (!val_type.is_heap) {
1902 cmp(cb, REG0, imm_opnd(Qnil));
1907 mov(cb, REG0, mem_opnd(64, REG0, offsetof(struct RBasic, flags)));
1909 cmp(cb, REG0, imm_opnd(type_val));
1910 mov(cb, REG0, imm_opnd(Qtrue));
1912 cmove(cb, REG1, REG0);
1914 cb_write_label(cb, ret);
1915 stack_ret = ctx_stack_push(ctx, TYPE_IMM);
1916 mov(cb, stack_ret, REG1);
1919 return YJIT_KEEP_COMPILING;
1922 return YJIT_CANT_COMPILE;
1926 static codegen_status_t
1929 rb_num_t n = (rb_num_t)jit_get_arg(jit, 0);
1932 jit_prepare_routine_call(jit, ctx, REG0);
1934 x86opnd_t values_ptr = ctx_sp_opnd(ctx, -(sizeof(VALUE) * (uint32_t)n));
1937 mov(cb, C_ARG_REGS[0], imm_opnd(n));
1938 lea(cb, C_ARG_REGS[1], values_ptr);
1939 call_ptr(cb, REG0, (void *)rb_str_concat_literals);
1941 ctx_stack_pop(ctx, n);
1942 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_STRING);
1943 mov(cb, stack_ret, RAX);
1945 return YJIT_KEEP_COMPILING;
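// Emit guards that the two topmost stack values are Fixnums (side-exiting otherwise) and record the learned types in the context.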
1949 guard_two_fixnums(ctx_t *ctx, uint8_t *side_exit)
1952 val_type_t arg1_type = ctx_get_opnd_type(ctx, OPND_STACK(0));
1953 val_type_t arg0_type = ctx_get_opnd_type(ctx, OPND_STACK(1));
1955 if (arg0_type.is_heap || arg1_type.is_heap) {
1956 jmp_ptr(cb, side_exit);
1960 if (arg0_type.type != ETYPE_FIXNUM && arg0_type.type != ETYPE_UNKNOWN) {
1961 jmp_ptr(cb, side_exit);
1965 if (arg1_type.type != ETYPE_FIXNUM && arg1_type.type != ETYPE_UNKNOWN) {
1966 jmp_ptr(cb, side_exit);
1972 RUBY_ASSERT(arg0_type.type == ETYPE_FIXNUM || arg0_type.type == ETYPE_UNKNOWN);
1973 RUBY_ASSERT(arg1_type.type == ETYPE_FIXNUM || arg1_type.type == ETYPE_UNKNOWN);
1976 x86opnd_t arg1 = ctx_stack_opnd(ctx, 0);
1977 x86opnd_t arg0 = ctx_stack_opnd(ctx, 1);
1980 if (arg0_type.type != ETYPE_FIXNUM) {
1981 ADD_COMMENT(cb, "guard arg0 fixnum");
1983 jz_ptr(cb, side_exit);
1985 if (arg1_type.type != ETYPE_FIXNUM) {
1986 ADD_COMMENT(cb, "guard arg1 fixnum");
1988 jz_ptr(cb, side_exit);
1992 ctx_upgrade_opnd_type(ctx, OPND_STACK(0), TYPE_FIXNUM);
1993 ctx_upgrade_opnd_type(ctx, OPND_STACK(1), TYPE_FIXNUM);
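// Fixnum comparisons (opt_lt/le/ge/gt): guard both operands are Fixnums, compare them, and select Qtrue/Qfalse with the given conditional move; otherwise fall back to a dynamic send.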
1999 static codegen_status_t
2003 if (!jit_at_current_insn(jit)) {
2004 defer_compilation(jit, ctx);
2005 return YJIT_END_BLOCK;
2008 VALUE comptime_a = jit_peek_at_stack(jit, ctx, 1);
2009 VALUE comptime_b = jit_peek_at_stack(jit, ctx, 0);
2014 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2016 if (!assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_LT)) {
2017 return YJIT_CANT_COMPILE;
2021 guard_two_fixnums(ctx, side_exit);
2028 xor(cb, REG0_32, REG0_32);
2029 mov(cb, REG1, arg0);
2030 cmp(cb, REG1, arg1);
2031 mov(cb, REG1, imm_opnd(Qtrue));
2032 cmov_op(cb, REG0, REG1);
2035 x86opnd_t dst = ctx_stack_push(ctx, TYPE_UNKNOWN);
2038 return YJIT_KEEP_COMPILING;
2041 return gen_opt_send_without_block(jit, ctx, cb);
2045 static codegen_status_t
2048 return gen_fixnum_cmp(jit, ctx, cmovl);
2051 static codegen_status_t
2054 return gen_fixnum_cmp(jit, ctx, cmovle);
2057 static codegen_status_t
2060 return gen_fixnum_cmp(jit, ctx, cmovge);
2063 static codegen_status_t
2066 return gen_fixnum_cmp(jit, ctx, cmovg);
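// Specialized equality for Fixnum == Fixnum and String == String; returns false when no specialization applies so the caller can fall back to a dynamic send.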
2072 gen_equality_specialized(jitstate_t *jit, ctx_t *ctx, uint8_t *side_exit)
2074 VALUE comptime_a = jit_peek_at_stack(jit, ctx, 1);
2075 VALUE comptime_b = jit_peek_at_stack(jit, ctx, 0);
2077 x86opnd_t a_opnd = ctx_stack_opnd(ctx, 1);
2078 x86opnd_t b_opnd = ctx_stack_opnd(ctx, 0);
2081 if (!assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_EQ)) {
2086 guard_two_fixnums(ctx, side_exit);
2088 mov(cb, REG0, a_opnd);
2089 cmp(cb, REG0, b_opnd);
2091 mov(cb, REG0, imm_opnd(Qfalse));
2092 mov(cb, REG1, imm_opnd(Qtrue));
2093 cmove(cb, REG0, REG1);
2096 ctx_stack_pop(ctx, 2);
2097 x86opnd_t dst = ctx_stack_push(ctx, TYPE_IMM);
2104 if (!assume_bop_not_redefined(jit, STRING_REDEFINED_OP_FLAG, BOP_EQ)) {
2110 mov(cb, C_ARG_REGS[0], a_opnd);
2111 mov(cb, C_ARG_REGS[1], b_opnd);
2114 mov(cb, REG0, C_ARG_REGS[0]);
2115 jit_guard_known_klass(jit, ctx, rb_cString, OPND_STACK(1), comptime_a, SEND_MAX_DEPTH, side_exit);
2117 uint32_t ret = cb_new_label(cb, "ret");
2120 cmp(cb, C_ARG_REGS[0], C_ARG_REGS[1]);
2121 mov(cb, RAX, imm_opnd(Qtrue));
2125 if (ctx_get_opnd_type(ctx, OPND_STACK(0)).type != ETYPE_STRING) {
2126 mov(cb, REG0, C_ARG_REGS[1]);
2128 jit_guard_known_klass(jit, ctx, rb_cString, OPND_STACK(0), comptime_b, SEND_MAX_DEPTH, side_exit);
2132 call_ptr(cb, REG0, (void *)rb_str_eql_internal);
2135 cb_write_label(cb, ret);
2136 ctx_stack_pop(ctx, 2);
2137 x86opnd_t dst = ctx_stack_push(ctx, TYPE_IMM);
2148 static codegen_status_t
2152 if (!jit_at_current_insn(jit)) {
2153 defer_compilation(jit, ctx);
2154 return YJIT_END_BLOCK;
2158 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2160 if (gen_equality_specialized(jit, ctx, side_exit)) {
2161 jit_jump_to_next_insn(jit, ctx);
2162 return YJIT_END_BLOCK;
2165 return gen_opt_send_without_block(jit, ctx, cb);
2171 static codegen_status_t
2177 return gen_send_general(jit, ctx, cd, NULL);
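// opt_aref: specialize recv[idx] for Array receivers with Fixnum indexes (calling rb_ary_entry_internal) and for Hash receivers, guarded on the compile-time receiver class; otherwise fall back to a dynamic send.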
2180 static codegen_status_t
2184 int32_t argc = (int32_t)vm_ci_argc(cd->ci);
2188 GEN_COUNTER_INC(cb, oaref_argc_not_one);
2189 return YJIT_CANT_COMPILE;
2193 if (!jit_at_current_insn(jit)) {
2194 defer_compilation(jit, ctx);
2195 return YJIT_END_BLOCK;
2199 const ctx_t starting_context = *ctx;
2202 VALUE comptime_idx = jit_peek_at_stack(jit, ctx, 0);
2203 VALUE comptime_recv = jit_peek_at_stack(jit, ctx, 1);
2206 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2209 if (!assume_bop_not_redefined(jit, ARRAY_REDEFINED_OP_FLAG, BOP_AREF)) {
2210 return YJIT_CANT_COMPILE;
2214 x86opnd_t idx_opnd = ctx_stack_pop(ctx, 1);
2215 x86opnd_t recv_opnd = ctx_stack_pop(ctx, 1);
2216 mov(cb, REG0, recv_opnd);
2221 jnz_ptr(cb, side_exit);
2222 cmp(cb, REG0, imm_opnd(Qfalse));
2223 je_ptr(cb, side_exit);
2224 cmp(cb, REG0, imm_opnd(Qnil));
2225 je_ptr(cb, side_exit);
2229 mov(cb, REG1, mem_opnd(64, REG0, offsetof(struct RBasic, klass)));
2230 mov(cb, REG0, const_ptr_opnd((void *)rb_cArray));
2231 cmp(cb, REG0, REG1);
2232 jit_chain_guard(JCC_JNE, jit, &starting_context, OPT_AREF_MAX_CHAIN_DEPTH, side_exit);
2235 mov(cb, REG1, idx_opnd);
2237 jz_ptr(cb, COUNTED_EXIT(jit, side_exit, oaref_arg_not_fixnum));
2242 mov(cb, RDI, recv_opnd);
2243 sar(cb, REG1, imm_opnd(1));
2245 call_ptr(cb, REG0, (void *)rb_ary_entry_internal);
2248 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
2249 mov(cb, stack_ret, RAX);
2253 jit_jump_to_next_insn(jit, ctx);
2254 return YJIT_END_BLOCK;
2257 if (!assume_bop_not_redefined(jit, HASH_REDEFINED_OP_FLAG, BOP_AREF)) {
2258 return YJIT_CANT_COMPILE;
2261 x86opnd_t key_opnd = ctx_stack_opnd(ctx, 0);
2262 x86opnd_t recv_opnd = ctx_stack_opnd(ctx, 1);
2265 mov(cb, REG0, recv_opnd);
2266 jit_guard_known_klass(jit, ctx, rb_cHash, OPND_STACK(1), comptime_recv, OPT_AREF_MAX_CHAIN_DEPTH, side_exit);
2269 mov(cb, C_ARG_REGS[0], REG0);
2270 mov(cb, C_ARG_REGS[1], key_opnd);
2273 jit_prepare_routine_call(jit, ctx, REG0);
2278 (void)ctx_stack_pop(ctx, 2);
2281 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
2282 mov(cb, stack_ret, RAX);
2285 jit_jump_to_next_insn(jit, ctx);
2286 return YJIT_END_BLOCK;
2290 return gen_opt_send_without_block(jit, ctx, cb);
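// opt_aset: specialize recv[key] = val for Array receivers with Fixnum keys and for Hash receivers, guarded on the classes seen at compile time; otherwise fall back to a dynamic send.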
2294 static codegen_status_t
2298 if (!jit_at_current_insn(jit)) {
2299 defer_compilation(jit, ctx);
2300 return YJIT_END_BLOCK;
2303 VALUE comptime_recv = jit_peek_at_stack(jit, ctx, 2);
2304 VALUE comptime_key = jit_peek_at_stack(jit, ctx, 1);
2307 x86opnd_t recv = ctx_stack_opnd(ctx, 2);
2312 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2315 mov(cb, REG0, recv);
2316 jit_guard_known_klass(jit, ctx, rb_cArray, OPND_STACK(2), comptime_recv, SEND_MAX_DEPTH, side_exit);
2320 jit_guard_known_klass(jit, ctx, rb_cInteger, OPND_STACK(1), comptime_key, SEND_MAX_DEPTH, side_exit);
2323 mov(cb, C_ARG_REGS[0], recv);
2324 mov(cb, C_ARG_REGS[1], key);
2325 sar(cb, C_ARG_REGS[1], imm_opnd(1));
2326 mov(cb, C_ARG_REGS[2], val);
2329 jit_prepare_routine_call(jit, ctx, REG0);
2335 mov(cb, REG0, ctx_stack_opnd(ctx, 0));
2338 ctx_stack_pop(ctx, 3);
2339 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
2340 mov(cb, stack_ret, REG0);
2342 jit_jump_to_next_insn(jit, ctx);
2343 return YJIT_END_BLOCK;
2346 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2349 mov(cb, REG0, recv);
2350 jit_guard_known_klass(jit, ctx, rb_cHash, OPND_STACK(2), comptime_recv, SEND_MAX_DEPTH, side_exit);
2353 mov(cb, C_ARG_REGS[0], recv);
2354 mov(cb, C_ARG_REGS[1], key);
2355 mov(cb, C_ARG_REGS[2], val);
2358 jit_prepare_routine_call(jit, ctx, REG0);
2363 ctx_stack_pop(ctx, 3);
2364 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
2365 mov(cb, stack_ret, RAX);
2367 jit_jump_to_next_insn(jit, ctx);
2368 return YJIT_END_BLOCK;
2371 return gen_opt_send_without_block(jit, ctx, cb);
2375 static codegen_status_t
2379 if (!jit_at_current_insn(jit)) {
2380 defer_compilation(jit, ctx);
2381 return YJIT_END_BLOCK;
2384 VALUE comptime_a = jit_peek_at_stack(jit, ctx, 1);
2385 VALUE comptime_b = jit_peek_at_stack(jit, ctx, 0);
2390 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2392 if (!assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_AND)) {
2393 return YJIT_CANT_COMPILE;
2397 guard_two_fixnums(ctx, side_exit);
2404 mov(cb, REG0, arg0);
2405 and(cb, REG0, arg1);
2408 x86opnd_t dst = ctx_stack_push(ctx, TYPE_FIXNUM);
2411 return YJIT_KEEP_COMPILING;
2415 return gen_opt_send_without_block(jit, ctx, cb);
2419 static codegen_status_t
2423 if (!jit_at_current_insn(jit)) {
2424 defer_compilation(jit, ctx);
2425 return YJIT_END_BLOCK;
2428 VALUE comptime_a = jit_peek_at_stack(jit, ctx, 1);
2429 VALUE comptime_b = jit_peek_at_stack(jit, ctx, 0);
2434 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2436 if (!assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_OR)) {
2437 return YJIT_CANT_COMPILE;
2441 guard_two_fixnums(ctx, side_exit);
2448 mov(cb, REG0, arg0);
2452 x86opnd_t dst = ctx_stack_push(ctx, TYPE_FIXNUM);
2455 return YJIT_KEEP_COMPILING;
2459 return gen_opt_send_without_block(jit, ctx, cb);
2463 static codegen_status_t
2467 if (!jit_at_current_insn(jit)) {
2468 defer_compilation(jit, ctx);
2469 return YJIT_END_BLOCK;
2472 VALUE comptime_a = jit_peek_at_stack(jit, ctx, 1);
2473 VALUE comptime_b = jit_peek_at_stack(jit, ctx, 0);
2478 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2480 if (!assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_MINUS)) {
2481 return YJIT_CANT_COMPILE;
2485 guard_two_fixnums(ctx, side_exit);
2492 mov(cb, REG0, arg0);
2493 sub(cb, REG0, arg1);
2494 jo_ptr(cb, side_exit);
2495 add(cb, REG0, imm_opnd(1));
2498 x86opnd_t dst = ctx_stack_push(ctx, TYPE_FIXNUM);
2501 return YJIT_KEEP_COMPILING;
2505 return gen_opt_send_without_block(jit, ctx, cb);
2509 static codegen_status_t
2513 if (!jit_at_current_insn(jit)) {
2514 defer_compilation(jit, ctx);
2515 return YJIT_END_BLOCK;
2518 VALUE comptime_a = jit_peek_at_stack(jit, ctx, 1);
2519 VALUE comptime_b = jit_peek_at_stack(jit, ctx, 0);
2524 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2526 if (!assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_PLUS)) {
2527 return YJIT_CANT_COMPILE;
2531 guard_two_fixnums(ctx, side_exit);
2538 mov(cb, REG0, arg0);
2539 sub(cb, REG0, imm_opnd(1));
2540 add(cb, REG0, arg1);
2541 jo_ptr(cb, side_exit);
2544 x86opnd_t dst = ctx_stack_push(ctx, TYPE_FIXNUM);
2547 return YJIT_KEEP_COMPILING;
2551 return gen_opt_send_without_block(jit, ctx, cb);
2555 static codegen_status_t
2559 return gen_opt_send_without_block(jit, ctx, cb);
2562 static codegen_status_t
2566 return gen_opt_send_without_block(jit, ctx, cb);
2571 static codegen_status_t
2576 jit_prepare_routine_call(jit, ctx, REG0);
2578 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2585 mov(cb, C_ARG_REGS[0], arg0);
2586 mov(cb, C_ARG_REGS[1], arg1);
2587 call_ptr(cb, REG0, (void *)rb_vm_opt_mod);
2590 cmp(cb, RAX, imm_opnd(Qundef));
2591 je_ptr(cb, side_exit);
2594 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
2595 mov(cb, stack_ret, RAX);
2597 return YJIT_KEEP_COMPILING;
2600 static codegen_status_t
2604 return gen_opt_send_without_block(jit, ctx, cb);
2607 static codegen_status_t
2611 return gen_opt_send_without_block(jit, ctx, cb);
2614 static codegen_status_t
2618 return gen_opt_send_without_block(jit, ctx, cb);
2621 static codegen_status_t
2624 if (!assume_bop_not_redefined(jit, STRING_REDEFINED_OP_FLAG, BOP_FREEZE)) {
2625 return YJIT_CANT_COMPILE;
2628 VALUE str = jit_get_arg(jit, 0);
2629 jit_mov_gc_ptr(jit, cb, REG0, str);
2632 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_STRING);
2633 mov(cb, stack_ret, REG0);
2635 return YJIT_KEEP_COMPILING;
2638 static codegen_status_t
2641 if (!assume_bop_not_redefined(jit, STRING_REDEFINED_OP_FLAG, BOP_UMINUS)) {
2642 return YJIT_CANT_COMPILE;
2645 VALUE str = jit_get_arg(jit, 0);
2646 jit_mov_gc_ptr(jit, cb, REG0, str);
2649 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_STRING);
2650 mov(cb, stack_ret, REG0);
2652 return YJIT_KEEP_COMPILING;
2655 static codegen_status_t
2658 return gen_opt_send_without_block(jit, ctx, cb);
2661 static codegen_status_t
2664 return gen_opt_send_without_block(jit, ctx, cb);
2667 static codegen_status_t
2670 return gen_opt_send_without_block(jit, ctx, cb);
2673 static codegen_status_t
2676 return gen_opt_send_without_block(jit, ctx, cb);
2679 static codegen_status_t
2690 ctx_stack_pop(ctx, 1);
2692 return YJIT_KEEP_COMPILING;
2696 gen_branchif_branch(
codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
2700 jz_ptr(cb, target1);
2704 jnz_ptr(cb, target0);
2708 jnz_ptr(cb, target0);
2709 jmp_ptr(cb, target1);
2714 static codegen_status_t
2717 int32_t jump_offset = (int32_t)jit_get_arg(jit, 0);
2720 if (jump_offset < 0) {
2721 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2722 yjit_check_ints(cb, side_exit);
2728 x86opnd_t val_opnd = ctx_stack_pop(ctx, 1);
2729 test(cb, val_opnd, imm_opnd(~Qnil));
2732 uint32_t next_idx = jit_next_insn_idx(jit);
2733 uint32_t jump_idx = next_idx + jump_offset;
2734 blockid_t next_block = { jit->iseq, next_idx };
2735 blockid_t jump_block = { jit->iseq, jump_idx };
2748 return YJIT_END_BLOCK;
2752 gen_branchunless_branch(
codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
2756 jnz_ptr(cb, target1);
2760 jz_ptr(cb, target0);
2764 jz_ptr(cb, target0);
2765 jmp_ptr(cb, target1);
2770 static codegen_status_t
2773 int32_t jump_offset = (int32_t)jit_get_arg(jit, 0);
2776 if (jump_offset < 0) {
2777 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2778 yjit_check_ints(cb, side_exit);
2784 x86opnd_t val_opnd = ctx_stack_pop(ctx, 1);
2785 test(cb, val_opnd, imm_opnd(~Qnil));
2788 uint32_t next_idx = jit_next_insn_idx(jit);
2789 uint32_t jump_idx = next_idx + jump_offset;
2790 blockid_t next_block = { jit->iseq, next_idx };
2791 blockid_t jump_block = { jit->iseq, jump_idx };
2801 gen_branchunless_branch
2804 return YJIT_END_BLOCK;
2808 gen_branchnil_branch(
codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
2812 jne_ptr(cb, target1);
2816 je_ptr(cb, target0);
2820 je_ptr(cb, target0);
2821 jmp_ptr(cb, target1);
2826 static codegen_status_t
2829 int32_t jump_offset = (int32_t)jit_get_arg(jit, 0);
2832 if (jump_offset < 0) {
2833 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2834 yjit_check_ints(cb, side_exit);
2839 x86opnd_t val_opnd = ctx_stack_pop(ctx, 1);
2840 cmp(cb, val_opnd, imm_opnd(Qnil));
2843 uint32_t next_idx = jit_next_insn_idx(jit);
2844 uint32_t jump_idx = next_idx + jump_offset;
2845 blockid_t next_block = { jit->iseq, next_idx };
2846 blockid_t jump_block = { jit->iseq, jump_idx };
2856 gen_branchnil_branch
2859 return YJIT_END_BLOCK;
2862 static codegen_status_t
2865 int32_t jump_offset = (int32_t)jit_get_arg(jit, 0);
2868 if (jump_offset < 0) {
2869 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2870 yjit_check_ints(cb, side_exit);
2874 uint32_t jump_idx = jit_next_insn_idx(jit) + jump_offset;
2875 blockid_t jump_block = { jit->iseq, jump_idx };
2884 return YJIT_END_BLOCK;
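// jit_guard_known_klass: emit the cheapest guard proving the operand has the class seen at compile time, using immediate checks for nil/true/false/Fixnum/static symbol/flonum and an RBasic.klass comparison for heap objects, upgrading the context's known type along the way.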
2898 val_type_t val_type = ctx_get_opnd_type(ctx, insn_opnd);
2902 if (val_type.type != ETYPE_NIL) {
2905 ADD_COMMENT(cb, "guard object is nil");
2906 cmp(cb, REG0, imm_opnd(Qnil));
2907 jit_chain_guard(JCC_JNE, jit, ctx, max_chain_depth, side_exit);
2909 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_NIL);
2914 if (val_type.type != ETYPE_TRUE) {
2917 ADD_COMMENT(cb, "guard object is true");
2918 cmp(cb, REG0, imm_opnd(Qtrue));
2919 jit_chain_guard(JCC_JNE, jit, ctx, max_chain_depth, side_exit);
2921 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_TRUE);
2926 if (val_type.type != ETYPE_FALSE) {
2929 ADD_COMMENT(cb, "guard object is false");
2930 STATIC_ASSERT(qfalse_is_zero, Qfalse == 0);
2931 test(cb, REG0, REG0);
2932 jit_chain_guard(JCC_JNZ, jit, ctx, max_chain_depth, side_exit);
2934 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_FALSE);
2941 if (val_type.type != ETYPE_FIXNUM || !val_type.is_imm) {
2944 ADD_COMMENT(cb, "guard object is fixnum");
2946 jit_chain_guard(JCC_JZ, jit, ctx, max_chain_depth, side_exit);
2947 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_FIXNUM);
2954 if (val_type.type != ETYPE_SYMBOL || !val_type.is_imm) {
2957 ADD_COMMENT(cb, "guard object is static symbol");
2960 jit_chain_guard(JCC_JNE, jit, ctx, max_chain_depth, side_exit);
2961 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_STATIC_SYMBOL);
2966 if (val_type.type != ETYPE_FLONUM || !val_type.is_imm) {
2970 ADD_COMMENT(cb, "guard object is flonum");
2971 mov(cb, REG1, REG0);
2974 jit_chain_guard(JCC_JNE, jit, ctx, max_chain_depth, side_exit);
2975 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_FLONUM);
2989 ADD_COMMENT(cb, "guard known object with singleton class");
2991 jit_mov_gc_ptr(jit, cb, REG1, sample_instance);
2992 cmp(cb, REG0, REG1);
2993 jit_chain_guard(JCC_JNE, jit, ctx, max_chain_depth, side_exit);
3000 if (!val_type.is_heap) {
3001 ADD_COMMENT(cb, "guard not immediate");
3004 jit_chain_guard(JCC_JNZ, jit, ctx, max_chain_depth, side_exit);
3005 cmp(cb, REG0, imm_opnd(Qnil));
3006 jit_chain_guard(JCC_JBE, jit, ctx, max_chain_depth, side_exit);
3008 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_HEAP);
3011 x86opnd_t klass_opnd = mem_opnd(64, REG0, offsetof(struct RBasic, klass));
3015 ADD_COMMENT(cb, "guard known class");
3016 jit_mov_gc_ptr(jit, cb, REG1, known_klass);
3017 cmp(cb, klass_opnd, REG1);
3018 jit_chain_guard(JCC_JNE, jit, ctx, max_chain_depth, side_exit);
3031 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[1], cme->defined_class);
3036 jz_ptr(cb, COUNTED_EXIT(jit, side_exit, send_se_protected_check_failed));
3050 yjit_reg_method(VALUE klass, const char *mid_str, method_codegen_t gen_fn)
3061 RUBY_ASSERT(me->def->type == VM_METHOD_TYPE_CFUNC);
3063 st_insert(yjit_method_codegen_table, (st_data_t)me->def->method_serial, (st_data_t)gen_fn);
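// Specialized codegen for a handful of core C methods (e.g. Kernel#!, #nil?, BasicObject#== / #equal?, String#bytesize, String#to_s, Thread.current), registered through yjit_reg_method().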
3072 const val_type_t recv_opnd = ctx_get_opnd_type(ctx, OPND_STACK(0));
3074 if (recv_opnd.type == ETYPE_NIL || recv_opnd.type == ETYPE_FALSE) {
3075 ADD_COMMENT(cb, "rb_obj_not(nil_or_false)");
3076 ctx_stack_pop(ctx, 1);
3077 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_TRUE);
3078 mov(cb, out_opnd, imm_opnd(Qtrue));
3080 else if (recv_opnd.is_heap || recv_opnd.type != ETYPE_UNKNOWN) {
3082 ADD_COMMENT(cb, "rb_obj_not(truthy)");
3083 ctx_stack_pop(ctx, 1);
3084 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_FALSE);
3085 mov(cb, out_opnd, imm_opnd(Qfalse));
3100 ADD_COMMENT(cb, "nil? == true");
3101 ctx_stack_pop(ctx, 1);
3102 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_TRUE);
3103 mov(cb, stack_ret, imm_opnd(Qtrue));
3111 ADD_COMMENT(cb, "nil? == false");
3112 ctx_stack_pop(ctx, 1);
3113 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_FALSE);
3114 mov(cb, stack_ret, imm_opnd(Qfalse));
3123 ADD_COMMENT(cb, "equal?");
3127 mov(cb, REG0, obj1);
3128 cmp(cb, REG0, obj2);
3129 mov(cb, REG0, imm_opnd(Qtrue));
3130 mov(cb, REG1, imm_opnd(Qfalse));
3131 cmovne(cb, REG0, REG1);
3133 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_IMM);
3134 mov(cb, stack_ret, REG0);
3139 yjit_str_bytesize(VALUE str)
3147 ADD_COMMENT(cb, "String#bytesize");
3150 mov(cb, C_ARG_REGS[0], recv);
3151 call_ptr(cb, REG0, (void *)&yjit_str_bytesize);
3153 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_FIXNUM);
3154 mov(cb, out_opnd, RAX);
3166 if (recv_known_klass && *recv_known_klass == rb_cString) {
3167 ADD_COMMENT(cb, "to_s on plain string");
3178 ADD_COMMENT(cb, "Thread.current");
3179 ctx_stack_pop(ctx, 1);
3185 mov(cb, REG0, member_opnd(REG0, rb_thread_t, self));
3187 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_HEAP);
3188 mov(cb, stack_ret, REG0);
3193 static method_codegen_t
3196 method_codegen_t gen_fn;
3197 if (st_lookup(yjit_method_codegen_table, def->method_serial, (st_data_t *)&gen_fn)) {
3205 c_method_tracing_currently_enabled(const jitstate_t *jit)
3208 if (rb_multi_ractor_p()) {
3209 tracing_events = ruby_vm_event_enabled_global_flags;
3215 tracing_events = rb_ec_ractor_hooks(jit->ec)->events;
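// Compile a call to a C method (cfunc): bail out on array-on-stack varargs, argc mismatches, too many arguments for the C calling convention, or active C method tracing; otherwise push a CFUNC frame and call the function directly.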
3221 static codegen_status_t
3227 if (cfunc->argc < 0 && cfunc->argc != -1) {
3228 GEN_COUNTER_INC(cb, send_cfunc_ruby_array_varg);
3229 return YJIT_CANT_COMPILE;
3233 if (cfunc->argc >= 0 && cfunc->argc != argc) {
3234 GEN_COUNTER_INC(cb, send_cfunc_argc_mismatch);
3235 return YJIT_CANT_COMPILE;
3239 if (cfunc->argc >= 0 && argc + 1 > NUM_C_ARG_REGS) {
3240 GEN_COUNTER_INC(cb, send_cfunc_toomany_args);
3241 return YJIT_CANT_COMPILE;
3244 if (c_method_tracing_currently_enabled(jit)) {
3246 GEN_COUNTER_INC(cb, send_cfunc_tracing);
3247 return YJIT_CANT_COMPILE;
3252 method_codegen_t known_cfunc_codegen;
3253 if ((known_cfunc_codegen = lookup_cfunc_codegen(cme->def))) {
3254 if (known_cfunc_codegen(jit, ctx, ci, cme, block, argc, recv_known_klass)) {
3257 jit_jump_to_next_insn(jit, ctx);
3258 return YJIT_END_BLOCK;
3273 uint8_t *side_exit = yjit_side_exit(jit, ctx);
3276 yjit_check_ints(cb, side_exit);
3282 cmp(cb, REG_CFP, REG0);
3283 jle_ptr(cb, COUNTED_EXIT(jit, side_exit, send_se_cf_overflow));
3286 x86opnd_t recv = ctx_stack_opnd(ctx, argc);
3289 jit_save_pc(jit, REG0);
3295 jit_mov_gc_ptr(jit, cb, REG0, (VALUE)block);
3301 lea(cb, REG0, ctx_sp_opnd(ctx, sizeof(VALUE) * 3));
3307 jit_mov_gc_ptr(jit, cb, REG1, (VALUE)cme);
3308 mov(cb, mem_opnd(64, REG0, 8 * -3), REG1);
3315 or(cb, REG1, imm_opnd(1));
3316 mov(cb, mem_opnd(64, REG0, 8 * -2), REG1);
3319 mov(cb, mem_opnd(64, REG0, 8 * -2), imm_opnd(VM_BLOCK_HANDLER_NONE));
3324 uint64_t frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3325 mov(cb, mem_opnd(64, REG0, 8 * -1), imm_opnd(frame_type));
3350 sub(cb, REG0, imm_opnd(sizeof(VALUE)));
3352 mov(cb, REG0, recv);
3356 if (YJIT_CHECK_MODE > 0) {
3358 mov(cb, C_ARG_REGS[0], recv);
3359 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[1], (VALUE)ci);
3360 mov(cb, C_ARG_REGS[2], const_ptr_opnd((void *)cfunc->func));
3361 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[3], (VALUE)cme);
3362 call_ptr(cb, REG0, (void *)&check_cfunc_dispatch);
3366 lea(cb, RAX, ctx_sp_opnd(ctx, 0));
3369 ctx_stack_pop(ctx, argc + 1);
3373 jit_save_sp(jit, ctx);
3376 if (cfunc->argc >= 0) {
3379 for (int32_t i = 0; i < argc + 1; ++i)
3383 mov(cb, c_arg_reg, stack_opnd);
3387 if (cfunc->argc == -1) {
3390 mov(cb, C_ARG_REGS[0], imm_opnd(argc));
3391 lea(cb, C_ARG_REGS[1], mem_opnd(64, RAX, -(argc) * SIZEOF_VALUE));
3392 mov(cb, C_ARG_REGS[2], mem_opnd(64, RAX, -(argc + 1) * SIZEOF_VALUE));
3399 call_ptr(cb, REG0, (void*)cfunc->func);
3402 record_global_inval_patch(cb, outline_full_cfunc_return_pos);
3405 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
3406 mov(cb, stack_ret, RAX);
3416 ctx_clear_local_types(ctx);
3423 jit_jump_to_next_insn(jit, ctx);
3424 return YJIT_END_BLOCK;
3428 gen_return_branch(
codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
3437 mov(cb, REG0, const_ptr_opnd(target0));
3445 rb_leaf_invokebuiltin_iseq_p(const rb_iseq_t *iseq)
3447 unsigned int invokebuiltin_len = insn_len(BIN(opt_invokebuiltin_delegate_leave));
3448 unsigned int leave_len = insn_len(BIN(leave));
3450 return (iseq->body->iseq_size == (invokebuiltin_len + leave_len) &&
3451 rb_vm_insn_addr2opcode((void *)iseq->body->iseq_encoded[0]) == BIN(opt_invokebuiltin_delegate_leave) &&
3452 rb_vm_insn_addr2opcode((void *)iseq->body->iseq_encoded[invokebuiltin_len]) == BIN(leave) &&
3453 iseq->body->builtin_inline_p
3459 rb_leaf_builtin_function(const rb_iseq_t *iseq)
3461 if (!rb_leaf_invokebuiltin_iseq_p(iseq))
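// Compile a call to a Ruby method defined by an ISEQ: check arity and keyword-argument compatibility, optionally inline leaf builtins, push the callee frame, and link the callee body as a separate block.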
3466 static codegen_status_t
3469 const rb_iseq_t *iseq = def_iseq_ptr(cme->def);
3476 const bool doing_kw_call = iseq->body->param.flags.has_kw;
3477 const bool supplying_kws = vm_ci_flag(ci) & VM_CALL_KWARG;
3479 if (vm_ci_flag(ci) & VM_CALL_TAILCALL) {
3481 GEN_COUNTER_INC(cb, send_iseq_tailcall);
3482 return YJIT_CANT_COMPILE;
3487 if (iseq->body->param.flags.has_rest ||
3488 iseq->body->param.flags.has_post ||
3489 iseq->body->param.flags.has_kwrest) {
3490 GEN_COUNTER_INC(cb, send_iseq_complex_callee);
3491 return YJIT_CANT_COMPILE;
3497 if (supplying_kws && !iseq->body->param.flags.has_kw) {
3498 GEN_COUNTER_INC(cb, send_iseq_complex_callee);
3499 return YJIT_CANT_COMPILE;
3504 if (supplying_kws && iseq->body->param.flags.accepts_no_kwarg) {
3505 GEN_COUNTER_INC(cb, send_iseq_complex_callee);
3506 return YJIT_CANT_COMPILE;
3510 int num_params = iseq->body->param.size;
3513 if (iseq->body->param.flags.has_block) {
3514 if (iseq->body->local_iseq == iseq) {
3523 GEN_COUNTER_INC(cb, send_iseq_complex_callee);
3524 return YJIT_CANT_COMPILE;
3528 uint32_t start_pc_offset = 0;
3530 const int required_num = iseq->body->param.lead_num;
3535 const int kw_arg_num = kw_arg ? kw_arg->keyword_len : 0;
3538 const int opts_filled = argc - required_num - kw_arg_num;
3539 const int opt_num = iseq->body->param.opt_num;
3540 const int opts_missing = opt_num - opts_filled;
3542 if (opts_filled < 0 || opts_filled > opt_num) {
3543 GEN_COUNTER_INC(cb, send_iseq_arity_error);
3544 return YJIT_CANT_COMPILE;
3550 if (doing_kw_call && opts_missing > 0) {
3551 GEN_COUNTER_INC(cb, send_iseq_complex_callee);
3552 return YJIT_CANT_COMPILE;
3556 num_params -= opt_num - opts_filled;
3557 start_pc_offset = (uint32_t)iseq->body->param.opt_table[opts_filled];
3560 if (doing_kw_call) {
3566 const struct rb_iseq_param_keyword *keyword = iseq->body->param.keyword;
3568 int required_kwargs_filled = 0;
3570 if (keyword->num > 30) {
3574 GEN_COUNTER_INC(cb, send_iseq_complex_callee);
3575 return YJIT_CANT_COMPILE;
3579 if (supplying_kws) {
3582 const ID *callee_kwargs = keyword->table;
3590 for (int kwarg_idx = 0; kwarg_idx < kw_arg->keyword_len; kwarg_idx++)
3591 caller_kwargs[kwarg_idx] = SYM2ID(kw_arg->keywords[kwarg_idx]);
3596 for (int caller_idx = 0; caller_idx < kw_arg->keyword_len; caller_idx++) {
3599 for (callee_idx = 0; callee_idx < keyword->num; callee_idx++) {
3600 if (caller_kwargs[caller_idx] == callee_kwargs[callee_idx]) {
3608 if (callee_idx == keyword->num) {
3609 GEN_COUNTER_INC(cb, send_iseq_kwargs_mismatch);
3610 return YJIT_CANT_COMPILE;
3614 if (callee_idx < keyword->required_num) {
3615 required_kwargs_filled++;
3620 RUBY_ASSERT(required_kwargs_filled <= keyword->required_num);
3621 if (required_kwargs_filled != keyword->required_num) {
3622 GEN_COUNTER_INC(cb, send_iseq_kwargs_mismatch);
3623 return YJIT_CANT_COMPILE;
3628 const int num_locals = iseq->body->local_table_size - num_params;
3631 uint8_t *side_exit = yjit_side_exit(jit, ctx);
3634 yjit_check_ints(cb, side_exit);
3638 if (leaf_builtin && !block && leaf_builtin->argc + 1 <= NUM_C_ARG_REGS) {
3639 ADD_COMMENT(cb, "inlined leaf builtin");
3641 jit_prepare_routine_call(jit, ctx, REG0);
3644 mov(cb, C_ARG_REGS[0], REG_EC);
3647 for (int32_t i = 0; i < leaf_builtin->argc + 1; i++) {
3648 x86opnd_t stack_opnd = ctx_stack_opnd(ctx, leaf_builtin->argc - i);
3649 x86opnd_t c_arg_reg = C_ARG_REGS[i + 1];
3650 mov(cb, c_arg_reg, stack_opnd);
3652 ctx_stack_pop(ctx, leaf_builtin->argc + 1);
3653 call_ptr(cb, REG0, (void *)leaf_builtin->func_ptr);
3656 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
3657 mov(cb, stack_ret, RAX);
3662 return YJIT_KEEP_COMPILING;
3668 ADD_COMMENT(cb, "stack overflow check");
3669 lea(cb, REG0, ctx_sp_opnd(ctx, sizeof(VALUE) * (num_locals + iseq->body->stack_max) + 2 * sizeof(rb_control_frame_t)));
3670 cmp(cb, REG_CFP, REG0);
3671 jle_ptr(cb, COUNTED_EXIT(jit, side_exit, send_se_cf_overflow));
3673 if (doing_kw_call) {
3679 const int args_before_kw = required_num + opt_num;
3683 int caller_keyword_len = 0;
3684 const VALUE *caller_keywords = NULL;
3685 if (vm_ci_kwarg(ci)) {
3686 caller_keyword_len = vm_ci_kwarg(ci)->keyword_len;
3687 caller_keywords = &vm_ci_kwarg(ci)->keywords[0];
3692 const struct rb_iseq_param_keyword *const keyword = iseq->body->param.keyword;
3694 ADD_COMMENT(cb, "keyword args");
3698 const ID *callee_kwargs = keyword->table;
3700 int total_kwargs = keyword->num;
3709 for (kwarg_idx = 0; kwarg_idx < caller_keyword_len; kwarg_idx++) {
3710 caller_kwargs[kwarg_idx] = SYM2ID(caller_keywords[kwarg_idx]);
3713 int unspecified_bits = 0;
3715 for (int callee_idx = keyword->required_num; callee_idx < total_kwargs; callee_idx++) {
3716 bool already_passed = false;
3717 ID callee_kwarg = callee_kwargs[callee_idx];
3719 for (int caller_idx = 0; caller_idx < caller_keyword_len; caller_idx++) {
3720 if (caller_kwargs[caller_idx] == callee_kwarg) {
3721 already_passed = true;
3726 if (!already_passed) {
3731 x86opnd_t default_arg = ctx_stack_push(ctx, TYPE_UNKNOWN);
3732 VALUE default_value = keyword->default_values[callee_idx - keyword->required_num];
3734 if (default_value ==
Qundef) {
3738 unspecified_bits |= 0x01 << (callee_idx - keyword->required_num);
3739 default_value =
Qnil;
3743 jit_mov_gc_ptr(jit, cb, REG0, default_value);
3744 mov(cb, default_arg, REG0);
3746 caller_kwargs[kwarg_idx++] = callee_kwarg;
3754 for (kwarg_idx = 0; kwarg_idx < total_kwargs; kwarg_idx++) {
3755 ID callee_kwarg = callee_kwargs[kwarg_idx];
3760 if (callee_kwarg == caller_kwargs[kwarg_idx])
continue;
3765 for (
int swap_idx = kwarg_idx + 1; swap_idx < total_kwargs; swap_idx++) {
3766 if (callee_kwarg == caller_kwargs[swap_idx]) {
3769 stack_swap(ctx, cb, argc - 1 - swap_idx - args_before_kw, argc - 1 - kwarg_idx - args_before_kw, REG1, REG0);
3774 ID tmp = caller_kwargs[kwarg_idx];
3775 caller_kwargs[kwarg_idx] = caller_kwargs[swap_idx];
3776 caller_kwargs[swap_idx] = tmp;
3786 mov(cb, ctx_stack_opnd(ctx, -1), imm_opnd(
INT2FIX(unspecified_bits)));
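    /*
     * Illustrative example of the reordering above (not generated code): if
     * the callee declares `def m(a:, b:)` and the caller writes `m(b: 1, a: 2)`,
     * caller_kwargs starts as [b, a] while callee_kwargs is [a, b]. The scan
     * finds `a` at swap_idx 1, emits one stack_swap() so the two values trade
     * places at runtime, and swaps the bookkeeping entries so both lists agree.
     */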
    // Points to the receiver operand on the stack
    x86opnd_t recv = ctx_stack_opnd(ctx, argc);

    // Store the updated SP on the current frame (pop the args and receiver)
    ADD_COMMENT(cb, "store caller sp");
    lea(cb, REG0, ctx_sp_opnd(ctx, sizeof(VALUE) * -(argc + 1)));
    mov(cb, member_opnd(REG_CFP, rb_control_frame_t, sp), REG0);

    // Store the next PC in the current frame
    jit_save_pc(jit, REG0);

    if (block) {
        // Change cfp->block_code in the current frame
        jit_mov_gc_ptr(jit, cb, REG0, (VALUE)block);
        mov(cb, member_opnd(REG_CFP, rb_control_frame_t, block_code), REG0);
    }

    // Point REG0 at the callee's SP: three env slots plus the locals
    // (plus one extra slot for unspecified_bits when doing a kw call)
    lea(cb, REG0, ctx_sp_opnd(ctx, sizeof(VALUE) * (3 + num_locals + doing_kw_call)));

    // Initialize the callee's locals to Qnil
    for (int i = 0; i < num_locals; i++) {
        mov(cb, mem_opnd(64, REG0, sizeof(VALUE) * (i - num_locals - 3)), imm_opnd(Qnil));
    }

    ADD_COMMENT(cb, "push env");
    // The compile-time cme is assumed to stay valid; we are notified when any
    // cme we depend on becomes outdated.
    jit_mov_gc_ptr(jit, cb, REG1, (VALUE)cme);

    // Write the method entry at sp[-3]
    mov(cb, mem_opnd(64, REG0, 8 * -3), REG1);

    // Write the block handler at sp[-2]
    if (block) {
        // Tag the captured block as an iseq block handler
        lea(cb, REG1, member_opnd(REG_CFP, rb_control_frame_t, self));
        or(cb, REG1, imm_opnd(1));
        mov(cb, mem_opnd(64, REG0, 8 * -2), REG1);
    }
    else {
        mov(cb, mem_opnd(64, REG0, 8 * -2), imm_opnd(VM_BLOCK_HANDLER_NONE));
    }

    // Write the env flags at sp[-1]
    uint64_t frame_type = VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL;
    mov(cb, mem_opnd(64, REG0, 8 * -1), imm_opnd(frame_type));

    ADD_COMMENT(cb, "push callee CFP");
    // Allocate the new control frame (ec->cfp--)
    sub(cb, REG_CFP, imm_opnd(sizeof(rb_control_frame_t)));
    mov(cb, member_opnd(REG_EC, rb_execution_context_t, cfp), REG_CFP);

    // Fill in the new frame: self, sp, __bp__, ep, iseq
    mov(cb, REG1, recv);
    mov(cb, member_opnd(REG_CFP, rb_control_frame_t, self), REG1);
    mov(cb, REG_SP, REG0); // Switch to the callee's SP
    mov(cb, member_opnd(REG_CFP, rb_control_frame_t, sp), REG0);
    mov(cb, member_opnd(REG_CFP, rb_control_frame_t, __bp__), REG0);
    sub(cb, REG0, imm_opnd(sizeof(VALUE)));
    mov(cb, member_opnd(REG_CFP, rb_control_frame_t, ep), REG0);
    jit_mov_gc_ptr(jit, cb, REG0, (VALUE)iseq);
    mov(cb, member_opnd(REG_CFP, rb_control_frame_t, iseq), REG0);
    mov(cb, member_opnd(REG_CFP, rb_control_frame_t, block_code), imm_opnd(0));
    // Block id for the instruction right after this send
    blockid_t return_block = { jit->iseq, jit_next_insn_idx(jit) };

    // Create a context for the callee
    ctx_t callee_ctx = DEFAULT_CTX;

    // Set the argument types in the callee's context
    for (int32_t arg_idx = 0; arg_idx < argc; ++arg_idx) {
        val_type_t arg_type = ctx_get_opnd_type(ctx, OPND_STACK(argc - arg_idx - 1));
        ctx_set_local_type(&callee_ctx, arg_idx, arg_type);
    }
    val_type_t recv_type = ctx_get_opnd_type(ctx, OPND_STACK(argc));
    ctx_upgrade_opnd_type(&callee_ctx, OPND_SELF, recv_type);

    // The callee might change locals through Kernel#binding and other means
    ctx_clear_local_types(ctx);

    // Caller context after the return: args and receiver popped,
    // return value pushed, SP offset reset
    ctx_t return_ctx = *ctx;
    ctx_stack_pop(&return_ctx, argc + 1);
    ctx_stack_push(&return_ctx, TYPE_UNKNOWN);
    return_ctx.sp_offset = 1;
    return_ctx.chain_depth = 0;

    // Write the JIT return address on the callee frame
    gen_branch(jit, ctx, return_block, &return_ctx, return_block, &return_ctx, gen_return_branch);

    // Directly jump to the entry point of the callee
    gen_direct_jump(jit, &callee_ctx, (blockid_t){ iseq, start_pc_offset });

    return YJIT_END_BLOCK;
}
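/*
 * Rough sketch of the callee frame set up in gen_send_iseq above, relative to
 * the new SP computed into REG0 (locals first, then the three env slots):
 *
 *     REG0[-3] : cme (method entry)
 *     REG0[-2] : block handler (captured block | 1, or VM_BLOCK_HANDLER_NONE)
 *     REG0[-1] : frame type (VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL)
 *
 * with the num_locals slots below that pre-filled with Qnil.
 */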
static codegen_status_t
gen_struct_aref(jitstate_t *jit, ctx_t *ctx, const struct rb_callinfo *ci, const rb_callable_method_entry_t *cme, VALUE comptime_recv, VALUE comptime_recv_klass)
{
    if (vm_ci_argc(ci) != 0) {
        return YJIT_CANT_COMPILE;
    }

    const unsigned int off = cme->def->body.optimized.index;

    // The member load below uses a 4-byte displacement, so the member offset
    // has to fit in a 32-bit immediate.
    uint64_t native_off = (uint64_t)off * (uint64_t)SIZEOF_VALUE;
    if (native_off > (uint64_t)INT32_MAX) {
        return YJIT_CANT_COMPILE;
    }

    // All runtime instances of a given Struct class share the same layout,
    // so the compile-time receiver tells us whether members are embedded.
    bool embedded = FL_TEST_RAW(comptime_recv, RSTRUCT_EMBED_LEN_MASK);

    ADD_COMMENT(cb, "struct aref");

    x86opnd_t recv = ctx_stack_pop(ctx, 1);
    mov(cb, REG0, recv);

    if (embedded) {
        mov(cb, REG0, member_opnd_idx(REG0, struct RStruct, as.ary, off));
    }
    else {
        mov(cb, REG0, member_opnd(REG0, struct RStruct, as.heap.ptr));
        mov(cb, REG0, mem_opnd(64, REG0, SIZEOF_VALUE * off));
    }

    x86opnd_t ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
    mov(cb, ret, REG0);

    jit_jump_to_next_insn(jit, ctx);
    return YJIT_END_BLOCK;
}
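/*
 * The embedded/heap split above mirrors RStruct's representation: small
 * structs keep their members inline in as.ary, larger ones point to a
 * separate buffer through as.heap.ptr, so the heap case needs one extra
 * indirection. Since every instance of a given Struct class uses the same
 * layout, inspecting the compile-time receiver is enough to pick the path.
 */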
static codegen_status_t
gen_struct_aset(jitstate_t *jit, ctx_t *ctx, const struct rb_callinfo *ci, const rb_callable_method_entry_t *cme, VALUE comptime_recv, VALUE comptime_recv_klass)
{
    if (vm_ci_argc(ci) != 1) {
        return YJIT_CANT_COMPILE;
    }

    const unsigned int off = cme->def->body.optimized.index;

    ADD_COMMENT(cb, "struct aset");

    x86opnd_t val = ctx_stack_pop(ctx, 1);
    x86opnd_t recv = ctx_stack_pop(ctx, 1);

    mov(cb, C_ARG_REGS[0], recv);
    mov(cb, C_ARG_REGS[1], imm_opnd(off));
    mov(cb, C_ARG_REGS[2], val);
    call_ptr(cb, REG0, (void *)RSTRUCT_SET);

    x86opnd_t ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
    mov(cb, ret, RAX);

    jit_jump_to_next_insn(jit, ctx);
    return YJIT_END_BLOCK;
}
static codegen_status_t
gen_send_general(jitstate_t *jit, ctx_t *ctx, struct rb_call_data *cd, rb_iseq_t *block)
{
    const struct rb_callinfo *ci = cd->ci;
    int32_t argc = (int32_t)vm_ci_argc(ci);
    ID mid = vm_ci_mid(ci);

    // Don't JIT calls with keyword splat
    if (vm_ci_flag(ci) & VM_CALL_KW_SPLAT) {
        GEN_COUNTER_INC(cb, send_kw_splat);
        return YJIT_CANT_COMPILE;
    }

    // Don't JIT calls that aren't simple
    if ((vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT) != 0) {
        GEN_COUNTER_INC(cb, send_args_splat);
        return YJIT_CANT_COMPILE;
    }
    if ((vm_ci_flag(ci) & VM_CALL_ARGS_BLOCKARG) != 0) {
        GEN_COUNTER_INC(cb, send_block_arg);
        return YJIT_CANT_COMPILE;
    }

    // Defer compilation so we can specialize on the class of the receiver
    if (!jit_at_current_insn(jit)) {
        defer_compilation(jit, ctx);
        return YJIT_END_BLOCK;
    }

    VALUE comptime_recv = jit_peek_at_stack(jit, ctx, argc);
    VALUE comptime_recv_klass = CLASS_OF(comptime_recv);

    // Guard that the receiver has the same class as the one from compile time
    uint8_t *side_exit = yjit_side_exit(jit, ctx);

    // Points to the receiver operand on the stack
    x86opnd_t recv = ctx_stack_opnd(ctx, argc);
    insn_opnd_t recv_opnd = OPND_STACK(argc);
    mov(cb, REG0, recv);
    if (!jit_guard_known_klass(jit, ctx, comptime_recv_klass, recv_opnd, comptime_recv, SEND_MAX_DEPTH, side_exit)) {
        return YJIT_CANT_COMPILE;
    }

    // Do the method lookup
    const rb_callable_method_entry_t *cme = rb_callable_method_entry(comptime_recv_klass, mid);
    if (!cme) {
        return YJIT_CANT_COMPILE;
    }

    switch (METHOD_ENTRY_VISI(cme)) {
      case METHOD_VISI_PUBLIC:
        // Can always call public methods
        break;
      case METHOD_VISI_PRIVATE:
        if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
            // Private methods can only be called without an explicit receiver
            return YJIT_CANT_COMPILE;
        }
        break;
      case METHOD_VISI_PROTECTED:
        jit_protected_callee_ancestry_guard(jit, cb, cme, side_exit);
        break;
      case METHOD_VISI_UNDEF:
        RUBY_ASSERT(false && "cmes should always have a visibility");
        break;
    }

    // Register this lookup so the block is invalidated if the method changes
    assume_method_lookup_stable(comptime_recv_klass, cme, jit);
    // Loop so the aliased-method case (VM_METHOD_TYPE_ALIAS) can re-enter the switch
    while (true) {
        switch (cme->def->type) {
          case VM_METHOD_TYPE_ISEQ:
            return gen_send_iseq(jit, ctx, ci, cme, block, argc);
          case VM_METHOD_TYPE_CFUNC:
            if ((vm_ci_flag(ci) & VM_CALL_KWARG) != 0) {
                GEN_COUNTER_INC(cb, send_cfunc_kwargs);
                return YJIT_CANT_COMPILE;
            }
            return gen_send_cfunc(jit, ctx, ci, cme, block, argc, &comptime_recv_klass);
          case VM_METHOD_TYPE_IVAR:
            if (argc != 0) {
                // Argument count mismatch: getters take no arguments
                GEN_COUNTER_INC(cb, send_getter_arity);
                return YJIT_CANT_COMPILE;
            }
            if (c_method_tracing_currently_enabled(jit)) {
                // Can't generate code to fire c_call/c_return events
                GEN_COUNTER_INC(cb, send_cfunc_tracing);
                return YJIT_CANT_COMPILE;
            }
            mov(cb, REG0, recv);

            ID ivar_name = cme->def->body.attr.id;
            return gen_get_ivar(jit, ctx, SEND_MAX_DEPTH, comptime_recv, ivar_name, recv_opnd, side_exit);
          case VM_METHOD_TYPE_ATTRSET:
            if ((vm_ci_flag(ci) & VM_CALL_KWARG) != 0) {
                GEN_COUNTER_INC(cb, send_attrset_kwargs);
                return YJIT_CANT_COMPILE;
            }
            else if (argc != 1 || !RB_TYPE_P(comptime_recv, T_OBJECT)) {
                GEN_COUNTER_INC(cb, send_ivar_set_method);
                return YJIT_CANT_COMPILE;
            }
            else if (c_method_tracing_currently_enabled(jit)) {
                // Can't generate code to fire c_call/c_return events
                GEN_COUNTER_INC(cb, send_cfunc_tracing);
                return YJIT_CANT_COMPILE;
            }
            else {
                ID ivar_name = cme->def->body.attr.id;
                return gen_set_ivar(jit, ctx, comptime_recv, comptime_recv_klass, ivar_name);
            }
          // Block method, e.g. define_method(:foo) { :my_block }
          case VM_METHOD_TYPE_BMETHOD:
            GEN_COUNTER_INC(cb, send_bmethod);
            return YJIT_CANT_COMPILE;
          case VM_METHOD_TYPE_ZSUPER:
            GEN_COUNTER_INC(cb, send_zsuper_method);
            return YJIT_CANT_COMPILE;
          case VM_METHOD_TYPE_ALIAS: {
            // Retrieve the aliased method and re-enter the switch
            cme = rb_aliased_callable_method_entry(cme);
            continue;
          }
          case VM_METHOD_TYPE_UNDEF:
            GEN_COUNTER_INC(cb, send_undef_method);
            return YJIT_CANT_COMPILE;
          case VM_METHOD_TYPE_NOTIMPLEMENTED:
            GEN_COUNTER_INC(cb, send_not_implemented_method);
            return YJIT_CANT_COMPILE;
          case VM_METHOD_TYPE_OPTIMIZED:
            switch (cme->def->body.optimized.type) {
              case OPTIMIZED_METHOD_TYPE_SEND:
                GEN_COUNTER_INC(cb, send_optimized_method_send);
                return YJIT_CANT_COMPILE;
              case OPTIMIZED_METHOD_TYPE_CALL:
                GEN_COUNTER_INC(cb, send_optimized_method_call);
                return YJIT_CANT_COMPILE;
              case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
                GEN_COUNTER_INC(cb, send_optimized_method_block_call);
                return YJIT_CANT_COMPILE;
              case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
                return gen_struct_aref(jit, ctx, ci, cme, comptime_recv, comptime_recv_klass);
              case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
                return gen_struct_aset(jit, ctx, ci, cme, comptime_recv, comptime_recv_klass);
              default:
                rb_bug("unknown optimized method type (%d)", cme->def->body.optimized.type);
            }
            UNREACHABLE_RETURN(YJIT_CANT_COMPILE);
          case VM_METHOD_TYPE_MISSING:
            GEN_COUNTER_INC(cb, send_missing_method);
            return YJIT_CANT_COMPILE;
          case VM_METHOD_TYPE_REFINED:
            GEN_COUNTER_INC(cb, send_refined_method);
            return YJIT_CANT_COMPILE;
        }
    }
}
static codegen_status_t
gen_opt_send_without_block(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    struct rb_call_data *cd = (struct rb_call_data *)jit_get_arg(jit, 0);
    return gen_send_general(jit, ctx, cd, NULL);
}

static codegen_status_t
gen_send(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    struct rb_call_data *cd = (struct rb_call_data *)jit_get_arg(jit, 0);
    rb_iseq_t *block = (rb_iseq_t *)jit_get_arg(jit, 1);
    return gen_send_general(jit, ctx, cd, block);
}
static codegen_status_t
gen_invokesuper(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    struct rb_call_data *cd = (struct rb_call_data *)jit_get_arg(jit, 0);
    rb_iseq_t *block = (rb_iseq_t *)jit_get_arg(jit, 1);

    // Defer compilation so we can specialize on the class of the receiver
    if (!jit_at_current_insn(jit)) {
        defer_compilation(jit, ctx);
        return YJIT_END_BLOCK;
    }

    const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(jit->ec->cfp);
    if (!me) {
        return YJIT_CANT_COMPILE;
    }

    VALUE current_defined_class = me->defined_class;
    ID mid = me->def->original_id;

    if (me != rb_callable_method_entry(current_defined_class, me->called_id)) {
        // assume_method_lookup_stable() below requires that the method lookup
        // still resolves to this exact method entry.
        return YJIT_CANT_COMPILE;
    }

    // vm_search_normal_superclass(): refinement modules are not supported
    if (BUILTIN_TYPE(current_defined_class) == T_ICLASS && FL_TEST_RAW(RBASIC(current_defined_class)->klass, RMODULE_IS_REFINEMENT)) {
        return YJIT_CANT_COMPILE;
    }
    VALUE comptime_superclass = RCLASS_SUPER(RCLASS_ORIGIN(current_defined_class));

    const struct rb_callinfo *ci = cd->ci;
    int32_t argc = (int32_t)vm_ci_argc(ci);

    // Don't JIT calls that aren't simple. Note: not VM_CALL_ARGS_SIMPLE,
    // because we sometimes pass a block.
    if ((vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT) != 0) {
        GEN_COUNTER_INC(cb, send_args_splat);
        return YJIT_CANT_COMPILE;
    }
    if ((vm_ci_flag(ci) & VM_CALL_KWARG) != 0) {
        GEN_COUNTER_INC(cb, send_keywords);
        return YJIT_CANT_COMPILE;
    }
    if ((vm_ci_flag(ci) & VM_CALL_KW_SPLAT) != 0) {
        GEN_COUNTER_INC(cb, send_kw_splat);
        return YJIT_CANT_COMPILE;
    }
    if ((vm_ci_flag(ci) & VM_CALL_ARGS_BLOCKARG) != 0) {
        GEN_COUNTER_INC(cb, send_block_arg);
        return YJIT_CANT_COMPILE;
    }

    // Ensure this method hasn't been rebound onto an incompatible class
    VALUE comptime_recv = jit_peek_at_stack(jit, ctx, argc);
    if (!rb_obj_is_kind_of(comptime_recv, current_defined_class)) {
        return YJIT_CANT_COMPILE;
    }

    // Do the method lookup in the superclass
    const rb_callable_method_entry_t *cme = rb_callable_method_entry(comptime_superclass, mid);
    if (!cme) {
        return YJIT_CANT_COMPILE;
    }

    // Check that we'll be able to generate this dispatch before emitting guards
    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
      case VM_METHOD_TYPE_CFUNC:
        break;
      default:
        // Other method types are not supported yet
        return YJIT_CANT_COMPILE;
    }

    uint8_t *side_exit = yjit_side_exit(jit, ctx);

    if (jit->ec->cfp->ep[VM_ENV_DATA_INDEX_ME_CREF] != (VALUE)me) {
        // This will be the case for super within a block
        return YJIT_CANT_COMPILE;
    }

    ADD_COMMENT(cb, "guard known me");
    mov(cb, REG0, member_opnd(REG_CFP, rb_control_frame_t, ep));
    x86opnd_t ep_me_opnd = mem_opnd(64, REG0, SIZEOF_VALUE * VM_ENV_DATA_INDEX_ME_CREF);
    jit_mov_gc_ptr(jit, cb, REG1, (VALUE)me);
    cmp(cb, ep_me_opnd, REG1);
    jne_ptr(cb, COUNTED_EXIT(jit, side_exit, invokesuper_me_changed));

    if (!block) {
        // Guard that no block is passed at runtime either
        ADD_COMMENT(cb, "guard no block given");
        x86opnd_t ep_specval_opnd = mem_opnd(64, REG0, SIZEOF_VALUE * VM_ENV_DATA_INDEX_SPECVAL);
        cmp(cb, ep_specval_opnd, imm_opnd(VM_BLOCK_HANDLER_NONE));
        jne_ptr(cb, COUNTED_EXIT(jit, side_exit, invokesuper_block));
    }

    // Points to the receiver operand on the stack
    x86opnd_t recv = ctx_stack_opnd(ctx, argc);
    mov(cb, REG0, recv);

    // Both the current and the super method entries must stay stable
    assume_method_lookup_stable(current_defined_class, me, jit);
    assume_method_lookup_stable(comptime_superclass, cme, jit);

    // Method calls may corrupt types
    ctx_clear_local_types(ctx);

    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return gen_send_iseq(jit, ctx, ci, cme, block, argc);
      case VM_METHOD_TYPE_CFUNC:
        return gen_send_cfunc(jit, ctx, ci, cme, block, argc, NULL);
      default:
        break;
    }

    // Unreachable: the earlier switch filtered the method types
    return YJIT_CANT_COMPILE;
}
static codegen_status_t
gen_leave(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    // Create a side-exit to fall back to the interpreter
    uint8_t *side_exit = yjit_side_exit(jit, ctx);

    ADD_COMMENT(cb, "check for interrupts");
    yjit_check_ints(cb, COUNTED_EXIT(jit, side_exit, leave_se_interrupt));

    // Load the return value and write it to the caller's stack top
    mov(cb, REG0, ctx_stack_pop(ctx, 1));
    mov(cb, mem_opnd(64, REG_SP, 0), REG0);

    // Jump to the JIT return address on the frame that was just popped
    const int32_t offset_to_jit_return = -((int32_t)sizeof(rb_control_frame_t)) + (int32_t)offsetof(rb_control_frame_t, jit_return);
    jmp_rm(cb, mem_opnd(64, REG_CFP, offset_to_jit_return));

    return YJIT_END_BLOCK;
}
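/*
 * The indirect jump above reads a return address stored in the control frame
 * (the jit_return slot addressed by offset_to_jit_return). That slot is what
 * lets a `leave` from compiled code return straight into the caller's
 * compiled continuation instead of bouncing through the interpreter.
 */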
RUBY_EXTERN rb_serial_t ruby_vm_global_constant_state;

static codegen_status_t
gen_getglobal(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    ID gid = jit_get_arg(jit, 0);

    // Save the PC and SP because the global read might run Ruby code
    jit_prepare_routine_call(jit, ctx, REG0);

    mov(cb, C_ARG_REGS[0], imm_opnd(gid));
    call_ptr(cb, REG0, (void *)&rb_gvar_get);

    x86opnd_t top = ctx_stack_push(ctx, TYPE_UNKNOWN);
    mov(cb, top, RAX);

    return YJIT_KEEP_COMPILING;
}
static codegen_status_t
gen_setglobal(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    ID gid = jit_get_arg(jit, 0);

    // Save the PC and SP because the global write can run trace hooks
    jit_prepare_routine_call(jit, ctx, REG0);

    mov(cb, C_ARG_REGS[0], imm_opnd(gid));
    x86opnd_t val = ctx_stack_pop(ctx, 1);
    mov(cb, C_ARG_REGS[1], val);
    call_ptr(cb, REG0, (void *)&rb_gvar_set);

    return YJIT_KEEP_COMPILING;
}
static codegen_status_t
gen_anytostring(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    // Save the PC and SP since we might call #to_s
    jit_prepare_routine_call(jit, ctx, REG0);

    x86opnd_t str = ctx_stack_pop(ctx, 1);
    x86opnd_t val = ctx_stack_pop(ctx, 1);

    mov(cb, C_ARG_REGS[0], str);
    mov(cb, C_ARG_REGS[1], val);
    call_ptr(cb, REG0, (void *)rb_obj_as_string_result);

    // Push the resulting String
    x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_STRING);
    mov(cb, stack_ret, RAX);

    return YJIT_KEEP_COMPILING;
}
static codegen_status_t
gen_objtostring(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    if (!jit_at_current_insn(jit)) {
        defer_compilation(jit, ctx);
        return YJIT_END_BLOCK;
    }

    struct rb_call_data *cd = (struct rb_call_data *)jit_get_arg(jit, 0);

    x86opnd_t recv = ctx_stack_opnd(ctx, 0);
    VALUE comptime_recv = jit_peek_at_stack(jit, ctx, 0);

    if (RB_TYPE_P(comptime_recv, T_STRING)) {
        uint8_t *side_exit = yjit_side_exit(jit, ctx);

        mov(cb, REG0, recv);
        jit_guard_known_klass(jit, ctx, CLASS_OF(comptime_recv), OPND_STACK(0), comptime_recv, SEND_MAX_DEPTH, side_exit);
        // The receiver is already a String: nothing more to do
        return YJIT_KEEP_COMPILING;
    }
    else {
        // Fall back to a regular #to_s send
        return gen_send_general(jit, ctx, cd, NULL);
    }
}
static codegen_status_t
gen_toregexp(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    rb_num_t opt = jit_get_arg(jit, 0);
    rb_num_t cnt = jit_get_arg(jit, 1);

    // Save the PC and SP: this allocates and can raise
    jit_prepare_routine_call(jit, ctx, REG0);

    x86opnd_t values_ptr = ctx_sp_opnd(ctx, -(sizeof(VALUE) * (uint32_t)cnt));
    ctx_stack_pop(ctx, cnt);

    // Build a temporary array of the source strings
    mov(cb, C_ARG_REGS[0], imm_opnd(0));
    mov(cb, C_ARG_REGS[1], imm_opnd(cnt));
    lea(cb, C_ARG_REGS[2], values_ptr);
    call_ptr(cb, REG0, (void *)&rb_ary_tmp_new_from_values);

    // Save the temp array so we can clear it after building the Regexp
    push(cb, RAX);
    push(cb, RAX); // Alignment

    mov(cb, C_ARG_REGS[0], RAX);
    mov(cb, C_ARG_REGS[1], imm_opnd(opt));
    call_ptr(cb, REG0, (void *)&rb_reg_new_ary);

    // The new Regexp is in RAX; recover the temp array
    pop(cb, REG1); // Alignment
    pop(cb, C_ARG_REGS[0]);

    x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
    mov(cb, stack_ret, RAX);

    // Clear the temp array
    call_ptr(cb, REG0, (void *)&rb_ary_clear);

    return YJIT_KEEP_COMPILING;
}
static codegen_status_t
gen_intern(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    // Save the PC and SP because we might allocate
    jit_prepare_routine_call(jit, ctx, REG0);

    x86opnd_t str = ctx_stack_pop(ctx, 1);
    mov(cb, C_ARG_REGS[0], str);
    call_ptr(cb, REG0, (void *)rb_str_intern);

    x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
    mov(cb, stack_ret, RAX);

    return YJIT_KEEP_COMPILING;
}
static codegen_status_t
gen_getspecial(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    // This takes two arguments: key and type. key is only used when type == 0.
    rb_num_t type = jit_get_arg(jit, 1);

    if (type == 0) {
        // "special" local variables are not supported yet
        return YJIT_CANT_COMPILE;
    }
    else if (type & 0x01) {
        // Fetch a "special" backref based on a char encoded by shifting by 1

        // Can raise if matchdata is uninitialized
        jit_prepare_routine_call(jit, ctx, REG0);

        ADD_COMMENT(cb, "rb_backref_get");
        call_ptr(cb, REG0, (void *)rb_backref_get);
        mov(cb, C_ARG_REGS[0], RAX);

        switch (type >> 1) {
          case '&':
            ADD_COMMENT(cb, "rb_reg_last_match");
            call_ptr(cb, REG0, (void *)rb_reg_last_match);
            break;
          case '`':
            ADD_COMMENT(cb, "rb_reg_match_pre");
            call_ptr(cb, REG0, (void *)rb_reg_match_pre);
            break;
          case '\'':
            ADD_COMMENT(cb, "rb_reg_match_post");
            call_ptr(cb, REG0, (void *)rb_reg_match_post);
            break;
          case '+':
            ADD_COMMENT(cb, "rb_reg_match_last");
            call_ptr(cb, REG0, (void *)rb_reg_match_last);
            break;
          default:
            rb_bug("invalid back-ref");
        }

        x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
        mov(cb, stack_ret, RAX);

        return YJIT_KEEP_COMPILING;
    }
    else {
        // Fetch the n-th match from the last backref, n encoded as type >> 1

        // Can raise if matchdata is uninitialized
        jit_prepare_routine_call(jit, ctx, REG0);

        ADD_COMMENT(cb, "rb_backref_get");
        call_ptr(cb, REG0, (void *)rb_backref_get);

        ADD_COMMENT(cb, "rb_reg_nth_match");
        mov(cb, C_ARG_REGS[0], imm_opnd(type >> 1));
        mov(cb, C_ARG_REGS[1], RAX);
        call_ptr(cb, REG0, (void *)rb_reg_nth_match);

        x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
        mov(cb, stack_ret, RAX);

        return YJIT_KEEP_COMPILING;
    }
}
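/*
 * Summary of the dispatch above: when the low bit of `type` is set, the
 * special variable is one of the back-reference shorthands and `type >> 1`
 * selects rb_reg_last_match ($&), rb_reg_match_pre ($`), rb_reg_match_post
 * ($') or rb_reg_match_last ($+); otherwise `type >> 1` is the capture-group
 * number and the value is fetched with rb_reg_nth_match(n, backref).
 */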
static codegen_status_t
gen_getclassvariable(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    // rb_vm_getclassvariable can raise exceptions
    jit_prepare_routine_call(jit, ctx, REG0);

    mov(cb, C_ARG_REGS[0], member_opnd(REG_CFP, rb_control_frame_t, iseq));
    mov(cb, C_ARG_REGS[1], REG_CFP);
    mov(cb, C_ARG_REGS[2], imm_opnd(jit_get_arg(jit, 0)));
    mov(cb, C_ARG_REGS[3], imm_opnd(jit_get_arg(jit, 1)));

    call_ptr(cb, REG0, (void *)rb_vm_getclassvariable);

    x86opnd_t stack_top = ctx_stack_push(ctx, TYPE_UNKNOWN);
    mov(cb, stack_top, RAX);

    return YJIT_KEEP_COMPILING;
}
static codegen_status_t
gen_setclassvariable(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    // rb_vm_setclassvariable can raise exceptions
    jit_prepare_routine_call(jit, ctx, REG0);

    mov(cb, C_ARG_REGS[0], member_opnd(REG_CFP, rb_control_frame_t, iseq));
    mov(cb, C_ARG_REGS[1], REG_CFP);
    mov(cb, C_ARG_REGS[2], imm_opnd(jit_get_arg(jit, 0)));
    mov(cb, C_ARG_REGS[3], ctx_stack_pop(ctx, 1));
    mov(cb, C_ARG_REGS[4], imm_opnd(jit_get_arg(jit, 1)));

    call_ptr(cb, REG0, (void *)rb_vm_setclassvariable);

    return YJIT_KEEP_COMPILING;
}
static codegen_status_t
gen_opt_getinlinecache(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    VALUE jump_offset = jit_get_arg(jit, 0);
    VALUE const_cache_as_value = jit_get_arg(jit, 1);
    IC ic = (IC)const_cache_as_value;

    // See vm_ic_hit_p(); the same conditions are checked on invalidation.
    struct iseq_inline_constant_cache_entry *ice = ic->entry;
    if (!ice || // cache not filled
        GET_IC_SERIAL(ice) != ruby_vm_global_constant_state /* cache out of date */) {
        // Leave a block that unconditionally side exits
        // for the interpreter to invalidate.
        return YJIT_CANT_COMPILE;
    }

    // Make sure there is an exit for this block, as the interpreter might
    // want to invalidate it when the constant cache is updated.
    jit_ensure_block_entry_exit(jit);

    if (ice->ic_cref) {
        // The cache is keyed on a lexical scope: re-check it at runtime
        uint8_t *side_exit = yjit_side_exit(jit, ctx);

        // rb_vm_ic_hit_p doesn't allocate or call methods
        bool rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep);
        mov(cb, C_ARG_REGS[0], const_ptr_opnd((void *)ic));
        mov(cb, C_ARG_REGS[1], member_opnd(REG_CFP, rb_control_frame_t, ep));
        call_ptr(cb, REG0, (void *)rb_vm_ic_hit_p);

        // Side-exit on a cache miss
        test(cb, AL, AL);
        jz_ptr(cb, COUNTED_EXIT(jit, side_exit, opt_getinlinecache_miss));

        // Push ic->entry->value
        mov(cb, REG0, const_ptr_opnd((void *)ic));
        mov(cb, REG0, member_opnd(REG0, struct iseq_inline_constant_cache, entry));
        x86opnd_t stack_top = ctx_stack_push(ctx, TYPE_UNKNOWN);
        mov(cb, REG0, member_opnd(REG0, struct iseq_inline_constant_cache_entry, value));
        mov(cb, stack_top, REG0);
    }
    else {
        // Optimize for single ractor mode and invalidate on any constant write
        if (!assume_single_ractor_mode(jit)) return YJIT_CANT_COMPILE;
        assume_stable_global_constant_state(jit);

        jit_putobject(jit, ctx, ice->value);
    }

    // Jump over the code that fills the cache
    uint32_t jump_idx = jit_next_insn_idx(jit) + (int32_t)jump_offset;
    gen_direct_jump(
        jit,
        ctx,
        (blockid_t){ .iseq = jit->iseq, .idx = jump_idx }
    );

    return YJIT_END_BLOCK;
}
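/*
 * Two strategies coexist above: when the cache is scoped (or under multiple
 * ractors) it is re-validated at runtime through rb_vm_ic_hit_p() and the
 * cached value is loaded from the IC; in single-ractor mode the cached
 * constant is baked into the generated code as an immediate and the block is
 * invalidated if the global constant state ever changes.
 */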
static codegen_status_t
gen_getblockparamproxy(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    // A side exit for when the block handler is not compatible
    uint8_t *side_exit = yjit_side_exit(jit, ctx);

    // EP level
    uint32_t level = (uint32_t)jit_get_arg(jit, 1);

    // Load the environment pointer EP from the CFP
    gen_get_ep(cb, REG0, level);

    // Bail when the block parameter has already been modified
    test(cb, mem_opnd(64, REG0, SIZEOF_VALUE * VM_ENV_DATA_INDEX_FLAGS), imm_opnd(VM_FRAME_FLAG_MODIFIED_BLOCK_PARAM));
    jnz_ptr(cb, COUNTED_EXIT(jit, side_exit, gbpp_block_param_modified));

    // Load the block handler for the current frame
    mov(cb, REG0, mem_opnd(64, REG0, SIZEOF_VALUE * VM_ENV_DATA_INDEX_SPECVAL));

    // The block handler is a tagged pointer; look at the low tag bits
    and(cb, REG0_8, imm_opnd(0x3));

    // Bail unless this is an iseq block handler (tag 0x1); also catches null
    cmp(cb, REG0_8, imm_opnd(0x1));
    jnz_ptr(cb, COUNTED_EXIT(jit, side_exit, gbpp_block_handler_not_iseq));

    // Push rb_block_param_proxy; it's a root, so no need for jit_mov_gc_ptr
    mov(cb, REG0, const_ptr_opnd((void *)rb_block_param_proxy));
    x86opnd_t top = ctx_stack_push(ctx, TYPE_HEAP);
    mov(cb, top, REG0);

    return YJIT_KEEP_COMPILING;
}
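/*
 * The bit tests above rely on block handler tagging: the low two bits of the
 * specval distinguish handler kinds, and 0x1 marks an iseq block. Any other
 * handler kind falls back to the interpreter through the counted exit instead
 * of pushing rb_block_param_proxy.
 */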
static codegen_status_t
gen_invokebuiltin(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    const struct rb_builtin_function *bf = (struct rb_builtin_function *)jit_get_arg(jit, 0);

    // ec, self, and the arguments must all fit in the C argument registers
    if (bf->argc + 2 > NUM_C_ARG_REGS) {
        return YJIT_CANT_COMPILE;
    }

    jit_prepare_routine_call(jit, ctx, REG0);

    // Call the builtin func (ec, self, arg1, arg2, ...)
    mov(cb, C_ARG_REGS[0], REG_EC);
    mov(cb, C_ARG_REGS[1], member_opnd(REG_CFP, rb_control_frame_t, self));

    // Copy the arguments from the YJIT temp stack
    for (int32_t i = 0; i < bf->argc; i++) {
        x86opnd_t stack_opnd = ctx_stack_opnd(ctx, bf->argc - i - 1);
        x86opnd_t c_arg_reg = C_ARG_REGS[2 + i];
        mov(cb, c_arg_reg, stack_opnd);
    }

    call_ptr(cb, REG0, (void *)bf->func_ptr);

    // Push the return value
    ctx_stack_pop(ctx, bf->argc);
    x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
    mov(cb, stack_ret, RAX);

    return YJIT_KEEP_COMPILING;
}
static codegen_status_t
gen_opt_invokebuiltin_delegate(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
{
    const struct rb_builtin_function *bf = (struct rb_builtin_function *)jit_get_arg(jit, 0);
    int32_t start_index = (int32_t)jit_get_arg(jit, 1);

    // ec, self, and the arguments must all fit in the C argument registers
    if (bf->argc + 2 > NUM_C_ARG_REGS) {
        return YJIT_CANT_COMPILE;
    }

    jit_prepare_routine_call(jit, ctx, REG0);

    if (bf->argc > 0) {
        // Load the environment pointer EP from the CFP
        mov(cb, REG0, member_opnd(REG_CFP, rb_control_frame_t, ep));
    }

    // Call the builtin func (ec, self, arg1, arg2, ...)
    mov(cb, C_ARG_REGS[0], REG_EC);
    mov(cb, C_ARG_REGS[1], member_opnd(REG_CFP, rb_control_frame_t, self));

    // Copy the arguments from the method's local slots
    for (int32_t i = 0; i < bf->argc; i++) {
        const int32_t offs = -jit->iseq->body->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index + i;
        x86opnd_t local_opnd = mem_opnd(64, REG0, offs * SIZEOF_VALUE);
        x86opnd_t c_arg_reg = C_ARG_REGS[i + 2];
        mov(cb, c_arg_reg, local_opnd);
    }
    call_ptr(cb, REG0, (void *)bf->func_ptr);

    // Push the return value
    x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
    mov(cb, stack_ret, RAX);

    return YJIT_KEEP_COMPILING;
}
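/*
 * The two builtin paths differ only in where the arguments come from:
 * gen_invokebuiltin above copies them from the YJIT temp stack, while this
 * delegate variant reads them straight out of the method's local slots
 * (start_index onward), which is what the opt_invokebuiltin_delegate and
 * opt_invokebuiltin_delegate_leave instructions encode.
 */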
static int tracing_invalidate_all_i(void *vstart, void *vend, size_t stride, void *data);
static void invalidate_all_blocks_for_tracing(const rb_iseq_t *iseq);

// Invalidate all generated code when TracePoint tracing is enabled: walk the
// heap invalidating each iseq's blocks, then patch every recorded inline
// position to jump to its outlined side exit.
void
rb_yjit_tracing_invalidate_all(void)
{
    if (!rb_yjit_enabled_p()) return;

    // Invalidate the blocks of every iseq on the heap
    rb_objspace_each_objects(tracing_invalidate_all_i, NULL);

    // Apply patches: overwrite each inline position with a jump to the
    // corresponding outlined target.
    const uint32_t old_pos = cb->write_pos;
    rb_darray_for(global_inval_patches, patch_idx) {
        struct codepage_patch patch = rb_darray_get(global_inval_patches, patch_idx);
        cb_set_pos(cb, patch.inline_patch_pos);
        uint8_t *jump_target = cb_get_ptr(ocb, patch.outlined_target_pos);
        jmp_ptr(cb, jump_target);
    }
    cb_set_pos(cb, old_pos);

    // Freeze the invalidated part of the codepage; from now on we only wait
    // for running instances of the code to exit.
    RUBY_ASSERT_ALWAYS(yjit_codepage_frozen_bytes <= old_pos && "frozen bytes should increase monotonically");
    yjit_codepage_frozen_bytes = old_pos;

    cb_mark_all_executable(ocb);
    cb_mark_all_executable(cb);
}
static int
tracing_invalidate_all_i(void *vstart, void *vend, size_t stride, void *data)
{
    VALUE v = (VALUE)vstart;
    for (; v != (VALUE)vend; v += stride) {
        void *ptr = asan_poisoned_object_p(v);
        asan_unpoison_object(v, false);

        if (rb_obj_is_iseq(v)) {
            rb_iseq_t *iseq = (rb_iseq_t *)v;
            invalidate_all_blocks_for_tracing(iseq);
        }

        asan_poison_object_if(ptr, v);
    }
    return 0;
}
static void
invalidate_all_blocks_for_tracing(const rb_iseq_t *iseq)
{
    struct rb_iseq_constant_body *body = iseq->body;
    if (!body) return; // iseq yet to be initialized

    ASSERT_vm_locking();

    // Empty all block versions on the iseq and drop their invalidation hooks
    rb_darray_for(body->yjit_blocks, version_array_idx) {
        rb_yjit_block_array_t version_array = rb_darray_get(body->yjit_blocks, version_array_idx);
        rb_darray_for(version_array, version_idx) {
            // Stop listening for invalidation events like method redefinition
            block_t *block = rb_darray_get(version_array, version_idx);
            yjit_unlink_method_lookup_dependency(block);
            yjit_block_assumptions_free(block);
        }
        rb_darray_free(version_array);
    }
    rb_darray_free(body->yjit_blocks);
    body->yjit_blocks = NULL;

    // Reset the output code entry point
    body->jit_func = NULL;
}
static void
yjit_reg_op(int opcode, codegen_fn gen_fn)
{
    RUBY_ASSERT(opcode >= 0 && opcode < VM_INSTRUCTION_SIZE);
    gen_fns[opcode] = gen_fn;
}

void
yjit_init_codegen(void)
{
    // Initialize the code blocks
    uint32_t mem_size = rb_yjit_opts.exec_mem_size * 1024 * 1024;
    uint8_t *mem_block = alloc_exec_mem(mem_size);

    cb = &block;
    cb_init(cb, mem_block, mem_size/2);

    ocb = &outline_block;
    cb_init(ocb, mem_block + mem_size/2, mem_size/2);

    // Generate the interpreter exit code for leave
    leave_exit_code = yjit_gen_leave_exit(cb);

    // Generate the full exit code for C functions
    gen_full_cfunc_return();
    cb_mark_all_executable(cb);
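    /*
     * Everything below just fills in lookup tables: yjit_reg_op() maps a
     * bytecode opcode to its codegen function in gen_fns[], and
     * yjit_reg_method() (further down) keys known C methods in
     * yjit_method_codegen_table. At compile time the driver can then do,
     * roughly:
     *
     *     codegen_fn gen_fn = gen_fns[opcode];
     *     if (!gen_fn) { ... exit to the interpreter ... }
     *     status = gen_fn(jit, ctx, cb);
     *
     * (sketch only; the actual dispatch lives in the block compilation loop)
     */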
    yjit_reg_op(BIN(nop), gen_nop);
    yjit_reg_op(BIN(dup), gen_dup);
    yjit_reg_op(BIN(dupn), gen_dupn);
    yjit_reg_op(BIN(swap), gen_swap);
    yjit_reg_op(BIN(setn), gen_setn);
    yjit_reg_op(BIN(topn), gen_topn);
    yjit_reg_op(BIN(pop), gen_pop);
    yjit_reg_op(BIN(adjuststack), gen_adjuststack);
    yjit_reg_op(BIN(newarray), gen_newarray);
    yjit_reg_op(BIN(duparray), gen_duparray);
    yjit_reg_op(BIN(duphash), gen_duphash);
    yjit_reg_op(BIN(splatarray), gen_splatarray);
    yjit_reg_op(BIN(expandarray), gen_expandarray);
    yjit_reg_op(BIN(newhash), gen_newhash);
    yjit_reg_op(BIN(newrange), gen_newrange);
    yjit_reg_op(BIN(concatstrings), gen_concatstrings);
    yjit_reg_op(BIN(putnil), gen_putnil);
    yjit_reg_op(BIN(putobject), gen_putobject);
    yjit_reg_op(BIN(putstring), gen_putstring);
    yjit_reg_op(BIN(putobject_INT2FIX_0_), gen_putobject_int2fix);
    yjit_reg_op(BIN(putobject_INT2FIX_1_), gen_putobject_int2fix);
    yjit_reg_op(BIN(putself), gen_putself);
    yjit_reg_op(BIN(putspecialobject), gen_putspecialobject);
    yjit_reg_op(BIN(getlocal), gen_getlocal);
    yjit_reg_op(BIN(getlocal_WC_0), gen_getlocal_wc0);
    yjit_reg_op(BIN(getlocal_WC_1), gen_getlocal_wc1);
    yjit_reg_op(BIN(setlocal), gen_setlocal);
    yjit_reg_op(BIN(setlocal_WC_0), gen_setlocal_wc0);
    yjit_reg_op(BIN(setlocal_WC_1), gen_setlocal_wc1);
    yjit_reg_op(BIN(getinstancevariable), gen_getinstancevariable);
    yjit_reg_op(BIN(setinstancevariable), gen_setinstancevariable);
    yjit_reg_op(BIN(defined), gen_defined);
    yjit_reg_op(BIN(checktype), gen_checktype);
    yjit_reg_op(BIN(checkkeyword), gen_checkkeyword);
    yjit_reg_op(BIN(opt_lt), gen_opt_lt);
    yjit_reg_op(BIN(opt_le), gen_opt_le);
    yjit_reg_op(BIN(opt_ge), gen_opt_ge);
    yjit_reg_op(BIN(opt_gt), gen_opt_gt);
    yjit_reg_op(BIN(opt_eq), gen_opt_eq);
    yjit_reg_op(BIN(opt_neq), gen_opt_neq);
    yjit_reg_op(BIN(opt_aref), gen_opt_aref);
    yjit_reg_op(BIN(opt_aset), gen_opt_aset);
    yjit_reg_op(BIN(opt_and), gen_opt_and);
    yjit_reg_op(BIN(opt_or), gen_opt_or);
    yjit_reg_op(BIN(opt_minus), gen_opt_minus);
    yjit_reg_op(BIN(opt_plus), gen_opt_plus);
    yjit_reg_op(BIN(opt_mult), gen_opt_mult);
    yjit_reg_op(BIN(opt_div), gen_opt_div);
    yjit_reg_op(BIN(opt_mod), gen_opt_mod);
    yjit_reg_op(BIN(opt_ltlt), gen_opt_ltlt);
    yjit_reg_op(BIN(opt_nil_p), gen_opt_nil_p);
    yjit_reg_op(BIN(opt_empty_p), gen_opt_empty_p);
    yjit_reg_op(BIN(opt_str_freeze), gen_opt_str_freeze);
    yjit_reg_op(BIN(opt_str_uminus), gen_opt_str_uminus);
    yjit_reg_op(BIN(opt_not), gen_opt_not);
    yjit_reg_op(BIN(opt_size), gen_opt_size);
    yjit_reg_op(BIN(opt_length), gen_opt_length);
    yjit_reg_op(BIN(opt_regexpmatch2), gen_opt_regexpmatch2);
    yjit_reg_op(BIN(opt_getinlinecache), gen_opt_getinlinecache);
    yjit_reg_op(BIN(invokebuiltin), gen_invokebuiltin);
    yjit_reg_op(BIN(opt_invokebuiltin_delegate), gen_opt_invokebuiltin_delegate);
    yjit_reg_op(BIN(opt_invokebuiltin_delegate_leave), gen_opt_invokebuiltin_delegate);
    yjit_reg_op(BIN(opt_case_dispatch), gen_opt_case_dispatch);
    yjit_reg_op(BIN(branchif), gen_branchif);
    yjit_reg_op(BIN(branchunless), gen_branchunless);
    yjit_reg_op(BIN(branchnil), gen_branchnil);
    yjit_reg_op(BIN(jump), gen_jump);
    yjit_reg_op(BIN(getblockparamproxy), gen_getblockparamproxy);
    yjit_reg_op(BIN(opt_send_without_block), gen_opt_send_without_block);
    yjit_reg_op(BIN(send), gen_send);
    yjit_reg_op(BIN(invokesuper), gen_invokesuper);
    yjit_reg_op(BIN(leave), gen_leave);
    yjit_reg_op(BIN(getglobal), gen_getglobal);
    yjit_reg_op(BIN(setglobal), gen_setglobal);
    yjit_reg_op(BIN(anytostring), gen_anytostring);
    yjit_reg_op(BIN(objtostring), gen_objtostring);
    yjit_reg_op(BIN(toregexp), gen_toregexp);
    yjit_reg_op(BIN(intern), gen_intern);
    yjit_reg_op(BIN(getspecial), gen_getspecial);
    yjit_reg_op(BIN(getclassvariable), gen_getclassvariable);
    yjit_reg_op(BIN(setclassvariable), gen_setclassvariable);
    yjit_method_codegen_table = st_init_numtable();

    // Specializations for known C methods, keyed by method definition
    yjit_reg_method(rb_mKernel, "nil?", jit_rb_false);

    yjit_reg_method(rb_mKernel, "eql?", jit_rb_obj_equal);
    yjit_reg_method(rb_cModule, "==", jit_rb_obj_equal);
    yjit_reg_method(rb_cSymbol, "==", jit_rb_obj_equal);
    yjit_reg_method(rb_cSymbol, "===", jit_rb_obj_equal);

    yjit_reg_method(rb_cString, "to_s", jit_rb_str_to_s);
    yjit_reg_method(rb_cString, "to_str", jit_rb_str_to_s);
    yjit_reg_method(rb_cString, "bytesize", jit_rb_str_bytesize);