11#include "ruby/internal/config.h"
16#include "debug_counter.h"
18#include "internal/class.h"
19#include "internal/compar.h"
20#include "internal/hash.h"
21#include "internal/numeric.h"
22#include "internal/proc.h"
23#include "internal/random.h"
24#include "internal/variable.h"
25#include "internal/struct.h"
31#include "insns_info.inc"
38 int argc,
const VALUE *argv,
int priv);
50ruby_vm_special_exception_copy(
VALUE exc)
53 rb_obj_copy_ivar(e, exc);
61 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
62 ec->raised_flag = RAISED_STACKOVERFLOW;
64 VALUE at = rb_ec_backtrace_object(ec);
65 mesg = ruby_vm_special_exception_copy(mesg);
70 EC_JUMP_TAG(ec, TAG_RAISE);
73NORETURN(
static void vm_stackoverflow(
void));
75NOINLINE(
static COLDFUNC
void vm_stackoverflow(
void));
81 ec_stack_overflow(GET_EC(), TRUE);
89 rb_bug(
"system stack overflow during GC. Faulty native extension?");
92 ec->raised_flag = RAISED_STACKOVERFLOW;
93 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
94 EC_JUMP_TAG(ec, TAG_RAISE);
97 ec_stack_overflow(ec, TRUE);
99 ec_stack_overflow(ec, FALSE);
106callable_class_p(
VALUE klass)
108#if VM_CHECK_MODE >= 2
109 if (!klass)
return FALSE;
137 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment));
139 if (callable_class_p(cme->defined_class)) {
149vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
151 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
152 enum imemo_type cref_or_me_type = imemo_env;
155 cref_or_me_type = imemo_type(cref_or_me);
157 if (
type & VM_FRAME_FLAG_BMETHOD) {
161 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
162 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
164 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
165 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
169 if (cref_or_me_type != imemo_ment) {
170 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
174 if (req_cref && cref_or_me_type != imemo_cref) {
175 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
178 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
179 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
183 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
189 if (cref_or_me_type == imemo_ment) {
192 if (!callable_method_entry_p(me)) {
193 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
197 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
198 VM_ASSERT(iseq == NULL ||
200 RUBY_VM_NORMAL_ISEQ_P(iseq)
204 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
214 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
217#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
219 vm_check_frame_detail(type, req_block, req_me, req_cref, \
220 specval, cref_or_me, is_cframe, iseq); \
222 switch (given_magic) {
224 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
225 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
226 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
227 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
228 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
229 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
230 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
231 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
232 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
234 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
239static VALUE vm_stack_canary;
240static bool vm_stack_canary_was_born =
false;
243MJIT_FUNC_EXPORTED
void
249 if (! LIKELY(vm_stack_canary_was_born)) {
252 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
256 else if (! (iseq = GET_ISEQ())) {
259 else if (LIKELY(sp[0] != vm_stack_canary)) {
268 const VALUE *orig = rb_iseq_original_iseq(iseq);
269 const VALUE *encoded = ISEQ_BODY(iseq)->iseq_encoded;
270 const ptrdiff_t pos = GET_PC() - encoded;
271 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
272 const char *name = insn_name(insn);
273 const VALUE iseqw = rb_iseqw_new(iseq);
275 const char *stri = rb_str_to_cstr(inspection);
276 const VALUE disasm = rb_iseq_disasm(iseq);
277 const char *strd = rb_str_to_cstr(disasm);
283 "We are killing the stack canary set by %s, "
284 "at %s@pc=%"PRIdPTR
"\n"
285 "watch out the C stack trace.\n"
287 name, stri, pos, strd);
291#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
294#define vm_check_canary(ec, sp)
295#define vm_check_frame(a, b, c, d)
300vm_push_frame_debug_counter_inc(
307 RB_DEBUG_COUNTER_INC(frame_push);
309 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
310 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
311 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
314 RB_DEBUG_COUNTER_INC(frame_R2R);
317 RB_DEBUG_COUNTER_INC(frame_R2C);
322 RB_DEBUG_COUNTER_INC(frame_C2R);
325 RB_DEBUG_COUNTER_INC(frame_C2C);
330 switch (
type & VM_FRAME_MAGIC_MASK) {
331 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
332 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
333 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
334 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
335 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
336 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
337 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
338 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
339 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
345#define vm_push_frame_debug_counter_inc(ec, cfp, t)
348STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
349STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
350STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
366 vm_check_frame(
type, specval, cref_or_me, iseq);
367 VM_ASSERT(local_size >= 0);
370 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
371 vm_check_canary(ec, sp);
376 for (
int i=0; i < local_size; i++) {
405 vm_push_frame_debug_counter_inc(ec, cfp,
type);
413 if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
414 if (VMDEBUG == 2) SDR();
416 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
423 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
425 if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
426 if (VMDEBUG == 2) SDR();
428 RUBY_VM_CHECK_INTS(ec);
429 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
431 return flags & VM_FRAME_FLAG_FINISH;
437 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
444 VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();
446 rb_imemo_tmpbuf_set_ptr(tmpbuf, ptr);
450 dmy_iseq->body = dmy_body;
451 dmy_body->type = ISEQ_TYPE_TOP;
452 dmy_body->location.pathobj = fname;
456 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
458 VM_BLOCK_HANDLER_NONE,
470rb_arity_error_new(
int argc,
int min,
int max)
472 VALUE err_mess =
rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
487rb_error_arity(
int argc,
int min,
int max)
494NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
497vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
500 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
501 VM_FORCE_WRITE(&ep[index], v);
502 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
503 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
507vm_env_write(
const VALUE *ep,
int index,
VALUE v)
509 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
510 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
511 VM_STACK_ENV_WRITE(ep, index, v);
514 vm_env_write_slowpath(ep, index, v);
521 if (block_handler == VM_BLOCK_HANDLER_NONE) {
525 switch (vm_block_handler_type(block_handler)) {
526 case block_handler_type_iseq:
527 case block_handler_type_ifunc:
528 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
529 case block_handler_type_symbol:
530 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
531 case block_handler_type_proc:
532 return VM_BH_TO_PROC(block_handler);
534 VM_UNREACHABLE(rb_vm_bh_to_procval);
543vm_svar_valid_p(
VALUE svar)
546 switch (imemo_type(svar)) {
555 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
565 if (lep && (ec == NULL || ec->root_lep != lep)) {
566 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
569 svar = ec->root_svar;
572 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
580 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
582 if (lep && (ec == NULL || ec->root_lep != lep)) {
583 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
586 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
593 const struct vm_svar *svar = lep_svar(ec, lep);
598 case VM_SVAR_LASTLINE:
599 return svar->lastline;
600 case VM_SVAR_BACKREF:
601 return svar->backref;
603 const VALUE ary = svar->others;
609 return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
624 struct vm_svar *svar = lep_svar(ec, lep);
627 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
631 case VM_SVAR_LASTLINE:
634 case VM_SVAR_BACKREF:
638 VALUE ary = svar->others;
643 rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
654 val = lep_svar_get(ec, lep, key);
657 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
674 rb_bug(
"unexpected back-ref");
686check_method_entry(
VALUE obj,
int can_be_svar)
688 if (obj ==
Qfalse)
return NULL;
694 switch (imemo_type(obj)) {
705 rb_bug(
"check_method_entry: svar should not be there:");
714 const VALUE *ep = cfp->ep;
717 while (!VM_ENV_LOCAL_P(ep)) {
718 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
719 ep = VM_ENV_PREV_EP(ep);
722 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
728 switch (me->def->type) {
729 case VM_METHOD_TYPE_ISEQ:
730 return me->def->body.iseq.
iseqptr;
739 switch (me->def->type) {
740 case VM_METHOD_TYPE_ISEQ:
741 return me->def->body.iseq.
cref;
747#if VM_CHECK_MODE == 0
751check_cref(
VALUE obj,
int can_be_svar)
753 if (obj ==
Qfalse)
return NULL;
759 switch (imemo_type(obj)) {
770 rb_bug(
"check_method_entry: svar should not be there:");
777vm_env_cref(
const VALUE *ep)
781 while (!VM_ENV_LOCAL_P(ep)) {
782 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
783 ep = VM_ENV_PREV_EP(ep);
786 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
790is_cref(
const VALUE v,
int can_be_svar)
793 switch (imemo_type(v)) {
806vm_env_cref_by_cref(
const VALUE *ep)
808 while (!VM_ENV_LOCAL_P(ep)) {
809 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
810 ep = VM_ENV_PREV_EP(ep);
812 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
816cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
818 const VALUE v = *vptr;
822 switch (imemo_type(v)) {
825 new_cref = vm_cref_dup(cref);
830 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
835 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
839 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
848vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
850 if (vm_env_cref_by_cref(ep)) {
854 while (!VM_ENV_LOCAL_P(ep)) {
855 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
856 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
859 ep = VM_ENV_PREV_EP(ep);
861 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
862 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
865 rb_bug(
"vm_cref_dup: unreachable");
870vm_get_cref(
const VALUE *ep)
878 rb_bug(
"vm_get_cref: unreachable");
883rb_vm_get_cref(
const VALUE *ep)
885 return vm_get_cref(ep);
896 return vm_get_cref(cfp->ep);
900vm_get_const_key_cref(
const VALUE *ep)
907 FL_TEST(CREF_CLASS(cref), RCLASS_CLONED)) {
910 cref = CREF_NEXT(cref);
923 if (CREF_CLASS(cref) == old_klass) {
924 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
925 *new_cref_ptr = new_cref;
928 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
929 cref = CREF_NEXT(cref);
930 *new_cref_ptr = new_cref;
931 new_cref_ptr = &new_cref->next;
933 *new_cref_ptr = NULL;
942 prev_cref = vm_env_cref(ep);
948 prev_cref = vm_env_cref(cfp->ep);
952 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
956vm_get_cbase(
const VALUE *ep)
960 return CREF_CLASS_FOR_DEFINITION(cref);
964vm_get_const_base(
const VALUE *ep)
969 if (!CREF_PUSHED_BY_EVAL(cref)) {
970 return CREF_CLASS_FOR_DEFINITION(cref);
972 cref = CREF_NEXT(cref);
979vm_check_if_namespace(
VALUE klass)
987vm_ensure_not_refinement_module(
VALUE self)
990 rb_warn(
"not defined at the refinement, but at the outer class/module");
1006 if (
NIL_P(orig_klass) && allow_nil) {
1008 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1012 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1013 root_cref = CREF_NEXT(root_cref);
1016 while (cref && CREF_NEXT(cref)) {
1017 if (CREF_PUSHED_BY_EVAL(cref)) {
1021 klass = CREF_CLASS(cref);
1023 cref = CREF_NEXT(cref);
1025 if (!
NIL_P(klass)) {
1029 if ((ce = rb_const_lookup(klass,
id))) {
1030 rb_const_warn_if_deprecated(ce, klass,
id);
1033 if (am == klass)
break;
1035 if (is_defined)
return 1;
1036 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1038 goto search_continue;
1045 if (UNLIKELY(!rb_ractor_main_p())) {
1048 "can not access non-shareable objects in constant %"PRIsVALUE
"::%s by non-main ractor.",
rb_class_path(klass),
rb_id2name(
id));
1059 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1060 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1074 vm_check_if_namespace(orig_klass);
1076 return rb_public_const_defined_from(orig_klass,
id);
1079 return rb_public_const_get_from(orig_klass,
id);
1087 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1095 int allow_nil = TRUE;
1096 if (segments[0] == idNULL) {
1101 while (segments[idx]) {
1102 ID id = segments[idx++];
1103 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
1116 rb_bug(
"vm_get_cvar_base: no cref");
1119 while (CREF_NEXT(cref) &&
1121 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1122 cref = CREF_NEXT(cref);
1124 if (top_level_raise && !CREF_NEXT(cref)) {
1128 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1136ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
1138fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1141 vm_cc_attr_index_set(cc, index, shape_id);
1144 vm_ic_attr_index_set(iseq, ic, index, shape_id);
1148#define ractor_incidental_shareable_p(cond, val) \
1149 (!(cond) || rb_ractor_shareable_p(val))
1150#define ractor_object_incidental_shareable_p(obj, val) \
1151 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1153#define ATTR_INDEX_NOT_SET (attr_index_t)-1
1161 shape_id_t shape_id;
1168#if SHAPE_IN_BASIC_FLAGS
1169 shape_id = RBASIC_SHAPE_ID(obj);
1177#if !SHAPE_IN_BASIC_FLAGS
1178 shape_id = ROBJECT_SHAPE_ID(obj);
1184 if (UNLIKELY(!rb_ractor_main_p())) {
1194 ivar_list = RCLASS_IVPTR(obj);
1196#if !SHAPE_IN_BASIC_FLAGS
1197 shape_id = RCLASS_SHAPE_ID(obj);
1205 rb_gen_ivtbl_get(obj,
id, &ivtbl);
1206#if !SHAPE_IN_BASIC_FLAGS
1207 shape_id = ivtbl->shape_id;
1209 ivar_list = ivtbl->ivptr;
1216 shape_id_t cached_id;
1220 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1223 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1226 if (LIKELY(cached_id == shape_id)) {
1227 RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1229 if (index == ATTR_INDEX_NOT_SET) {
1233 val = ivar_list[index];
1239 if (cached_id != INVALID_SHAPE_ID) {
1240 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1243 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1247 if (cached_id != INVALID_SHAPE_ID) {
1248 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1251 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1256 rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
1258 if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
1259 if (!st_lookup(ROBJECT_IV_HASH(obj),
id, &val)) {
1264 if (rb_shape_get_iv_index(shape,
id, &index)) {
1267 fill_ivar_cache(iseq, ic, cc, is_attr, index, shape_id);
1270 val = ivar_list[index];
1275 vm_cc_attr_index_initialize(cc, shape_id);
1278 vm_ic_attr_index_initialize(ic, shape_id);
1293 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1304populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1306 RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1310 vm_cc_attr_index_set(cc, index, next_shape_id);
1313 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1330 attr_index_t index = rb_obj_ivar_set(obj,
id, val);
1332 shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
1334 if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
1335 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1338 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
1347 shape_id_t next_shape_id = rb_shape_get_shape_id(obj);
1348 rb_shape_t *next_shape = rb_shape_get_shape_by_id(next_shape_id);
1351 if (rb_shape_get_iv_index(next_shape,
id, &index)) {
1352 if (index >= MAX_IVARS) {
1356 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1359 rb_bug(
"didn't find the id\n");
1366 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1373 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1379 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1382NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1384vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1386#if SHAPE_IN_BASIC_FLAGS
1387 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1389 shape_id_t shape_id = rb_generic_shape_id(obj);
1395 if (shape_id == dest_shape_id) {
1396 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1399 rb_gen_ivtbl_get(obj, 0, &ivtbl);
1401 else if (dest_shape_id != INVALID_SHAPE_ID) {
1402 rb_shape_t * dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1403 shape_id_t source_shape_id = dest_shape->parent_id;
1405 if (shape_id == source_shape_id && dest_shape->edge_name ==
id && dest_shape->type == SHAPE_IVAR) {
1406 ivtbl = rb_ensure_generic_iv_list_size(obj, dest_shape, index + 1);
1407#if SHAPE_IN_BASIC_FLAGS
1408 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1421 VALUE *ptr = ivtbl->ivptr;
1425 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1431vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1439 shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
1440 RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1442 if (LIKELY(shape_id == dest_shape_id)) {
1443 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1446 else if (dest_shape_id != INVALID_SHAPE_ID) {
1447 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1448 shape_id_t source_shape_id = dest_shape->parent_id;
1450 if (shape_id == source_shape_id && dest_shape->edge_name ==
id) {
1451 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1453 ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
1455 RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id),
id) == dest_shape);
1471 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1477 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1489 VALUE defined_class = 0;
1493 defined_class =
RBASIC(defined_class)->klass;
1496 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1498 rb_bug(
"the cvc table should be set");
1502 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1503 rb_bug(
"should have cvar cache entry");
1508 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1524 cref = vm_get_cref(GET_EP());
1526 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1527 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1529 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1535 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1537 return update_classvariable_cache(iseq, klass,
id, cref, ic);
1543 return vm_getclassvariable(iseq, cfp,
id, ic);
1550 cref = vm_get_cref(GET_EP());
1552 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1553 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1555 rb_class_ivar_set(ic->entry->class_value,
id, val);
1559 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1563 update_classvariable_cache(iseq, klass,
id, cref, ic);
1569 vm_setclassvariable(iseq, cfp,
id, val, ic);
1575 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE);
1586 shape_id_t dest_shape_id;
1588 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1590 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1597 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1601 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1608 vm_setinstancevariable(iseq, obj,
id, val, ic);
1617 ec->tag->state =
FIX2INT(err);
1620 ec->tag->state = TAG_THROW;
1622 else if (THROW_DATA_P(err)) {
1623 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1626 ec->tag->state = TAG_RAISE;
1633 const int flag,
const VALUE throwobj)
1641 else if (state == TAG_BREAK) {
1643 const VALUE *ep = GET_EP();
1644 const rb_iseq_t *base_iseq = GET_ISEQ();
1645 escape_cfp = reg_cfp;
1647 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1648 if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1649 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1650 ep = escape_cfp->ep;
1651 base_iseq = escape_cfp->iseq;
1654 ep = VM_ENV_PREV_EP(ep);
1655 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1656 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1657 VM_ASSERT(escape_cfp->iseq == base_iseq);
1661 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1667 ep = VM_ENV_PREV_EP(ep);
1669 while (escape_cfp < eocfp) {
1670 if (escape_cfp->ep == ep) {
1671 const rb_iseq_t *
const iseq = escape_cfp->iseq;
1672 const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
1677 for (i=0; i < ct->size; i++) {
1679 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1681 if (entry->type == CATCH_TYPE_BREAK &&
1682 entry->iseq == base_iseq &&
1683 entry->start < epc && entry->end >= epc) {
1684 if (entry->cont == epc) {
1693 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1698 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1701 else if (state == TAG_RETRY) {
1702 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1704 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1706 else if (state == TAG_RETURN) {
1707 const VALUE *current_ep = GET_EP();
1708 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1709 int in_class_frame = 0;
1711 escape_cfp = reg_cfp;
1714 while (!VM_ENV_LOCAL_P(ep)) {
1715 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1718 ep = VM_ENV_PREV_EP(ep);
1722 while (escape_cfp < eocfp) {
1723 const VALUE *lep = VM_CF_LEP(escape_cfp);
1729 if (lep == target_lep &&
1730 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1731 ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1736 if (lep == target_lep) {
1737 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1739 if (in_class_frame) {
1744 const VALUE *tep = current_ep;
1746 while (target_lep != tep) {
1747 if (escape_cfp->ep == tep) {
1749 if (tep == target_ep) {
1753 goto unexpected_return;
1756 tep = VM_ENV_PREV_EP(tep);
1760 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1761 switch (ISEQ_BODY(escape_cfp->iseq)->type) {
1763 case ISEQ_TYPE_MAIN:
1765 if (in_class_frame)
goto unexpected_return;
1766 if (target_ep == NULL) {
1770 goto unexpected_return;
1774 case ISEQ_TYPE_EVAL:
1775 case ISEQ_TYPE_CLASS:
1784 if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
1785 if (target_ep == NULL) {
1789 goto unexpected_return;
1793 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1796 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1802 rb_bug(
"isns(throw): unsupported throw type");
1805 ec->tag->state = state;
1806 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1811 rb_num_t throw_state,
VALUE throwobj)
1813 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1814 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1817 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1820 return vm_throw_continue(ec, throwobj);
1825vm_expandarray(
VALUE *sp,
VALUE ary, rb_num_t num,
int flag)
1827 int is_splat = flag & 0x01;
1828 rb_num_t space_size = num + is_splat;
1829 VALUE *base = sp - 1;
1832 const VALUE obj = ary;
1844 if (space_size == 0) {
1847 else if (flag & 0x02) {
1852 for (i=0; i<num-len; i++) {
1856 for (j=0; i<num; i++, j++) {
1857 VALUE v = ptr[len - j - 1];
1867 VALUE *bptr = &base[space_size - 1];
1869 for (i=0; i<num; i++) {
1871 for (; i<num; i++) {
1880 *bptr = rb_ary_new();
1898#if VM_CHECK_MODE > 0
1899 ccs->debug_sig = ~(
VALUE)ccs;
1905 ccs->entries = NULL;
1907 rb_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
1915 if (! vm_cc_markable(cc)) {
1918 else if (! vm_ci_markable(ci)) {
1922 if (UNLIKELY(ccs->len == ccs->capa)) {
1923 if (ccs->capa == 0) {
1925 ccs->entries =
ALLOC_N(
struct rb_class_cc_entries_entry, ccs->capa);
1929 REALLOC_N(ccs->entries,
struct rb_class_cc_entries_entry, ccs->capa);
1932 VM_ASSERT(ccs->len < ccs->capa);
1934 const int pos = ccs->len++;
1938 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
1944#if VM_CHECK_MODE > 0
1948 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
1949 for (
int i=0; i<ccs->len; i++) {
1950 vm_ci_dump(ccs->entries[i].ci);
1951 rp(ccs->entries[i].cc);
1958 VM_ASSERT(vm_ccs_p(ccs));
1959 VM_ASSERT(ccs->len <= ccs->capa);
1961 for (
int i=0; i<ccs->len; i++) {
1962 const struct rb_callinfo *ci = ccs->entries[i].ci;
1965 VM_ASSERT(vm_ci_p(ci));
1966 VM_ASSERT(vm_ci_mid(ci) == mid);
1967 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
1968 VM_ASSERT(vm_cc_class_check(cc, klass));
1969 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
1982 const ID mid = vm_ci_mid(ci);
1983 struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
1988 if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
1990 const int ccs_len = ccs->len;
1992 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
1993 rb_vm_ccs_free(ccs);
1994 rb_id_table_delete(cc_tbl, mid);
1998 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
2000 for (
int i=0; i<ccs_len; i++) {
2001 const struct rb_callinfo *ccs_ci = ccs->entries[i].ci;
2002 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2004 VM_ASSERT(vm_ci_p(ccs_ci));
2005 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2008 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2010 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2011 VM_ASSERT(ccs_cc->klass == klass);
2012 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2021 cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
2024 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2030 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
2032 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2035 cme = rb_callable_method_entry(klass, mid);
2038 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2042 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2043 return &vm_empty_cc;
2046 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2051 VM_ASSERT(cc_tbl != NULL);
2053 if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
2059 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2063 cme = check_overloaded_cme(cme, ci);
2065 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general);
2066 vm_ccs_push(klass, ccs, ci, cc);
2068 VM_ASSERT(vm_cc_cme(cc) != NULL);
2069 VM_ASSERT(cme->called_id == mid);
2070 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2084 cc = vm_search_cc(klass, ci);
2087 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2088 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2089 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2090 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2091 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2102#if USE_DEBUG_COUNTER
2106 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2108#if OPT_INLINE_METHOD_CACHE
2119#if USE_DEBUG_COUNTER
2120 if (old_cc == empty_cc) {
2122 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2124 else if (old_cc == cc) {
2125 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2127 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2128 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2130 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2131 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2132 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2135 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2140 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2141 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2154#if OPT_INLINE_METHOD_CACHE
2155 if (LIKELY(vm_cc_class_check(cc, klass))) {
2156 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2157 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2158 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2159 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2160 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2161 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2165 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2168 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2172 return vm_search_method_slowpath0(cd_owner, cd, klass);
2179 VM_ASSERT(klass !=
Qfalse);
2182 return vm_search_method_fastpath(cd_owner, cd, klass);
2185#if __has_attribute(transparent_union)
2198 VALUE (*f10)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2199 VALUE (*f11)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2200 VALUE (*f12)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2201 VALUE (*f13)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2202 VALUE (*f14)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2203 VALUE (*f15)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2217 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2218 VM_ASSERT(callable_method_entry_p(me));
2220 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2224#if __has_attribute(transparent_union)
2225 return me->def->body.cfunc.func == func.anyargs;
2227 return me->def->body.cfunc.func == func;
2236 VM_ASSERT(iseq != NULL);
2238 return check_cfunc(vm_cc_cme(cc), func);
2241#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2273opt_equality_specialized(
VALUE recv,
VALUE obj)
2275 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2276 goto compare_by_identity;
2278 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2279 goto compare_by_identity;
2282 goto compare_by_identity;
2291#if MSC_VERSION_BEFORE(1300)
2295 else if (isnan(b)) {
2300 return RBOOL(a == b);
2307 return rb_str_eql_internal(obj, recv);
2312 compare_by_identity:
2313 return RBOOL(recv == obj);
2319 VM_ASSERT(cd_owner != NULL);
2321 VALUE val = opt_equality_specialized(recv, obj);
2322 if (!UNDEF_P(val))
return val;
2324 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2328 return RBOOL(recv == obj);
2332#undef EQ_UNREDEFINED_P
2337NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2340opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2342 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);
2344 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2345 return RBOOL(recv == obj);
2355 VALUE val = opt_equality_specialized(recv, obj);
2356 if (!UNDEF_P(val)) {
2360 return opt_equality_by_mid_slowpath(recv, obj, mid);
2367 return opt_equality_by_mid(obj1, obj2, idEq);
2373 return opt_equality_by_mid(obj1, obj2, idEqlP);
2385 case VM_CHECKMATCH_TYPE_WHEN:
2387 case VM_CHECKMATCH_TYPE_RESCUE:
2392 case VM_CHECKMATCH_TYPE_CASE: {
2393 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2396 rb_bug(
"check_match: unreachable");
2401#if MSC_VERSION_BEFORE(1300)
2402#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2404#define CHECK_CMP_NAN(a, b)
2408double_cmp_lt(
double a,
double b)
2410 CHECK_CMP_NAN(a, b);
2411 return RBOOL(a < b);
2415double_cmp_le(
double a,
double b)
2417 CHECK_CMP_NAN(a, b);
2418 return RBOOL(a <= b);
2422double_cmp_gt(
double a,
double b)
2424 CHECK_CMP_NAN(a, b);
2425 return RBOOL(a > b);
2429double_cmp_ge(
double a,
double b)
2431 CHECK_CMP_NAN(a, b);
2432 return RBOOL(a >= b);
2435static inline VALUE *
2441 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2442 VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
2443 if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD) {
2447#if VM_DEBUG_BP_CHECK
2448 if (bp != cfp->bp_check) {
2449 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2450 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2451 (
long)(bp - GET_EC()->vm_stack));
2452 rb_bug(
"vm_base_ptr: unreachable");
2477static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2482 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2484 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2490 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2493 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2494 int param = ISEQ_BODY(iseq)->param.size;
2495 int local = ISEQ_BODY(iseq)->local_table_size;
2496 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2502 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2503 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2504 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2505 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2506 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2507 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2508 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2511MJIT_FUNC_EXPORTED
bool
2512rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2514 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2515 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2516 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2517 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2518 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2519 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2520 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2523MJIT_FUNC_EXPORTED
bool
2524rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2526 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2527 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2528 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2529 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2530 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2531 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2536rb_splat_or_kwargs_p(
const struct rb_callinfo *restrict ci)
2538 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
2547 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2552 vm_caller_setup_arg_splat(cfp, calling);
2553 if (!IS_ARGS_KW_OR_KW_SPLAT(ci) &&
2554 calling->argc > 0 &&
2556 (((
struct RHash *)final_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2557 *(cfp->sp - 1) = rb_hash_dup(final_hash);
2558 calling->kw_splat = 1;
2561 if (UNLIKELY(IS_ARGS_KW_OR_KW_SPLAT(ci))) {
2562 if (IS_ARGS_KEYWORD(ci)) {
2567 vm_caller_setup_arg_kw(cfp, calling, ci);
2570 VALUE keyword_hash = cfp->sp[-1];
2573 cfp->sp[-1] = rb_hash_dup(rb_to_hash_type(keyword_hash));
2575 else if (!IS_ARGS_KW_SPLAT_MUT(ci)) {
2579 cfp->sp[-1] = rb_hash_dup(keyword_hash);
2590 if (UNLIKELY(calling->kw_splat)) {
2597 calling->kw_splat = 0;
2602#define USE_OPT_HIST 0
2605#define OPT_HIST_MAX 64
2606static int opt_hist[OPT_HIST_MAX+1];
2610opt_hist_show_results_at_exit(
void)
2612 for (
int i=0; i<OPT_HIST_MAX; i++) {
2613 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
2623 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2624 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2625 const int opt = calling->argc - lead_num;
2626 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2627 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2628 const int param = ISEQ_BODY(iseq)->param.size;
2629 const int local = ISEQ_BODY(iseq)->local_table_size;
2630 const int delta = opt_num - opt;
2632 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2635 if (opt_pc < OPT_HIST_MAX) {
2639 opt_hist[OPT_HIST_MAX]++;
2643 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2651 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2652 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2653 const int opt = calling->argc - lead_num;
2654 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2656 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2659 if (opt_pc < OPT_HIST_MAX) {
2663 opt_hist[OPT_HIST_MAX]++;
2667 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2672 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
2673 VALUE *
const locals);
2682 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
2683 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2685 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2686 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2688 const int ci_kw_len = kw_arg->keyword_len;
2689 const VALUE *
const ci_keywords = kw_arg->keywords;
2690 VALUE *argv = cfp->sp - calling->argc;
2691 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2692 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2694 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2695 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2697 int param = ISEQ_BODY(iseq)->param.size;
2698 int local = ISEQ_BODY(iseq)->local_table_size;
2699 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2706 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->ci;
2709 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
2710 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2712 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2713 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2714 VALUE *
const argv = cfp->sp - calling->argc;
2715 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2718 for (i=0; i<kw_param->num; i++) {
2719 klocals[i] = kw_param->default_values[i];
2726 int param = ISEQ_BODY(iseq)->param.size;
2727 int local = ISEQ_BODY(iseq)->local_table_size;
2728 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2733 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
2737 bool cacheable_ci = vm_ci_markable(ci);
2739 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
2740 if (LIKELY(rb_simple_iseq_p(iseq))) {
2742 CALLER_SETUP_ARG(cfp, calling, ci);
2743 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
2745 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
2746 argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
2749 VM_ASSERT(ci == calling->ci);
2750 VM_ASSERT(cc == calling->cc);
2751 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), cacheable_ci && vm_call_iseq_optimizable_p(ci, cc));
2754 else if (rb_iseq_only_optparam_p(iseq)) {
2756 CALLER_SETUP_ARG(cfp, calling, ci);
2757 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
2759 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2760 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2761 const int argc = calling->argc;
2762 const int opt = argc - lead_num;
2764 if (opt < 0 || opt > opt_num) {
2765 argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
2768 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
2769 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
2770 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
2771 cacheable_ci && vm_call_cacheable(ci, cc));
2774 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
2775 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
2776 cacheable_ci && vm_call_cacheable(ci, cc));
2780 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
2781 for (
int i=argc; i<lead_num + opt_num; i++) {
2784 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
2786 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
2787 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2788 const int argc = calling->argc;
2789 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2791 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
2794 if (argc - kw_arg->keyword_len == lead_num) {
2795 const int ci_kw_len = kw_arg->keyword_len;
2796 const VALUE *
const ci_keywords = kw_arg->keywords;
2798 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2800 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2801 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2803 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
2804 cacheable_ci && vm_call_cacheable(ci, cc));
2809 else if (argc == lead_num) {
2811 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2812 args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);
2814 if (klocals[kw_param->num] ==
INT2FIX(0)) {
2816 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
2817 cacheable_ci && vm_call_cacheable(ci, cc));
2825 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
2831 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
2834 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2835 const int param_size = ISEQ_BODY(iseq)->param.size;
2836 const int local_size = ISEQ_BODY(iseq)->local_table_size;
2837 const int opt_pc = vm_callee_setup_arg(ec, calling, def_iseq_ptr(vm_cc_cme(cc)->def), cfp->sp - calling->argc, param_size, local_size);
2838 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
2843 int opt_pc,
int param_size,
int local_size)
2848 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
2849 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
2852 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2858 int opt_pc,
int param_size,
int local_size)
2860 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
2861 VALUE *argv = cfp->sp - calling->argc;
2862 VALUE *sp = argv + param_size;
2863 cfp->sp = argv - 1 ;
2865 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
2866 calling->block_handler, (
VALUE)me,
2867 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
2868 local_size - param_size,
2869 ISEQ_BODY(iseq)->stack_max);
2878 VALUE *argv = cfp->sp - calling->argc;
2880 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
2881 VALUE *src_argv = argv;
2882 VALUE *sp_orig, *sp;
2883 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
2885 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
2886 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
2887 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
2888 dst_captured->code.val = src_captured->code.val;
2889 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
2890 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
2893 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
2897 vm_pop_frame(ec, cfp, cfp->ep);
2900 sp_orig = sp = cfp->sp;
2903 sp[0] = calling->recv;
2907 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
2908 *sp++ = src_argv[i];
2911 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
2912 calling->recv, calling->block_handler, (
VALUE)me,
2913 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
2914 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
2915 ISEQ_BODY(iseq)->stack_max);
2923ractor_unsafe_check(
void)
2925 if (!rb_ractor_main_p()) {
2926 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
2933 ractor_unsafe_check();
2941 ractor_unsafe_check();
2943 return (*f)(argc, argv, recv);
2949 ractor_unsafe_check();
2957 ractor_unsafe_check();
2959 return (*f)(recv, argv[0]);
2965 ractor_unsafe_check();
2967 return (*f)(recv, argv[0], argv[1]);
2973 ractor_unsafe_check();
2975 return (*f)(recv, argv[0], argv[1], argv[2]);
2981 ractor_unsafe_check();
2983 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
2989 ractor_unsafe_check();
2990 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2991 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
2997 ractor_unsafe_check();
2998 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2999 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3005 ractor_unsafe_check();
3006 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3007 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3013 ractor_unsafe_check();
3014 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3015 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3021 ractor_unsafe_check();
3022 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3023 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3029 ractor_unsafe_check();
3030 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3031 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3037 ractor_unsafe_check();
3038 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3039 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3045 ractor_unsafe_check();
3046 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3047 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3053 ractor_unsafe_check();
3054 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3055 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3061 ractor_unsafe_check();
3062 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3063 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3069 ractor_unsafe_check();
3070 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3071 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3085 return (*f)(argc, argv, recv);
3099 return (*f)(recv, argv[0]);
3106 return (*f)(recv, argv[0], argv[1]);
3113 return (*f)(recv, argv[0], argv[1], argv[2]);
3120 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3126 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3127 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3133 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3134 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3140 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3141 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3147 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3148 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3154 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3155 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3161 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3162 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3168 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3169 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3175 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3176 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3182 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3183 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3189 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3190 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3196 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3197 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3203 const int ov_flags = RAISED_STACKOVERFLOW;
3204 if (LIKELY(reg_cfp == ec->cfp + 1))
return TRUE;
3205 if (rb_ec_raised_p(ec, ov_flags)) {
3206 rb_ec_raised_reset(ec, ov_flags);
3212#define CHECK_CFP_CONSISTENCY(func) \
3213 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3214 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3220#if VM_DEBUG_VERIFY_METHOD_CACHE
3221 switch (me->def->type) {
3222 case VM_METHOD_TYPE_CFUNC:
3223 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3225# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3227 METHOD_BUG(ATTRSET);
3229 METHOD_BUG(BMETHOD);
3232 METHOD_BUG(OPTIMIZED);
3233 METHOD_BUG(MISSING);
3234 METHOD_BUG(REFINED);
3238 rb_bug(
"wrong method type: %d", me->def->type);
3241 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3247 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3253 int len = cfunc->argc;
3255 VALUE recv = calling->recv;
3256 VALUE block_handler = calling->block_handler;
3257 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3258 int argc = calling->argc;
3259 int orig_argc = argc;
3261 if (UNLIKELY(calling->kw_splat)) {
3262 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3265 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3268 vm_push_frame(ec, NULL, frame_type, recv,
3269 block_handler, (
VALUE)me,
3270 0, ec->cfp->sp, 0, 0);
3274 reg_cfp->sp -= orig_argc + 1;
3275 val = (*cfunc->invoker)(recv, argc, reg_cfp->sp + 1, cfunc->func);
3277 CHECK_CFP_CONSISTENCY(
"vm_call_cfunc");
3279 rb_vm_pop_frame(ec);
3281 EXEC_EVENT_HOOK(ec,
RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3282 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
3291 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3293 CALLER_SETUP_ARG(reg_cfp, calling, ci);
3294 CALLER_REMOVE_EMPTY_KW_SPLAT(reg_cfp, calling, ci);
3295 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat);
3296 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3303 RB_DEBUG_COUNTER_INC(ccf_ivar);
3305 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE);
3312 RB_DEBUG_COUNTER_INC(ccf_attrset);
3313 VALUE val = *(cfp->sp - 1);
3315 attr_index_t index = vm_cc_attr_index(cc);
3316 shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
3317 ID id = vm_cc_cme(cc)->def->body.attr.id;
3319 VALUE res = vm_setivar(obj,
id, val, dest_shape_id, index);
3328 res = vm_setivar_default(obj,
id, val, dest_shape_id, index);
3329 if (!UNDEF_P(res)) {
3334 res = vm_setivar_slowpath_attr(obj,
id, val, cc);
3342 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
3346rb_vm_call_ivar_attrset_p(
const vm_call_handler ch)
3348 return (ch == vm_call_ivar || ch == vm_call_attrset);
3358 VALUE procv = cme->def->body.bmethod.proc;
3361 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
3366 GetProcPtr(procv, proc);
3367 val = rb_vm_invoke_bmethod(ec, proc, calling->recv, calling->argc, argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
3375 RB_DEBUG_COUNTER_INC(ccf_bmethod);
3381 CALLER_SETUP_ARG(cfp, calling, ci);
3382 argc = calling->argc;
3385 cfp->sp += - argc - 1;
3387 return vm_call_bmethod_body(ec, calling, argv);
3390MJIT_FUNC_EXPORTED
VALUE
3391rb_find_defined_class_by_owner(
VALUE current_class,
VALUE target_owner)
3393 VALUE klass = current_class;
3401 while (
RTEST(klass)) {
3403 if (owner == target_owner) {
3409 return current_class;
3418 if (orig_me->defined_class == 0) {
3419 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
3421 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
3423 if (me->def->reference_count == 1) {
3424 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
3428 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
3436 VM_ASSERT(callable_method_entry_p(cme));
3443 return aliased_callable_method_entry(me);
3449 calling->cc = &VM_CC_ON_STACK(
Qundef,
3452 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
3454 return vm_call_method_each_type(ec, cfp, calling);
3457static enum method_missing_reason
3460 enum method_missing_reason stat = MISSING_NOENTRY;
3461 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
3462 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
3463 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
3473 ASSUME(calling->argc >= 0);
3476 enum method_missing_reason missing_reason = MISSING_NOENTRY;
3477 int argc = calling->argc;
3478 VALUE recv = calling->recv;
3481 flags |= VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
3483 if (UNLIKELY(! mid)) {
3484 mid = idMethodMissing;
3485 missing_reason = ci_missing_reason(ci);
3486 ec->method_missing_reason = missing_reason;
3502 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
3505 argc = ++calling->argc;
3507 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
3510 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
3511 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
3512 VALUE exc = rb_make_no_method_exception(
3522 calling->ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci));
3523 calling->cc = &VM_CC_ON_STACK(klass,
3525 { .method_missing_reason = missing_reason },
3526 rb_callable_method_entry_with_refinements(klass, mid, NULL));
3528 if (flags & VM_CALL_FCALL) {
3529 return vm_call_method(ec, reg_cfp, calling);
3533 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
3535 if (vm_cc_cme(cc) != NULL) {
3536 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
3537 case METHOD_VISI_PUBLIC:
3538 return vm_call_method_each_type(ec, reg_cfp, calling);
3539 case METHOD_VISI_PRIVATE:
3540 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
3542 case METHOD_VISI_PROTECTED:
3543 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
3546 VM_UNREACHABLE(vm_call_method);
3548 return vm_call_method_missing(ec, reg_cfp, calling);
3551 return vm_call_method_nome(ec, reg_cfp, calling);
3557 RB_DEBUG_COUNTER_INC(ccf_opt_send);
3562 CALLER_SETUP_ARG(reg_cfp, calling, calling->ci);
3564 i = calling->argc - 1;
3566 if (calling->argc == 0) {
3591 return vm_call_symbol(ec, reg_cfp, calling, calling->ci, sym, VM_CALL_FCALL);
3597 const struct rb_callinfo *orig_ci,
enum method_missing_reason reason)
3599 RB_DEBUG_COUNTER_INC(ccf_method_missing);
3601 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
3604 CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
3605 argc = calling->argc + 1;
3607 unsigned int flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
3608 calling->argc = argc;
3611 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
3612 vm_check_canary(ec, reg_cfp->sp);
3616 argv[0] =
ID2SYM(vm_ci_mid(orig_ci));
3619 ec->method_missing_reason = reason;
3620 calling->ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci));
3621 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }},
3622 rb_callable_method_entry_without_refinements(
CLASS_OF(calling->recv), idMethodMissing, NULL));
3623 return vm_call_method(ec, reg_cfp, calling);
3629 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->ci, vm_cc_cmethod_missing_reason(calling->cc));
3640 return vm_call_method_nome(ec, cfp, calling);
3642 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
3643 cme->def->body.refined.orig_me) {
3644 cme = refined_method_callable_without_refinement(cme);
3647 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, cme);
3649 return vm_call_method_each_type(ec, cfp, calling);
3653find_refinement(
VALUE refinements,
VALUE klass)
3655 if (
NIL_P(refinements)) {
3658 return rb_hash_lookup(refinements, klass);
3667 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
3668 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
3671 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
3672 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
3676 }
while (cfp->iseq != local_iseq);
3687 if (orig_me->defined_class == 0) {
3695 VM_ASSERT(callable_method_entry_p(cme));
3697 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
3707 ID mid = vm_ci_mid(calling->ci);
3708 const rb_cref_t *cref = vm_get_cref(cfp->ep);
3712 for (; cref; cref = CREF_NEXT(cref)) {
3713 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
3714 if (
NIL_P(refinement))
continue;
3717 rb_callable_method_entry(refinement, mid);
3720 if (vm_cc_call(cc) == vm_call_super_method) {
3723 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
3728 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
3729 cme->def != ref_me->def) {
3732 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
3741 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
3742 return refined_method_callable_without_refinement(vm_cc_cme(cc));
3755 search_refined_method(ec, cfp, calling));
3757 if (vm_cc_cme(ref_cc)) {
3758 calling->cc= ref_cc;
3759 return vm_call_method(ec, cfp, calling);
3762 return vm_call_method_nome(ec, cfp, calling);
3768NOINLINE(
static VALUE
3776 int argc = calling->argc;
3779 if (argc > 0)
MEMMOVE(&TOPN(argc), &TOPN(argc-1),
VALUE, argc);
3782 return vm_invoke_block(ec, reg_cfp, calling, ci,
false, block_handler);
3788 RB_DEBUG_COUNTER_INC(ccf_opt_call);
3791 VALUE procval = calling->recv;
3792 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
3798 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
3800 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
3803 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
3804 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
3807 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
3808 calling->cc = rb_vm_search_method_slowpath(ci,
CLASS_OF(calling->recv));
3809 return vm_call_general(ec, reg_cfp, calling);
3816 VALUE recv = calling->recv;
3819 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
3820 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
3822 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
3823 return internal_RSTRUCT_GET(recv, off);
3829 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
3831 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
3839 VALUE recv = calling->recv;
3842 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
3843 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
3847 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
3848 internal_RSTRUCT_SET(recv, off, val);
3856 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
3858 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
3870 switch (vm_cc_cme(cc)->def->body.optimized.type) {
3871 case OPTIMIZED_METHOD_TYPE_SEND:
3872 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
3873 return vm_call_opt_send(ec, cfp, calling);
3874 case OPTIMIZED_METHOD_TYPE_CALL:
3875 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
3876 return vm_call_opt_call(ec, cfp, calling);
3877 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
3878 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
3879 return vm_call_opt_block_call(ec, cfp, calling);
3880 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
3881 CALLER_SETUP_ARG(cfp, calling, ci);
3882 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3884 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
3885 return vm_call_opt_struct_aref(ec, cfp, calling);
3887 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
3888 CALLER_SETUP_ARG(cfp, calling, ci);
3889 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3891 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
3892 return vm_call_opt_struct_aset(ec, cfp, calling);
3894 rb_bug(
"vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
3898#define VM_CALL_METHOD_ATTR(var, func, nohook) \
3899 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
3900 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
3901 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
3903 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
3904 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
3919 switch (cme->def->type) {
3920 case VM_METHOD_TYPE_ISEQ:
3921 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
3922 return vm_call_iseq_setup(ec, cfp, calling);
3924 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3925 case VM_METHOD_TYPE_CFUNC:
3926 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
3927 return vm_call_cfunc(ec, cfp, calling);
3929 case VM_METHOD_TYPE_ATTRSET:
3930 CALLER_SETUP_ARG(cfp, calling, ci);
3931 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3935 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);
3937 if (vm_cc_markable(cc)) {
3938 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
3939 VM_CALL_METHOD_ATTR(v,
3940 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
3941 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
3947 VM_CALLCACHE_UNMARKABLE |
3948 ((
VALUE)INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT) |
3949 VM_CALLCACHE_ON_STACK,
3955 .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
3960 VM_CALL_METHOD_ATTR(v,
3961 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
3962 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
3966 case VM_METHOD_TYPE_IVAR:
3967 CALLER_SETUP_ARG(cfp, calling, ci);
3968 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3970 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
3971 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
3972 VM_CALL_METHOD_ATTR(v,
3973 vm_call_ivar(ec, cfp, calling),
3974 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
3977 case VM_METHOD_TYPE_MISSING:
3978 vm_cc_method_missing_reason_set(cc, 0);
3979 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
3980 return vm_call_method_missing(ec, cfp, calling);
3982 case VM_METHOD_TYPE_BMETHOD:
3983 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
3984 return vm_call_bmethod(ec, cfp, calling);
3986 case VM_METHOD_TYPE_ALIAS:
3987 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
3988 return vm_call_alias(ec, cfp, calling);
3990 case VM_METHOD_TYPE_OPTIMIZED:
3991 return vm_call_optimized(ec, cfp, calling, ci, cc);
3993 case VM_METHOD_TYPE_UNDEF:
3996 case VM_METHOD_TYPE_ZSUPER:
3997 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
3999 case VM_METHOD_TYPE_REFINED:
4002 return vm_call_refined(ec, cfp, calling);
4005 rb_bug(
"vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4015 const int stat = ci_missing_reason(ci);
4017 if (vm_ci_mid(ci) == idMethodMissing) {
4019 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4020 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4023 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
4035 VALUE defined_class = me->defined_class;
4036 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4037 return NIL_P(refined_class) ? defined_class : refined_class;
4046 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4048 if (vm_cc_cme(cc) != NULL) {
4049 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4050 case METHOD_VISI_PUBLIC:
4051 return vm_call_method_each_type(ec, cfp, calling);
4053 case METHOD_VISI_PRIVATE:
4054 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4055 enum method_missing_reason stat = MISSING_PRIVATE;
4056 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4058 vm_cc_method_missing_reason_set(cc, stat);
4059 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4060 return vm_call_method_missing(ec, cfp, calling);
4062 return vm_call_method_each_type(ec, cfp, calling);
4064 case METHOD_VISI_PROTECTED:
4065 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4066 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4068 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4069 return vm_call_method_missing(ec, cfp, calling);
4073 VM_ASSERT(vm_cc_cme(cc) != NULL);
4076 calling->cc = &cc_on_stack;
4077 return vm_call_method_each_type(ec, cfp, calling);
4080 return vm_call_method_each_type(ec, cfp, calling);
4087 return vm_call_method_nome(ec, cfp, calling);
4094 RB_DEBUG_COUNTER_INC(ccf_general);
4095 return vm_call_method(ec, reg_cfp, calling);
4101 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
4102 VM_ASSERT(cc != vm_cc_empty());
4104 *(vm_call_handler *)&cc->call_ = vm_call_general;
4110 RB_DEBUG_COUNTER_INC(ccf_super_method);
4115 if (ec == NULL)
rb_bug(
"unreachable");
4118 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
4119 return vm_call_method(ec, reg_cfp, calling);
4125vm_search_normal_superclass(
VALUE klass)
4130 klass =
RBASIC(klass)->klass;
4132 klass = RCLASS_ORIGIN(klass);
4136NORETURN(
static void vm_super_outside(
void));
4139vm_super_outside(
void)
4145empty_cc_for_super(
void)
4148 return rb_vm_empty_cc_for_super();
4150 return &vm_empty_cc_for_super;
4157 VALUE current_defined_class;
4164 current_defined_class = vm_defined_class_for_protected_call(me);
4167 reg_cfp->iseq != method_entry_iseqptr(me) &&
4170 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
4174 "self has wrong type to call super in this context: "
4175 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
4180 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
4182 "implicit argument passing of super from method defined"
4183 " by define_method() is not supported."
4184 " Specify all arguments explicitly.");
4187 ID mid = me->def->original_id;
4190 cd->ci = vm_ci_new_runtime(mid,
4193 vm_ci_kwarg(cd->ci));
4199 VALUE klass = vm_search_normal_superclass(me->defined_class);
4203 cc = vm_cc_new(klass, NULL, vm_call_method_missing);
4207 cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd, klass);
4211 if (cached_cme == NULL) {
4213 cd->cc = empty_cc_for_super();
4215 else if (cached_cme->called_id != mid) {
4218 cc = vm_cc_new(klass, cme, vm_call_super_method);
4222 cd->cc = cc = empty_cc_for_super();
4226 switch (cached_cme->def->type) {
4228 case VM_METHOD_TYPE_REFINED:
4230 case VM_METHOD_TYPE_ATTRSET:
4231 case VM_METHOD_TYPE_IVAR:
4232 vm_cc_call_set(cc, vm_call_super_method);
4240 VM_ASSERT((vm_cc_cme(cc),
true));
4248block_proc_is_lambda(
const VALUE procval)
4253 GetProcPtr(procval, proc);
4254 return proc->is_lambda;
4264 VALUE self,
int argc,
const VALUE *argv,
int kw_splat,
VALUE block_handler,
4267 int is_lambda = FALSE;
4268 VALUE val, arg, blockarg;
4270 const struct vm_ifunc *ifunc = captured->code.ifunc;
4275 else if (argc == 0) {
4282 blockarg = rb_vm_bh_to_procval(ec, block_handler);
4284 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
4286 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
4289 vm_push_frame(ec, (
const rb_iseq_t *)captured->code.ifunc,
4292 VM_GUARDED_PREV_EP(captured->ep),
4294 0, ec->cfp->sp, 0, 0);
4295 val = (*ifunc->func)(arg, (
VALUE)ifunc->data, argc, argv, blockarg);
4296 rb_vm_pop_frame(ec);
4304 return rb_sym_proc_call(
SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
4313 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4315 for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
4323vm_callee_setup_block_arg_arg0_check(
VALUE *argv)
4325 VALUE ary, arg0 = argv[0];
4326 ary = rb_check_array_type(arg0);
4330 VM_ASSERT(argv[0] == arg0);
4338 if (rb_simple_iseq_p(iseq)) {
4342 CALLER_SETUP_ARG(cfp, calling, ci);
4343 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
4345 if (arg_setup_type == arg_setup_block &&
4346 calling->argc == 1 &&
4347 ISEQ_BODY(iseq)->param.flags.has_lead &&
4348 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
4349 !
NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
4350 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
4353 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
4354 if (arg_setup_type == arg_setup_block) {
4355 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
4357 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4358 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] =
Qnil;
4359 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
4361 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
4362 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
4366 argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
4373 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
4382 calling = &calling_entry;
4383 calling->argc = argc;
4384 calling->block_handler = block_handler;
4385 calling->kw_splat = kw_splat;
4387 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, (kw_splat ? VM_CALL_KW_SPLAT : 0), 0, 0);
4389 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
4397 bool is_lambda,
VALUE block_handler)
4400 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4401 const int arg_size = ISEQ_BODY(iseq)->param.size;
4402 VALUE *
const rsp = GET_SP() - calling->argc;
4403 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, rsp, is_lambda ? arg_setup_method : arg_setup_block);
4407 vm_push_frame(ec, iseq,
4408 VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
4410 VM_GUARDED_PREV_EP(captured->ep), 0,
4411 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4413 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
4421 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
4423 if (calling->argc < 1) {
4427 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
4428 CALLER_SETUP_ARG(reg_cfp, calling, ci);
4429 calling->recv = TOPN(--calling->argc);
4430 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, 0);
4437 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
4442 CALLER_SETUP_ARG(ec->cfp, calling, ci);
4443 CALLER_REMOVE_EMPTY_KW_SPLAT(ec->cfp, calling, ci);
4444 argc = calling->argc;
4445 val = vm_yield_with_cfunc(ec, captured, captured->self, argc, STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
4451vm_proc_to_block_handler(
VALUE procval)
4453 const struct rb_block *block = vm_proc_block(procval);
4455 switch (vm_block_type(block)) {
4456 case block_type_iseq:
4457 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
4458 case block_type_ifunc:
4459 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
4460 case block_type_symbol:
4461 return VM_BH_FROM_SYMBOL(block->as.symbol);
4462 case block_type_proc:
4463 return VM_BH_FROM_PROC(block->as.proc);
4465 VM_UNREACHABLE(vm_yield_with_proc);
4472 bool is_lambda,
VALUE block_handler)
4474 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
4475 VALUE proc = VM_BH_TO_PROC(block_handler);
4476 is_lambda = block_proc_is_lambda(proc);
4477 block_handler = vm_proc_to_block_handler(proc);
4480 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
4486 bool is_lambda,
VALUE block_handler)
4490 bool is_lambda,
VALUE block_handler);
4492 switch (vm_block_handler_type(block_handler)) {
4493 case block_handler_type_iseq: func = vm_invoke_iseq_block;
break;
4494 case block_handler_type_ifunc: func = vm_invoke_ifunc_block;
break;
4495 case block_handler_type_proc: func = vm_invoke_proc_block;
break;
4496 case block_handler_type_symbol: func = vm_invoke_symbol_block;
break;
4497 default:
rb_bug(
"vm_invoke_block: unreachable");
4500 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
4504vm_make_proc_with_iseq(
const rb_iseq_t *blockiseq)
4511 rb_bug(
"vm_make_proc_with_iseq: unreachable");
4514 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
4515 captured->code.iseq = blockiseq;
4517 return rb_vm_make_proc(ec, captured,
rb_cProc);
4521vm_once_exec(
VALUE iseq)
4528vm_once_clear(
VALUE data)
4531 is->once.running_thread = NULL;
4543 args[0] = obj; args[1] =
Qfalse;
4545 if (!UNDEF_P(r) &&
RTEST(r)) {
4557 enum defined_type
type = (
enum defined_type)op_type;
4564 return rb_gvar_defined(
SYM2ID(obj));
4566 case DEFINED_CVAR: {
4567 const rb_cref_t *cref = vm_get_cref(GET_EP());
4568 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
4573 case DEFINED_CONST_FROM: {
4574 bool allow_nil =
type == DEFINED_CONST;
4576 return vm_get_ev_const(ec, klass,
SYM2ID(obj), allow_nil,
true);
4581 return rb_ec_obj_respond_to(ec, v,
SYM2ID(obj), TRUE);
4583 case DEFINED_METHOD:{
4588 switch (METHOD_ENTRY_VISI(me)) {
4589 case METHOD_VISI_PRIVATE:
4591 case METHOD_VISI_PROTECTED:
4595 case METHOD_VISI_PUBLIC:
4599 rb_bug(
"vm_defined: unreachable: %u", (
unsigned int)METHOD_ENTRY_VISI(me));
4603 return check_respond_to_missing(obj, v);
4608 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
4612 case DEFINED_ZSUPER:
4617 VALUE klass = vm_search_normal_superclass(me->defined_class);
4618 ID id = me->def->original_id;
4629 rb_bug(
"unimplemented defined? type (VM)");
4639 return vm_defined(ec, reg_cfp, op_type, obj, v);
4643vm_get_ep(
const VALUE *
const reg_ep, rb_num_t lv)
4646 const VALUE *ep = reg_ep;
4647 for (i = 0; i < lv; i++) {
4648 ep = GET_PREV_EP(ep);
4654vm_get_special_object(
const VALUE *
const reg_ep,
4655 enum vm_special_object_type
type)
4658 case VM_SPECIAL_OBJECT_VMCORE:
4659 return rb_mRubyVMFrozenCore;
4660 case VM_SPECIAL_OBJECT_CBASE:
4661 return vm_get_cbase(reg_ep);
4662 case VM_SPECIAL_OBJECT_CONST_BASE:
4663 return vm_get_const_base(reg_ep);
4665 rb_bug(
"putspecialobject insn: unknown value_type %d",
type);
4672 const VALUE ary2 = ary2st;
4673 VALUE tmp1 = rb_check_to_array(ary1);
4674 VALUE tmp2 = rb_check_to_array(ary2);
4685 tmp1 = rb_ary_dup(ary1);
4687 return rb_ary_concat(tmp1, tmp2);
4695 return vm_concat_array(ary1, ary2st);
4701 VALUE tmp = rb_check_to_array(ary);
4705 else if (
RTEST(flag)) {
4706 return rb_ary_dup(tmp);
4718 return vm_splat_array(flag, ary);
4724 enum vm_check_match_type
type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
4726 if (flag & VM_CHECKMATCH_ARRAY) {
4730 for (i = 0; i < n; i++) {
4732 VALUE c = check_match(ec, v, target,
type);
4741 return check_match(ec, pattern, target,
type);
4746vm_check_keyword(lindex_t bits, lindex_t idx,
const VALUE *ep)
4748 const VALUE kw_bits = *(ep - bits);
4751 unsigned int b = (
unsigned int)
FIX2ULONG(kw_bits);
4752 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
4765 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
4766 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
4767 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
4768 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
4772 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
4775 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
4778 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
4781 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
4788vm_const_get_under(
ID id, rb_num_t flags,
VALUE cbase)
4793 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
4794 return rb_public_const_get_at(cbase,
id);
4802vm_check_if_class(
ID id, rb_num_t flags,
VALUE super,
VALUE klass)
4807 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
4812 "superclass mismatch for class %"PRIsVALUE
"",
4825vm_check_if_module(
ID id,
VALUE mod)
4844vm_declare_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
4847 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
4855vm_declare_module(
ID id,
VALUE cbase)
4861NORETURN(
static void unmatched_redefinition(
const char *
type,
VALUE cbase,
ID id,
VALUE old));
4865 VALUE name = rb_id2str(
id);
4868 VALUE location = rb_const_source_location_at(cbase,
id);
4869 if (!
NIL_P(location)) {
4870 rb_str_catf(message,
"\n%"PRIsVALUE
":%"PRIsVALUE
":"
4871 " previous definition of %"PRIsVALUE
" was here",
4872 rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
4878vm_define_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
4882 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !
RB_TYPE_P(super,
T_CLASS)) {
4884 "superclass must be an instance of Class (given an instance of %"PRIsVALUE
")",
4888 vm_check_if_namespace(cbase);
4892 if ((klass = vm_const_get_under(
id, flags, cbase)) != 0) {
4893 if (!vm_check_if_class(
id, flags, super, klass))
4894 unmatched_redefinition(
"class", cbase,
id, klass);
4898 return vm_declare_class(
id, flags, cbase, super);
4903vm_define_module(
ID id, rb_num_t flags,
VALUE cbase)
4907 vm_check_if_namespace(cbase);
4908 if ((mod = vm_const_get_under(
id, flags, cbase)) != 0) {
4909 if (!vm_check_if_module(
id, mod))
4910 unmatched_redefinition(
"module", cbase,
id, mod);
4914 return vm_declare_module(
id, cbase);
4919vm_find_or_create_class_by_id(
ID id,
4924 rb_vm_defineclass_type_t
type = VM_DEFINECLASS_TYPE(flags);
4927 case VM_DEFINECLASS_TYPE_CLASS:
4929 return vm_define_class(
id, flags, cbase, super);
4931 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
4935 case VM_DEFINECLASS_TYPE_MODULE:
4937 return vm_define_module(
id, flags, cbase);
4940 rb_bug(
"unknown defineclass type: %d", (
int)
type);
4944static rb_method_visibility_t
4949 if (!vm_env_cref_by_cref(cfp->ep)) {
4950 return METHOD_VISI_PUBLIC;
4953 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
4962 if (!vm_env_cref_by_cref(cfp->ep)) {
4966 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
4974 rb_method_visibility_t visi;
4979 visi = METHOD_VISI_PUBLIC;
4982 klass = CREF_CLASS_FOR_DEFINITION(cref);
4983 visi = vm_scope_visibility_get(ec);
4990 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, visi);
4994 RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (
const rb_iseq_t *)iseqval);
4997 if (!is_singleton && vm_scope_module_func_check(ec)) {
4999 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
5009 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
5011 if (block_handler == VM_BLOCK_HANDLER_NONE) {
5012 rb_vm_localjump_error(
"no block given (yield)",
Qnil, 0);
5015 return vm_invoke_block(ec, GET_CFP(), calling, ci,
false, block_handler);
5023 return vm_search_method((
VALUE)reg_cfp->iseq, cd, recv);
5030 .flags =
T_IMEMO | (imemo_callcache <<
FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
5033 .call_ = vm_invokeblock_i,
5039# define mexp_search_method vm_search_method_wrap
5040# define mexp_search_super vm_search_super_method
5041# define mexp_search_invokeblock vm_search_invokeblock
5043enum method_explorer_type {
5045 mexp_search_invokeblock,
5059 VALUE block_handler,
5063 enum method_explorer_type method_explorer
5069 int argc = vm_ci_argc(ci);
5070 VALUE recv = TOPN(argc);
5072 .block_handler = block_handler,
5073 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
5081 calling.cc = cc = method_explorer(GET_CFP(), cd, recv);
5082 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5084 switch (method_explorer) {
5085 case mexp_search_method:
5086 calling.cc = cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd,
CLASS_OF(recv));
5087 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5089 case mexp_search_super:
5090 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
5091 calling.ci = cd->ci;
5092 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5094 case mexp_search_invokeblock:
5095 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
5100 if (!UNDEF_P(val)) {
5114 if (ISEQ_BODY(GET_ISEQ())->catch_except_p) {
5115 VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
5116 return vm_exec(ec,
true);
5118 else if (UNDEF_P(val = jit_exec(ec))) {
5119 VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
5120 return vm_exec(ec,
false);
5128 return jit_exec(ec);
5164 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
5170 val = rb_mod_to_s(recv);
5176 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
5177 return rb_nil_to_s(recv);
5181 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
5182 return rb_true_to_s(recv);
5186 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
5187 return rb_false_to_s(recv);
5191 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
5192 return rb_fix_to_s(recv);
5200vm_opt_str_freeze(
VALUE str,
int bop,
ID id)
5202 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5216 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
5221 VALUE result = *ptr;
5222 rb_snum_t i = num - 1;
5224 const VALUE v = *++ptr;
5225 if (OPTIMIZED_CMP(v, result) > 0) {
5240 return vm_opt_newarray_max(ec, num, ptr);
5246 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
5251 VALUE result = *ptr;
5252 rb_snum_t i = num - 1;
5254 const VALUE v = *++ptr;
5255 if (OPTIMIZED_CMP(v, result) < 0) {
5270 return vm_opt_newarray_min(ec, num, ptr);
5275#define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0
5278vm_track_constant_cache(
ID id,
void *ic)
5280 struct rb_id_table *const_cache = GET_VM()->constant_cache;
5281 VALUE lookup_result;
5284 if (rb_id_table_lookup(const_cache,
id, &lookup_result)) {
5288 ics = st_init_numtable();
5289 rb_id_table_insert(const_cache,
id, (
VALUE)ics);
5292 st_insert(ics, (st_data_t) ic, (st_data_t)
Qtrue);
5300 for (
int i = 0; segments[i]; i++) {
5301 ID id = segments[i];
5302 if (
id == idNULL)
continue;
5303 vm_track_constant_cache(
id, ic);
5313 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
5314 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
5316 return (ic_cref == NULL ||
5317 ic_cref == vm_get_cref(reg_ep));
5325 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
5326 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
5331rb_vm_ic_hit_p(
IC ic,
const VALUE *reg_ep)
5333 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
5339 if (ruby_vm_const_missing_count > 0) {
5340 ruby_vm_const_missing_count = 0;
5347 ice->ic_cref = vm_get_const_key_cref(reg_ep);
5352 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
5353 rb_yjit_constant_ic_update(iseq, ic, pos);
5354 rb_mjit_constant_ic_update(iseq, ic, pos);
5364 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
5365 return is->once.value;
5367 else if (is->once.running_thread == NULL) {
5369 is->once.running_thread = th;
5373 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
5376 else if (is->once.running_thread == th) {
5378 return vm_once_exec((
VALUE)iseq);
5382 RUBY_VM_CHECK_INTS(ec);
5389vm_case_dispatch(CDHASH hash, OFFSET else_offset,
VALUE key)
5391 switch (OBJ_BUILTIN_TYPE(key)) {
5397 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
5398 SYMBOL_REDEFINED_OP_FLAG |
5399 INTEGER_REDEFINED_OP_FLAG |
5400 FLOAT_REDEFINED_OP_FLAG |
5401 NIL_REDEFINED_OP_FLAG |
5402 TRUE_REDEFINED_OP_FLAG |
5403 FALSE_REDEFINED_OP_FLAG |
5404 STRING_REDEFINED_OP_FLAG)) {
5408 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
5412 if (rb_hash_stlike_lookup(hash, key, &val)) {
5432 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
5433 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
5434 static const char stack_consistency_error[] =
5435 "Stack consistency error (sp: %"PRIdPTRDIFF
", bp: %"PRIdPTRDIFF
")";
5436#if defined RUBY_DEVEL
5442 rb_bug(stack_consistency_error, nsp, nbp);
5449 if (FIXNUM_2_P(recv, obj) &&
5450 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
5451 return rb_fix_plus_fix(recv, obj);
5453 else if (FLONUM_2_P(recv, obj) &&
5454 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
5462 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
5467 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
5468 return rb_str_opt_plus(recv, obj);
5472 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
5473 return rb_ary_plus(recv, obj);
5483 if (FIXNUM_2_P(recv, obj) &&
5484 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
5485 return rb_fix_minus_fix(recv, obj);
5487 else if (FLONUM_2_P(recv, obj) &&
5488 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
5496 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
5507 if (FIXNUM_2_P(recv, obj) &&
5508 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
5509 return rb_fix_mul_fix(recv, obj);
5511 else if (FLONUM_2_P(recv, obj) &&
5512 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
5520 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
5531 if (FIXNUM_2_P(recv, obj) &&
5532 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
5533 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_div_fix(recv, obj);
5535 else if (FLONUM_2_P(recv, obj) &&
5536 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
5537 return rb_flo_div_flo(recv, obj);
5544 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
5545 return rb_flo_div_flo(recv, obj);
5555 if (FIXNUM_2_P(recv, obj) &&
5556 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
5557 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_mod_fix(recv, obj);
5559 else if (FLONUM_2_P(recv, obj) &&
5560 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
5568 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
5579 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
5580 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
5582 if (!UNDEF_P(val)) {
5583 return RBOOL(!
RTEST(val));
5593 if (FIXNUM_2_P(recv, obj) &&
5594 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
5597 else if (FLONUM_2_P(recv, obj) &&
5598 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
5606 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
5618 if (FIXNUM_2_P(recv, obj) &&
5619 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
5622 else if (FLONUM_2_P(recv, obj) &&
5623 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
5631 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
5643 if (FIXNUM_2_P(recv, obj) &&
5644 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
5647 else if (FLONUM_2_P(recv, obj) &&
5648 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5656 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5668 if (FIXNUM_2_P(recv, obj) &&
5669 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
5672 else if (FLONUM_2_P(recv, obj) &&
5673 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5681 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5698 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
5707 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
5708 return rb_ary_push(recv, obj);
5725 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
5736 if (FIXNUM_2_P(recv, obj) &&
5737 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
5749 if (FIXNUM_2_P(recv, obj) &&
5750 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
5751 return rb_fix_aref(recv, obj);
5756 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
5758 return rb_ary_entry_internal(recv,
FIX2LONG(obj));
5761 return rb_ary_aref1(recv, obj);
5765 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
5766 return rb_hash_aref(recv, obj);
5780 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
5782 rb_ary_store(recv,
FIX2LONG(obj), set);
5786 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
5787 rb_hash_aset(recv, obj, set);
5799 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
5800 rb_hash_compare_by_id_p(recv) ==
Qfalse &&
5801 !
FL_TEST(recv, RHASH_PROC_DEFAULT)) {
5802 return rb_hash_aref(recv, key);
5813 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
5814 rb_hash_compare_by_id_p(recv) ==
Qfalse) {
5815 return rb_hash_aset(recv, key, val);
5823vm_opt_length(
VALUE recv,
int bop)
5829 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5830 if (bop == BOP_EMPTY_P) {
5838 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
5842 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
5851vm_opt_empty_p(
VALUE recv)
5853 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
5866 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
5869 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
5885 case RSHIFT(~0UL, 1):
5888 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
5906vm_opt_succ(
VALUE recv)
5909 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
5910 return fix_succ(recv);
5916 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
5927 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
5928 return RBOOL(!
RTEST(recv));
5943 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
5947 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
5965 VALUE self = GET_SELF();
5967 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
5969 if (event & global_hooks->events) {
5972 vm_dtrace(event, ec);
5973 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
5979 if (local_hooks != NULL) {
5980 if (event & local_hooks->events) {
5983 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
5995 return check_cfunc(vm_cc_cme(cc), rb_obj_equal);
5996 case BIN(opt_nil_p):
5997 return check_cfunc(vm_cc_cme(cc), rb_false);
5999 return check_cfunc(vm_cc_cme(cc), rb_obj_not);
6005#define VM_TRACE_HOOK(target_event, val) do { \
6006 if ((pc_events & (target_event)) & enabled_flags) { \
6007 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
6014 const VALUE *pc = reg_cfp->pc;
6015 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
6018 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
6024 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
6027 rb_hook_list_t *
const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
6028 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
6032 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
6033 enabled_flags |= iseq_local_events;
6035 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
6037 if (bmethod_frame) {
6039 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
6040 bmethod_local_hooks = me->def->body.bmethod.hooks;
6041 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
6042 if (bmethod_local_hooks) {
6043 bmethod_local_events = bmethod_local_hooks->events;
6048 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
6052 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
6060 else if (ec->trace_arg != NULL) {
6068 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
6071 ruby_debug_printf(
"vm_trace>>%4d (%4x) - %s:%d %s\n",
6075 (
int)rb_iseq_line_no(iseq, pos),
6078 VM_ASSERT(reg_cfp->pc == pc);
6079 VM_ASSERT(pc_events != 0);
6088 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,
Qundef);
6089 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,
Qundef);
6107#if VM_CHECK_MODE > 0
6108NORETURN( NOINLINE( COLDFUNC
6109void rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)));
6112Init_vm_stack_canary(
void)
6115 int n = ruby_fill_random_bytes(&vm_stack_canary,
sizeof vm_stack_canary,
false);
6116 vm_stack_canary |= 0x01;
6118 vm_stack_canary_was_born =
true;
6123MJIT_FUNC_EXPORTED
void
6124rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)
6128 const char *insn = rb_insns_name(i);
6132 rb_bug(
"dead canary found at %s: %s", insn, str);
/* No-op variant of the canary initializer — presumably compiled when
 * VM_CHECK_MODE == 0 (the #else of the canary block above) so callers can
 * invoke it unconditionally without an #ifdef.  NOTE(review): the enclosing
 * preprocessor branch is not visible in this chunk — confirm. */
6137void Init_vm_stack_canary(
void) { }
6169 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
6176 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
6183 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
6190 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
6197 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
6204 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
6211 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
6218 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
6225 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
6231 typedef VALUE (*rb_invoke_funcptr9_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9);
6232 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
6238 typedef VALUE (*rb_invoke_funcptr10_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10);
6239 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
6245 typedef VALUE (*rb_invoke_funcptr11_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11);
6246 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
6252 typedef VALUE (*rb_invoke_funcptr12_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12);
6253 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
6259 typedef VALUE (*rb_invoke_funcptr13_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13);
6260 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
6266 typedef VALUE (*rb_invoke_funcptr14_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14);
6267 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
6273 typedef VALUE (*rb_invoke_funcptr15_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14,
VALUE v15);
6274 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
6279static builtin_invoker
6280lookup_builtin_invoker(
int argc)
6282 static const builtin_invoker invokers[] = {
6301 return invokers[argc];
6307 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_inline_p;
6308 SETUP_CANARY(canary_p);
6309 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, (rb_insn_func_t)bf->func_ptr);
6310 CHECK_CANARY(canary_p, BIN(invokebuiltin));
6317 return invoke_bf(ec, cfp, bf, argv);
6324 fputs(
"vm_invoke_builtin_delegate: passing -> ", stderr);
6325 for (
int i=0; i<bf->argc; i++) {
6326 ruby_debug_printf(
":%s ",
rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
6328 ruby_debug_printf(
"\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
6331 if (bf->argc == 0) {
6332 return invoke_bf(ec, cfp, bf, NULL);
6335 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
6336 return invoke_bf(ec, cfp, bf, argv);
6346 return cfp->ep[index];
#define RUBY_ASSERT(expr)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered a yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
VALUE rb_module_new(void)
Creates a new, anonymous module.
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class::inherited.
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
#define TYPE(_)
Old name of rb_type.
#define FL_SINGLETON
Old name of RUBY_FL_SINGLETON.
#define FL_EXIVAR
Old name of RUBY_FL_EXIVAR.
#define REALLOC_N
Old name of RB_REALLOC_N.
#define ALLOC
Old name of RB_ALLOC.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define T_NIL
Old name of RUBY_T_NIL.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define T_IMEMO
Old name of RUBY_T_IMEMO.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
#define SYM2ID
Old name of RB_SYM2ID.
#define CLASS_OF
Old name of rb_class_of.
#define rb_ary_new4
Old name of rb_ary_new_from_values.
#define FIXABLE
Old name of RB_FIXABLE.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define T_MODULE
Old name of RUBY_T_MODULE.
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define FIX2ULONG
Old name of RB_FIX2ULONG.
#define T_TRUE
Old name of RUBY_T_TRUE.
#define T_ICLASS
Old name of RUBY_T_ICLASS.
#define T_HASH
Old name of RUBY_T_HASH.
#define ALLOC_N
Old name of RB_ALLOC_N.
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
#define rb_ary_new3
Old name of rb_ary_new_from_args.
#define LONG2NUM
Old name of RB_LONG2NUM.
#define rb_exc_new3
Old name of rb_exc_new_str.
#define T_FALSE
Old name of RUBY_T_FALSE.
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define T_CLASS
Old name of RUBY_T_CLASS.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define FL_TEST
Old name of RB_FL_TEST.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
void rb_notimplement(void)
void rb_raise(VALUE exc, const char *fmt,...)
Exception entry point.
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
void rb_bug(const char *fmt,...)
Interpreter panic switch.
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eFatal
fatal exception.
VALUE rb_eNoMethodError
NoMethodError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports always regardless of runtime -W flag.
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
VALUE rb_eArgError
ArgumentError exception.
VALUE rb_cClass
Class class.
VALUE rb_cArray
Array class.
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
VALUE rb_cRegexp
Regexp class.
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
VALUE rb_cHash
Hash class.
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cBasicObject
BasicObject class.
VALUE rb_cModule
Module class.
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_cFloat
Float class.
VALUE rb_cProc
Proc class.
VALUE rb_cString
String class.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
#define rb_check_frozen
Just another name of rb_check_frozen.
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
#define rb_check_frozen_internal(obj)
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_sym_to_s(VALUE sym)
This is an rb_sym2str() + rb_str_dup() combo.
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
void rb_thread_schedule(void)
Tries to switch to another thread.
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
VALUE rb_attr_get(VALUE obj, ID name)
Identical to rb_ivar_get()
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
ID rb_intern(const char *name)
Finds or creates a symbol of the given name.
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
const char * rb_id2name(ID id)
Retrieves the name mapped to the given id.
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
VALUE rb_sprintf(const char *fmt,...)
Ruby's extended sprintf(3).
VALUE rb_str_catf(VALUE dst, const char *fmt,...)
Identical to rb_sprintf(), except it renders the output to the specified object rather than creating a new one.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
VALUE type(ANYARGS)
ANYARGS-ed function type.
VALUE rb_ensure(type *q, VALUE w, type *e, VALUE r)
An equivalent of ensure clause.
#define RARRAY_LEN
Just another name of rb_array_len.
#define RARRAY_CONST_PTR_TRANSIENT
Just another name of rb_array_const_ptr_transient.
#define RARRAY_AREF(a, i)
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RBASIC(obj)
Convenient casting macro.
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
#define RHASH_SIZE(h)
Queries the size of the hash.
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
static long RSTRING_LEN(VALUE str)
Queries the length of the string.
static char * RSTRING_PTR(VALUE str)
Queries the contents pointer of the string.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks the contents for viability as a C string.
#define RB_NO_KEYWORDS
Do not pass keywords.
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
rb_cref_t * cref
class reference, should be marked
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
IFUNC (Internal FUNCtion)
const VALUE cref_or_me
class reference or rb_method_entry_t
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
uintptr_t VALUE
Type that represents a Ruby object.
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.