Ruby 2.5.0dev (2017-10-22 revision 60238)
vm_insnhelper.c
1 /**********************************************************************
2 
3  vm_insnhelper.c - instruction helper functions.
4 
5  $Author$
6 
7  Copyright (C) 2007 Koichi Sasada
8 
9 **********************************************************************/
10 
11 /* finish iseq array */
12 #include "insns.inc"
13 #include <math.h>
14 #include "constant.h"
15 #include "internal.h"
16 #include "probes.h"
17 #include "probes_helper.h"
18 #include "ruby/config.h"
19 #include "debug_counter.h"
20 
21 /* control stack frame */
22 
23 static rb_control_frame_t *vm_get_ruby_level_caller_cfp(const rb_thread_t *th, const rb_control_frame_t *cfp);
24 
25 VALUE
26 ruby_vm_special_exception_copy(VALUE exc)
27 {
28  VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
29  rb_obj_copy_ivar(e, exc);
30  return e;
31 }
32 
33 NORETURN(static void threadptr_stack_overflow(rb_thread_t *, int));
34 static void
35 threadptr_stack_overflow(rb_thread_t *th, int setup)
36 {
39  if (setup) {
41  mesg = ruby_vm_special_exception_copy(mesg);
42  rb_ivar_set(mesg, idBt, at);
43  rb_ivar_set(mesg, idBt_locations, at);
44  }
45  th->ec.errinfo = mesg;
47 }
48 
49 static void
50 vm_stackoverflow(void)
51 {
52  threadptr_stack_overflow(GET_THREAD(), TRUE);
53 }
54 
56 void
57 rb_threadptr_stack_overflow(rb_thread_t *th, int crit)
58 {
59  if (crit || rb_during_gc()) {
63  }
64 #ifdef USE_SIGALTSTACK
65  threadptr_stack_overflow(th, TRUE);
66 #else
67  threadptr_stack_overflow(th, FALSE);
68 #endif
69 }
70 
71 
72 #if VM_CHECK_MODE > 0
73 static int
74 callable_class_p(VALUE klass)
75 {
76 #if VM_CHECK_MODE >= 2
77  if (!klass) return FALSE;
78  switch (RB_BUILTIN_TYPE(klass)) {
79  case T_ICLASS:
80  if (!RB_TYPE_P(RCLASS_SUPER(klass), T_MODULE)) break;
81  case T_MODULE:
82  return TRUE;
83  }
84  while (klass) {
85  if (klass == rb_cBasicObject) {
86  return TRUE;
87  }
88  klass = RCLASS_SUPER(klass);
89  }
90  return FALSE;
91 #else
92  return klass != 0;
93 #endif
94 }
95 
96 static int
97 callable_method_entry_p(const rb_callable_method_entry_t *me)
98 {
99  if (me == NULL || callable_class_p(me->defined_class)) {
100  return TRUE;
101  }
102  else {
103  return FALSE;
104  }
105 }
106 
107 static void
108 vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref, VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
109 {
110  unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
111  enum imemo_type cref_or_me_type = imemo_env; /* impossible value */
112 
113  if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
114  cref_or_me_type = imemo_type(cref_or_me);
115  }
116  if (type & VM_FRAME_FLAG_BMETHOD) {
117  req_me = TRUE;
118  }
119 
120  if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
121  rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
122  }
123  if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
124  rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
125  }
126 
127  if (req_me) {
128  if (cref_or_me_type != imemo_ment) {
129  rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
130  }
131  }
132  else {
133  if (req_cref && cref_or_me_type != imemo_cref) {
134  rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
135  }
136  else { /* cref or Qfalse */
137  if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
138  if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
139  /* ignore */
140  }
141  else {
142  rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
143  }
144  }
145  }
146  }
147 
148  if (cref_or_me_type == imemo_ment) {
149  const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;
150 
151  if (!callable_method_entry_p(me)) {
152  rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
153  }
154  }
155 
156  if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
157  VM_ASSERT(iseq == NULL ||
158  RUBY_VM_NORMAL_ISEQ_P(iseq) /* argument error. it should be fixed */);
159  }
160  else {
161  VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
162  }
163 }
164 
165 static void
166 vm_check_frame(VALUE type,
167  VALUE specval,
168  VALUE cref_or_me,
169  const rb_iseq_t *iseq)
170 {
171  VALUE given_magic = type & VM_FRAME_MAGIC_MASK;
172  VM_ASSERT(FIXNUM_P(type));
173 
174 #define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
175  case magic: \
176  vm_check_frame_detail(type, req_block, req_me, req_cref, \
177  specval, cref_or_me, is_cframe, iseq); \
178  break
179  switch (given_magic) {
180  /* BLK ME CREF CFRAME */
190  default:
191  rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
192  }
193 #undef CHECK
194 }
195 #else
196 #define vm_check_frame(a, b, c, d)
197 #endif /* VM_CHECK_MODE > 0 */
198 
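/* vm_push_frame_() below builds a new control frame and its environment on
 * the VM value stack.  Roughly:
 *
 *     sp[0] .. sp[local_size-1]  locals, initialized to Qnil
 *     ep[-2] = cref_or_me        Qnil, T_IMEMO(cref) or T_IMEMO(ment)
 *     ep[-1] = specval           block handler or previous env pointer
 *     ep[ 0] = type              frame magic and VM_ENV_FLAG_* bits
 *
 * cfp->ep is left pointing at the flags slot and cfp->sp one slot above it. */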
199 static inline rb_control_frame_t *
200 vm_push_frame_(rb_execution_context_t *ec,
201  const rb_iseq_t *iseq,
202  VALUE type,
203  VALUE self,
204  VALUE specval,
205  VALUE cref_or_me,
206  const VALUE *pc,
207  VALUE *sp,
208  int local_size,
209  int stack_max)
210 {
211  rb_control_frame_t *const cfp = ec->cfp - 1;
212  int i;
213 
214  vm_check_frame(type, specval, cref_or_me, iseq);
215  VM_ASSERT(local_size >= 0);
216 
217  /* check stack overflow */
218  CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
219 
220  ec->cfp = cfp;
221 
222  /* setup new frame */
223  cfp->pc = (VALUE *)pc;
224  cfp->iseq = (rb_iseq_t *)iseq;
225  cfp->self = self;
226  cfp->block_code = NULL;
227 
228  /* setup vm value stack */
229 
230  /* initialize local variables */
231  for (i=0; i < local_size; i++) {
232  *sp++ = Qnil;
233  }
234 
235  /* setup ep with managing data */
239  *sp++ = cref_or_me; /* ep[-2] / Qnil or T_IMEMO(cref) or T_IMEMO(ment) */
240  *sp++ = specval /* ep[-1] / block handler or prev env ptr */;
241  *sp = type; /* ep[-0] / ENV_FLAGS */
242 
243  cfp->ep = sp;
244  cfp->sp = sp + 1;
245 
246 #if VM_DEBUG_BP_CHECK
247  cfp->bp_check = sp + 1;
248 #endif
249 
250  if (VMDEBUG == 2) {
251  SDR();
252  }
253 
254  return cfp;
255 }
256 
257 static rb_control_frame_t *
258 vm_push_frame(rb_thread_t *th,
259  const rb_iseq_t *iseq,
260  VALUE type,
261  VALUE self,
262  VALUE specval,
263  VALUE cref_or_me,
264  const VALUE *pc,
265  VALUE *sp,
266  int local_size,
267  int stack_max)
268 {
269  return vm_push_frame_(&th->ec, iseq, type, self, specval, cref_or_me, pc, sp, local_size, stack_max);
270 }
271 
272 rb_control_frame_t *
273 rb_vm_push_frame(rb_execution_context_t *ec,
274  const rb_iseq_t *iseq,
275  VALUE type,
276  VALUE self,
277  VALUE specval,
278  VALUE cref_or_me,
279  const VALUE *pc,
280  VALUE *sp,
281  int local_size,
282  int stack_max)
283 {
284  return vm_push_frame_(ec, iseq, type, self, specval, cref_or_me, pc, sp, local_size, stack_max);
285 }
286 
287 /* return TRUE if the frame is finished */
288 static inline int
289 vm_pop_frame(rb_thread_t *th, rb_control_frame_t *cfp, const VALUE *ep)
290 {
291  VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
292 
294  if (VMDEBUG == 2) SDR();
295 
296  th->ec.cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
297 
298  return flags & VM_FRAME_FLAG_FINISH;
299 }
300 
301 void
302 rb_vm_pop_frame(rb_thread_t *th)
303 {
304  vm_pop_frame(th, th->ec.cfp, th->ec.cfp->ep);
305 }
306 
307 /* method dispatch */
308 static inline VALUE
309 rb_arity_error_new(int argc, int min, int max)
310 {
311  VALUE err_mess = 0;
312  if (min == max) {
313  err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d)", argc, min);
314  }
315  else if (max == UNLIMITED_ARGUMENTS) {
316  err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d+)", argc, min);
317  }
318  else {
319  err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d..%d)", argc, min, max);
320  }
321  return rb_exc_new3(rb_eArgError, err_mess);
322 }
323 
324 void
325 rb_error_arity(int argc, int min, int max)
326 {
327  rb_exc_raise(rb_arity_error_new(argc, min, max));
328 }
329 
330 /* lvar */
331 
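/* Writes to local variables normally go straight to the VM stack.  Only when
 * the environment carries VM_ENV_FLAG_WB_REQUIRED (i.e. it has been moved to
 * the heap) does vm_env_write() take the slow path, which re-registers the
 * env object with the GC write barrier before storing the value. */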
332 NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));
333 
334 static void
335 vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
336 {
337  /* remember env value forcibly */
338  rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
339  VM_FORCE_WRITE(&ep[index], v);
340  VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
341  RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
342 }
343 
344 static inline void
345 vm_env_write(const VALUE *ep, int index, VALUE v)
346 {
347  VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
348  if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
349  VM_STACK_ENV_WRITE(ep, index, v);
350  }
351  else {
352  vm_env_write_slowpath(ep, index, v);
353  }
354 }
355 
356 void
357 rb_vm_env_write(const VALUE *ep, int index, VALUE v)
358 {
359  vm_env_write(ep, index, v);
360 }
361 
362 
363 /* svar */
364 
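/* An svar (imemo_svar) stores the per-frame special variables: $_ (lastline),
 * $~ (backref) and any extra entries indexed from VM_SVAR_EXTRA_START.
 * lep_svar() finds it in the local EP's ME/CREF slot, falling back to the
 * thread's root_svar for the root frame. */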
365 #if VM_CHECK_MODE > 0
366 static int
367 vm_svar_valid_p(VALUE svar)
368 {
369  if (RB_TYPE_P((VALUE)svar, T_IMEMO)) {
370  switch (imemo_type(svar)) {
371  case imemo_svar:
372  case imemo_cref:
373  case imemo_ment:
374  return TRUE;
375  default:
376  break;
377  }
378  }
379  rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
380  return FALSE;
381 }
382 #endif
383 
384 static inline struct vm_svar *
385 lep_svar(rb_thread_t *th, const VALUE *lep)
386 {
387  VALUE svar;
388 
389  if (lep && (th == NULL || th->ec.root_lep != lep)) {
390  svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
391  }
392  else {
393  svar = th->ec.root_svar;
394  }
395 
396  VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));
397 
398  return (struct vm_svar *)svar;
399 }
400 
401 static inline void
402 lep_svar_write(rb_thread_t *th, const VALUE *lep, const struct vm_svar *svar)
403 {
404  VM_ASSERT(vm_svar_valid_p((VALUE)svar));
405 
406  if (lep && (th == NULL || th->ec.root_lep != lep)) {
407  vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
408  }
409  else {
410  RB_OBJ_WRITE(th->self, &th->ec.root_svar, svar);
411  }
412 }
413 
414 static VALUE
415 lep_svar_get(rb_thread_t *th, const VALUE *lep, rb_num_t key)
416 {
417  const struct vm_svar *svar = lep_svar(th, lep);
418 
419  if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) return Qnil;
420 
421  switch (key) {
422  case VM_SVAR_LASTLINE:
423  return svar->lastline;
424  case VM_SVAR_BACKREF:
425  return svar->backref;
426  default: {
427  const VALUE ary = svar->others;
428 
429  if (NIL_P(ary)) {
430  return Qnil;
431  }
432  else {
433  return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
434  }
435  }
436  }
437 }
438 
439 static struct vm_svar *
440 svar_new(VALUE obj)
441 {
442  return (struct vm_svar *)rb_imemo_new(imemo_svar, Qnil, Qnil, Qnil, obj);
443 }
444 
445 static void
446 lep_svar_set(rb_thread_t *th, const VALUE *lep, rb_num_t key, VALUE val)
447 {
448  struct vm_svar *svar = lep_svar(th, lep);
449 
450  if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
451  lep_svar_write(th, lep, svar = svar_new((VALUE)svar));
452  }
453 
454  switch (key) {
455  case VM_SVAR_LASTLINE:
456  RB_OBJ_WRITE(svar, &svar->lastline, val);
457  return;
458  case VM_SVAR_BACKREF:
459  RB_OBJ_WRITE(svar, &svar->backref, val);
460  return;
461  default: {
462  VALUE ary = svar->others;
463 
464  if (NIL_P(ary)) {
465  RB_OBJ_WRITE(svar, &svar->others, ary = rb_ary_new());
466  }
467  rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
468  }
469  }
470 }
471 
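/* Key/type encoding used by vm_getspecial():
 *   type == 0           -- plain svar lookup by key ($_, $~, ...)
 *   type odd            -- (type >> 1) is one of '&', '`', '\'', '+',
 *                          selecting $&, $`, $' or $+ from the last match
 *   type even, nonzero  -- (type >> 1) is n, returning the nth capture $n */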
472 static inline VALUE
473 vm_getspecial(rb_thread_t *th, const VALUE *lep, rb_num_t key, rb_num_t type)
474 {
475  VALUE val;
476 
477  if (type == 0) {
478  val = lep_svar_get(th, lep, key);
479  }
480  else {
481  VALUE backref = lep_svar_get(th, lep, VM_SVAR_BACKREF);
482 
483  if (type & 0x01) {
484  switch (type >> 1) {
485  case '&':
486  val = rb_reg_last_match(backref);
487  break;
488  case '`':
489  val = rb_reg_match_pre(backref);
490  break;
491  case '\'':
492  val = rb_reg_match_post(backref);
493  break;
494  case '+':
495  val = rb_reg_match_last(backref);
496  break;
497  default:
498  rb_bug("unexpected back-ref");
499  }
500  }
501  else {
502  val = rb_reg_nth_match((int)(type >> 1), backref);
503  }
504  }
505  return val;
506 }
507 
508 PUREFUNC(static rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar));
509 static rb_callable_method_entry_t *
510 check_method_entry(VALUE obj, int can_be_svar)
511 {
512  if (obj == Qfalse) return NULL;
513 
514 #if VM_CHECK_MODE > 0
515  if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
516 #endif
517 
518  switch (imemo_type(obj)) {
519  case imemo_ment:
520  return (rb_callable_method_entry_t *)obj;
521  case imemo_cref:
522  return NULL;
523  case imemo_svar:
524  if (can_be_svar) {
525  return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
526  }
527  default:
528 #if VM_CHECK_MODE > 0
529  rb_bug("check_method_entry: svar should not be there:");
530 #endif
531  return NULL;
532  }
533 }
534 
535 const rb_callable_method_entry_t *
536 rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
537 {
538  const VALUE *ep = cfp->ep;
539  rb_callable_method_entry_t *me;
540 
541  while (!VM_ENV_LOCAL_P(ep)) {
542  if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
543  ep = VM_ENV_PREV_EP(ep);
544  }
545 
546  return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
547 }
548 
549 static rb_cref_t *
550 method_entry_cref(rb_callable_method_entry_t *me)
551 {
552  switch (me->def->type) {
553  case VM_METHOD_TYPE_ISEQ:
554  return me->def->body.iseq.cref;
555  default:
556  return NULL;
557  }
558 }
559 
560 #if VM_CHECK_MODE == 0
561 PUREFUNC(static rb_cref_t *check_cref(VALUE, int));
562 #endif
563 static rb_cref_t *
564 check_cref(VALUE obj, int can_be_svar)
565 {
566  if (obj == Qfalse) return NULL;
567 
568 #if VM_CHECK_MODE > 0
569  if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
570 #endif
571 
572  switch (imemo_type(obj)) {
573  case imemo_ment:
574  return method_entry_cref((rb_callable_method_entry_t *)obj);
575  case imemo_cref:
576  return (rb_cref_t *)obj;
577  case imemo_svar:
578  if (can_be_svar) {
579  return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
580  }
581  default:
582 #if VM_CHECK_MODE > 0
583  rb_bug("check_cref: svar should not be there:");
584 #endif
585  return NULL;
586  }
587 }
588 
589 static inline rb_cref_t *
590 vm_env_cref(const VALUE *ep)
591 {
592  rb_cref_t *cref;
593 
594  while (!VM_ENV_LOCAL_P(ep)) {
595  if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
596  ep = VM_ENV_PREV_EP(ep);
597  }
598 
599  return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
600 }
601 
602 static int
603 is_cref(const VALUE v, int can_be_svar)
604 {
605  if (RB_TYPE_P(v, T_IMEMO)) {
606  switch (imemo_type(v)) {
607  case imemo_cref:
608  return TRUE;
609  case imemo_svar:
610  if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
611  default:
612  break;
613  }
614  }
615  return FALSE;
616 }
617 
618 static int
619 vm_env_cref_by_cref(const VALUE *ep)
620 {
621  while (!VM_ENV_LOCAL_P(ep)) {
622  if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
623  ep = VM_ENV_PREV_EP(ep);
624  }
625  return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
626 }
627 
628 static rb_cref_t *
629 cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
630 {
631  const VALUE v = *vptr;
632  rb_cref_t *cref, *new_cref;
633 
634  if (RB_TYPE_P(v, T_IMEMO)) {
635  switch (imemo_type(v)) {
636  case imemo_cref:
637  cref = (rb_cref_t *)v;
638  new_cref = vm_cref_dup(cref);
639  if (parent) {
640  RB_OBJ_WRITE(parent, vptr, new_cref);
641  }
642  else {
643  VM_FORCE_WRITE(vptr, (VALUE)new_cref);
644  }
645  return (rb_cref_t *)new_cref;
646  case imemo_svar:
647  if (can_be_svar) {
648  return cref_replace_with_duplicated_cref_each_frame((const VALUE *)&((struct vm_svar *)v)->cref_or_me, FALSE, v);
649  }
650  case imemo_ment:
651  rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
652  default:
653  break;
654  }
655  }
656  return FALSE;
657 }
658 
659 static rb_cref_t *
660 vm_cref_replace_with_duplicated_cref(const VALUE *ep)
661 {
662  if (vm_env_cref_by_cref(ep)) {
663  rb_cref_t *cref;
664  VALUE envval;
665 
666  while (!VM_ENV_LOCAL_P(ep)) {
667  envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
668  if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
669  return cref;
670  }
671  ep = VM_ENV_PREV_EP(ep);
672  }
673  envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
674  return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
675  }
676  else {
677  rb_bug("vm_cref_dup: unreachable");
678  }
679 }
680 
681 
682 static rb_cref_t *
683 rb_vm_get_cref(const VALUE *ep)
684 {
685  rb_cref_t *cref = vm_env_cref(ep);
686 
687  if (cref != NULL) {
688  return cref;
689  }
690  else {
691  rb_bug("rb_vm_get_cref: unreachable");
692  }
693 }
694 
695 static const rb_cref_t *
696 vm_get_const_key_cref(const VALUE *ep)
697 {
698  const rb_cref_t *cref = rb_vm_get_cref(ep);
699  const rb_cref_t *key_cref = cref;
700 
701  while (cref) {
702  if (FL_TEST(CREF_CLASS(cref), FL_SINGLETON)) {
703  return key_cref;
704  }
705  cref = CREF_NEXT(cref);
706  }
707 
708  /* does not include singleton class */
709  return NULL;
710 }
711 
712 void
713 rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr)
714 {
715  rb_cref_t *new_cref;
716 
717  while (cref) {
718  if (CREF_CLASS(cref) == old_klass) {
719  new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
720  *new_cref_ptr = new_cref;
721  return;
722  }
723  new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
724  cref = CREF_NEXT(cref);
725  *new_cref_ptr = new_cref;
726  new_cref_ptr = (rb_cref_t **)&new_cref->next;
727  }
728  *new_cref_ptr = NULL;
729 }
730 
731 static rb_cref_t *
732 vm_cref_push(rb_thread_t *th, VALUE klass, const VALUE *ep, int pushed_by_eval)
733 {
734  rb_cref_t *prev_cref = NULL;
735 
736  if (ep) {
737  prev_cref = vm_env_cref(ep);
738  }
739  else {
740  rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(th, th->ec.cfp);
741 
742  if (cfp) {
743  prev_cref = vm_env_cref(cfp->ep);
744  }
745  }
746 
747  return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval);
748 }
749 
750 static inline VALUE
751 vm_get_cbase(const VALUE *ep)
752 {
753  const rb_cref_t *cref = rb_vm_get_cref(ep);
754  VALUE klass = Qundef;
755 
756  while (cref) {
757  if ((klass = CREF_CLASS(cref)) != 0) {
758  break;
759  }
760  cref = CREF_NEXT(cref);
761  }
762 
763  return klass;
764 }
765 
766 static inline VALUE
767 vm_get_const_base(const VALUE *ep)
768 {
769  const rb_cref_t *cref = rb_vm_get_cref(ep);
770  VALUE klass = Qundef;
771 
772  while (cref) {
773  if (!CREF_PUSHED_BY_EVAL(cref) &&
774  (klass = CREF_CLASS(cref)) != 0) {
775  break;
776  }
777  cref = CREF_NEXT(cref);
778  }
779 
780  return klass;
781 }
782 
783 static inline void
784 vm_check_if_namespace(VALUE klass)
785 {
786  if (!RB_TYPE_P(klass, T_CLASS) && !RB_TYPE_P(klass, T_MODULE)) {
787  rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
788  }
789 }
790 
791 static inline void
792 vm_ensure_not_refinement_module(VALUE self)
793 {
794  if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
795  rb_warn("not defined at the refinement, but at the outer class/module");
796  }
797 }
798 
799 static inline VALUE
800 vm_get_iclass(rb_control_frame_t *cfp, VALUE klass)
801 {
802  return klass;
803 }
804 
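/* vm_get_ev_const() resolves a constant reference.  Without an explicit
 * scope (orig_klass == Qnil) it walks the lexical cref chain, honoring
 * autoload, and then falls back to a lookup starting at the innermost cref
 * class (or the class of self); with an explicit scope it performs a public
 * constant lookup on that class or module. */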
805 static inline VALUE
806 vm_get_ev_const(rb_thread_t *th, VALUE orig_klass, ID id, int is_defined)
807 {
808  void rb_const_warn_if_deprecated(const rb_const_entry_t *ce, VALUE klass, ID id);
809  VALUE val;
810 
811  if (orig_klass == Qnil) {
812  /* in current lexical scope */
813  const rb_cref_t *root_cref = rb_vm_get_cref(th->ec.cfp->ep);
814  const rb_cref_t *cref;
815  VALUE klass = Qnil;
816 
817  while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
818  root_cref = CREF_NEXT(root_cref);
819  }
820  cref = root_cref;
821  while (cref && CREF_NEXT(cref)) {
822  if (CREF_PUSHED_BY_EVAL(cref)) {
823  klass = Qnil;
824  }
825  else {
826  klass = CREF_CLASS(cref);
827  }
828  cref = CREF_NEXT(cref);
829 
830  if (!NIL_P(klass)) {
831  VALUE av, am = 0;
832  rb_const_entry_t *ce;
833  search_continue:
834  if ((ce = rb_const_lookup(klass, id))) {
835  rb_const_warn_if_deprecated(ce, klass, id);
836  val = ce->value;
837  if (val == Qundef) {
838  if (am == klass) break;
839  am = klass;
840  if (is_defined) return 1;
841  if (rb_autoloading_value(klass, id, &av)) return av;
842  rb_autoload_load(klass, id);
843  goto search_continue;
844  }
845  else {
846  if (is_defined) {
847  return 1;
848  }
849  else {
850  return val;
851  }
852  }
853  }
854  }
855  }
856 
857  /* search self */
858  if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
859  klass = vm_get_iclass(th->ec.cfp, CREF_CLASS(root_cref));
860  }
861  else {
862  klass = CLASS_OF(th->ec.cfp->self);
863  }
864 
865  if (is_defined) {
866  return rb_const_defined(klass, id);
867  }
868  else {
869  return rb_const_get(klass, id);
870  }
871  }
872  else {
873  vm_check_if_namespace(orig_klass);
874  if (is_defined) {
875  return rb_public_const_defined_from(orig_klass, id);
876  }
877  else {
878  return rb_public_const_get_from(orig_klass, id);
879  }
880  }
881 }
882 
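/* vm_get_cvar_base() chooses the class that owns a class variable: it skips
 * cref entries for singleton classes and eval-pushed scopes, warns when the
 * access happens at the toplevel, and raises a TypeError if no class is
 * available. */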
883 static inline VALUE
884 vm_get_cvar_base(const rb_cref_t *cref, rb_control_frame_t *cfp)
885 {
886  VALUE klass;
887 
888  if (!cref) {
889  rb_bug("vm_get_cvar_base: no cref");
890  }
891 
892  while (CREF_NEXT(cref) &&
893  (NIL_P(CREF_CLASS(cref)) || FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
894  CREF_PUSHED_BY_EVAL(cref))) {
895  cref = CREF_NEXT(cref);
896  }
897  if (!CREF_NEXT(cref)) {
898  rb_warn("class variable access from toplevel");
899  }
900 
901  klass = vm_get_iclass(cfp, CREF_CLASS(cref));
902 
903  if (NIL_P(klass)) {
904  rb_raise(rb_eTypeError, "no class variables available");
905  }
906  return klass;
907 }
908 
909 static VALUE
910 vm_search_const_defined_class(const VALUE cbase, ID id)
911 {
912  if (rb_const_defined_at(cbase, id)) return cbase;
913  if (cbase == rb_cObject) {
914  VALUE tmp = RCLASS_SUPER(cbase);
915  while (tmp) {
916  if (rb_const_defined_at(tmp, id)) return tmp;
917  tmp = RCLASS_SUPER(tmp);
918  }
919  }
920  return 0;
921 }
922 
923 #ifndef USE_IC_FOR_IVAR
924 #define USE_IC_FOR_IVAR 1
925 #endif
926 
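/* vm_getivar()/vm_setivar() implement the instance variable inline cache.
 * A hit requires the cached class serial (or, for attr_reader/attr_writer,
 * an index cached in the call cache) to still be valid; on a miss the
 * object's ivar index table is consulted and the cache refilled before
 * falling back to rb_ivar_get()/rb_ivar_set(). */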
927 ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, IC, struct rb_call_cache *, int));
928 static inline VALUE
929 vm_getivar(VALUE obj, ID id, IC ic, struct rb_call_cache *cc, int is_attr)
930 {
931 #if USE_IC_FOR_IVAR
932  if (LIKELY(RB_TYPE_P(obj, T_OBJECT))) {
933  VALUE val = Qundef;
934  if (LIKELY(is_attr ?
935  RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_unset, cc->aux.index > 0) :
936  RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_serial,
937  ic->ic_serial == RCLASS_SERIAL(RBASIC(obj)->klass)))) {
938  st_index_t index = !is_attr ? ic->ic_value.index : (cc->aux.index - 1);
939  if (LIKELY(index < ROBJECT_NUMIV(obj))) {
940  val = ROBJECT_IVPTR(obj)[index];
941  }
942  undef_check:
943  if (UNLIKELY(val == Qundef)) {
944  if (!is_attr && RTEST(ruby_verbose))
945  rb_warning("instance variable %"PRIsVALUE" not initialized", QUOTE_ID(id));
946  val = Qnil;
947  }
948  RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
949  return val;
950  }
951  else {
952  st_data_t index;
953  struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
954 
955  if (iv_index_tbl) {
956  if (st_lookup(iv_index_tbl, id, &index)) {
957  if (index < ROBJECT_NUMIV(obj)) {
958  val = ROBJECT_IVPTR(obj)[index];
959  }
960  if (!is_attr) {
961  ic->ic_value.index = index;
962  ic->ic_serial = RCLASS_SERIAL(RBASIC(obj)->klass);
963  }
964  else { /* call_info */
965  cc->aux.index = (int)index + 1;
966  }
967  }
968  }
969  goto undef_check;
970  }
971  }
972  else {
973  RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_noobject);
974  }
975 #endif /* USE_IC_FOR_IVAR */
976  RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
977 
978  if (is_attr)
979  return rb_attr_get(obj, id);
980  return rb_ivar_get(obj, id);
981 }
982 
983 static inline VALUE
984 vm_setivar(VALUE obj, ID id, VALUE val, IC ic, struct rb_call_cache *cc, int is_attr)
985 {
986 #if USE_IC_FOR_IVAR
987  rb_check_frozen(obj);
988 
989  if (LIKELY(RB_TYPE_P(obj, T_OBJECT))) {
990  VALUE klass = RBASIC(obj)->klass;
991  st_data_t index;
992 
993  if (LIKELY(
994  (!is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_serial, ic->ic_serial == RCLASS_SERIAL(klass))) ||
995  ( is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_unset, cc->aux.index > 0)))) {
996  VALUE *ptr = ROBJECT_IVPTR(obj);
997  index = !is_attr ? ic->ic_value.index : cc->aux.index-1;
998 
999  if (RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_oorange, index < ROBJECT_NUMIV(obj))) {
1000  RB_OBJ_WRITE(obj, &ptr[index], val);
1001  RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1002  return val; /* inline cache hit */
1003  }
1004  }
1005  else {
1006  struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
1007 
1008  if (iv_index_tbl && st_lookup(iv_index_tbl, (st_data_t)id, &index)) {
1009  if (!is_attr) {
1010  ic->ic_value.index = index;
1011  ic->ic_serial = RCLASS_SERIAL(klass);
1012  }
1013  else if (index >= INT_MAX) {
1014  rb_raise(rb_eArgError, "too many instance variables");
1015  }
1016  else {
1017  cc->aux.index = (int)(index + 1);
1018  }
1019  }
1020  /* fall through */
1021  }
1022  }
1023  else {
1024  RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1025  }
1026 #endif /* USE_IC_FOR_IVAR */
1027  RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1028  return rb_ivar_set(obj, id, val);
1029 }
1030 
1031 static inline VALUE
1032 vm_getinstancevariable(VALUE obj, ID id, IC ic)
1033 {
1034  return vm_getivar(obj, id, ic, 0, 0);
1035 }
1036 
1037 static inline void
1038 vm_setinstancevariable(VALUE obj, ID id, VALUE val, IC ic)
1039 {
1040  vm_setivar(obj, id, val, ic, 0, 0);
1041 }
1042 
1043 static VALUE
1044 vm_throw_continue(rb_thread_t *th, VALUE err)
1045 {
1046  /* continue throw */
1047 
1048  if (FIXNUM_P(err)) {
1049  th->ec.tag->state = FIX2INT(err);
1050  }
1051  else if (SYMBOL_P(err)) {
1052  th->ec.tag->state = TAG_THROW;
1053  }
1054  else if (THROW_DATA_P(err)) {
1055  th->ec.tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
1056  }
1057  else {
1058  th->ec.tag->state = TAG_RAISE;
1059  }
1060  return err;
1061 }
1062 
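/* vm_throw_start() decides which frame a break/retry/return escapes to.
 * For TAG_BREAK it walks up to the frame that defined the block and checks
 * that frame's catch table (a break out of a lambda becomes a return); for
 * TAG_RETURN it searches outward for the enclosing method or lambda frame.
 * If no valid target exists, a LocalJumpError is raised. */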
1063 static VALUE
1064 vm_throw_start(rb_thread_t *const th, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
1065  const int flag, const rb_num_t level, const VALUE throwobj)
1066 {
1067  const rb_control_frame_t *escape_cfp = NULL;
1068  const rb_control_frame_t * const eocfp = RUBY_VM_END_CONTROL_FRAME(th); /* end of control frame pointer */
1069 
1070  if (flag != 0) {
1071  /* do nothing */
1072  }
1073  else if (state == TAG_BREAK) {
1074  int is_orphan = 1;
1075  const VALUE *ep = GET_EP();
1076  const rb_iseq_t *base_iseq = GET_ISEQ();
1077  escape_cfp = reg_cfp;
1078 
1079  while (base_iseq->body->type != ISEQ_TYPE_BLOCK) {
1080  if (escape_cfp->iseq->body->type == ISEQ_TYPE_CLASS) {
1081  escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1082  ep = escape_cfp->ep;
1083  base_iseq = escape_cfp->iseq;
1084  }
1085  else {
1086  ep = VM_ENV_PREV_EP(ep);
1087  base_iseq = base_iseq->body->parent_iseq;
1088  escape_cfp = rb_vm_search_cf_from_ep(th, escape_cfp, ep);
1089  VM_ASSERT(escape_cfp->iseq == base_iseq);
1090  }
1091  }
1092 
1093  if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1094  /* lambda{... break ...} */
1095  is_orphan = 0;
1096  state = TAG_RETURN;
1097  }
1098  else {
1099  ep = VM_ENV_PREV_EP(ep);
1100 
1101  while (escape_cfp < eocfp) {
1102  if (escape_cfp->ep == ep) {
1103  const rb_iseq_t *const iseq = escape_cfp->iseq;
1104  const VALUE epc = escape_cfp->pc - iseq->body->iseq_encoded;
1105  const struct iseq_catch_table *const ct = iseq->body->catch_table;
1106  unsigned int i;
1107 
1108  if (!ct) break;
1109  for (i=0; i < ct->size; i++) {
1110  const struct iseq_catch_table_entry * const entry = &ct->entries[i];
1111 
1112  if (entry->type == CATCH_TYPE_BREAK &&
1113  entry->iseq == base_iseq &&
1114  entry->start < epc && entry->end >= epc) {
1115  if (entry->cont == epc) { /* found! */
1116  is_orphan = 0;
1117  }
1118  break;
1119  }
1120  }
1121  break;
1122  }
1123 
1124  escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1125  }
1126  }
1127 
1128  if (is_orphan) {
1129  rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
1130  }
1131  }
1132  else if (state == TAG_RETRY) {
1133  rb_num_t i;
1134  const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1135 
1136  for (i = 0; i < level; i++) {
1137  ep = VM_ENV_PREV_EP(ep);
1138  }
1139 
1140  escape_cfp = rb_vm_search_cf_from_ep(th, reg_cfp, ep);
1141  }
1142  else if (state == TAG_RETURN) {
1143  const VALUE *current_ep = GET_EP();
1144  const VALUE *target_lep = VM_EP_LEP(current_ep);
1145  int in_class_frame = 0;
1146  int toplevel = 1;
1147  escape_cfp = reg_cfp;
1148 
1149  while (escape_cfp < eocfp) {
1150  const VALUE *lep = VM_CF_LEP(escape_cfp);
1151 
1152  if (!target_lep) {
1153  target_lep = lep;
1154  }
1155 
1156  if (lep == target_lep &&
1157  VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1158  escape_cfp->iseq->body->type == ISEQ_TYPE_CLASS) {
1159  in_class_frame = 1;
1160  target_lep = 0;
1161  }
1162 
1163  if (lep == target_lep) {
1164  if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1165  toplevel = 0;
1166  if (in_class_frame) {
1167  /* lambda {class A; ... return ...; end} */
1168  goto valid_return;
1169  }
1170  else {
1171  const VALUE *tep = current_ep;
1172 
1173  while (target_lep != tep) {
1174  if (escape_cfp->ep == tep) {
1175  /* in lambda */
1176  goto valid_return;
1177  }
1178  tep = VM_ENV_PREV_EP(tep);
1179  }
1180  }
1181  }
1182  else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1183  switch (escape_cfp->iseq->body->type) {
1184  case ISEQ_TYPE_TOP:
1185  case ISEQ_TYPE_MAIN:
1186  if (toplevel) goto valid_return;
1187  break;
1188  case ISEQ_TYPE_EVAL:
1189  case ISEQ_TYPE_CLASS:
1190  toplevel = 0;
1191  break;
1192  default:
1193  break;
1194  }
1195  }
1196  }
1197 
1198  if (escape_cfp->ep == target_lep && escape_cfp->iseq->body->type == ISEQ_TYPE_METHOD) {
1199  goto valid_return;
1200  }
1201 
1202  escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1203  }
1204  rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);
1205 
1206  valid_return:;
1207  /* do nothing */
1208  }
1209  else {
1210  rb_bug("insns(throw): unsupported throw type");
1211  }
1212 
1213  th->ec.tag->state = state;
1214  return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1215 }
1216 
1217 static VALUE
1218 vm_throw(rb_thread_t *th, rb_control_frame_t *reg_cfp,
1219  rb_num_t throw_state, VALUE throwobj)
1220 {
1221  const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1222  const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1223  const rb_num_t level = throw_state >> VM_THROW_LEVEL_SHIFT;
1224 
1225  if (state != 0) {
1226  return vm_throw_start(th, reg_cfp, state, flag, level, throwobj);
1227  }
1228  else {
1229  return vm_throw_continue(th, throwobj);
1230  }
1231 }
1232 
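/* vm_expandarray() implements multiple-assignment expansion.  flag & 0x01
 * requests a splat array holding the leftover elements; flag & 0x02 selects
 * "post" order, taking elements from the tail of the array and padding with
 * nil when it is too short.  For example `a, b, *c = ary' compiles to
 * expandarray with num = 2 and the splat bit set. */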
1233 static inline void
1234 vm_expandarray(rb_control_frame_t *cfp, VALUE ary, rb_num_t num, int flag)
1235 {
1236  int is_splat = flag & 0x01;
1237  rb_num_t space_size = num + is_splat;
1238  VALUE *base = cfp->sp;
1239  const VALUE *ptr;
1240  rb_num_t len;
1241 
1242  if (!RB_TYPE_P(ary, T_ARRAY)) {
1243  ary = rb_ary_to_ary(ary);
1244  }
1245 
1246  cfp->sp += space_size;
1247 
1248  ptr = RARRAY_CONST_PTR(ary);
1249  len = (rb_num_t)RARRAY_LEN(ary);
1250 
1251  if (flag & 0x02) {
1252  /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
1253  rb_num_t i = 0, j;
1254 
1255  if (len < num) {
1256  for (i=0; i<num-len; i++) {
1257  *base++ = Qnil;
1258  }
1259  }
1260  for (j=0; i<num; i++, j++) {
1261  VALUE v = ptr[len - j - 1];
1262  *base++ = v;
1263  }
1264  if (is_splat) {
1265  *base = rb_ary_new4(len - j, ptr);
1266  }
1267  }
1268  else {
1269  /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
1270  rb_num_t i;
1271  VALUE *bptr = &base[space_size - 1];
1272 
1273  for (i=0; i<num; i++) {
1274  if (len <= i) {
1275  for (; i<num; i++) {
1276  *bptr-- = Qnil;
1277  }
1278  break;
1279  }
1280  *bptr-- = ptr[i];
1281  }
1282  if (is_splat) {
1283  if (num > len) {
1284  *bptr = rb_ary_new();
1285  }
1286  else {
1287  *bptr = rb_ary_new4(len - num, ptr + num);
1288  }
1289  }
1290  }
1291  RB_GC_GUARD(ary);
1292 }
1293 
1294 static VALUE vm_call_general(rb_thread_t *th, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc);
1295 
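/* vm_search_method() is the inline method cache check: a hit requires both
 * the global method state and the receiver class's serial to match what the
 * call cache recorded; on a miss the callable method entry is looked up
 * again and the cache refilled. */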
1296 static void
1297 vm_search_method(const struct rb_call_info *ci, struct rb_call_cache *cc, VALUE recv)
1298 {
1299  VALUE klass = CLASS_OF(recv);
1300 
1301 #if OPT_INLINE_METHOD_CACHE
1302  if (LIKELY(RB_DEBUG_COUNTER_INC_UNLESS(mc_global_state_miss,
1303  GET_GLOBAL_METHOD_STATE() == cc->method_state) &&
1304  RB_DEBUG_COUNTER_INC_UNLESS(mc_class_serial_miss,
1305  RCLASS_SERIAL(klass) == cc->class_serial))) {
1306  /* cache hit! */
1307  VM_ASSERT(cc->call != NULL);
1308  RB_DEBUG_COUNTER_INC(mc_inline_hit);
1309  return;
1310  }
1311  RB_DEBUG_COUNTER_INC(mc_inline_miss);
1312 #endif
1313  cc->me = rb_callable_method_entry(klass, ci->mid);
1314  VM_ASSERT(callable_method_entry_p(cc->me));
1315  cc->call = vm_call_general;
1316 #if OPT_INLINE_METHOD_CACHE
1317  cc->method_state = GET_GLOBAL_METHOD_STATE();
1318  cc->class_serial = RCLASS_SERIAL(klass);
1319 #endif
1320 }
1321 
1322 static inline int
1323 check_cfunc(const rb_callable_method_entry_t *me, VALUE (*func)())
1324 {
1325  if (me && me->def->type == VM_METHOD_TYPE_CFUNC &&
1326  me->def->body.cfunc.func == func) {
1327  return 1;
1328  }
1329  else {
1330  return 0;
1331  }
1332 }
1333 
1334 static inline int
1335 vm_method_cfunc_is(CALL_INFO ci, CALL_CACHE cc,
1336  VALUE recv, VALUE (*func)())
1337 {
1338  vm_search_method(ci, cc, recv);
1339  return check_cfunc(cc->me, func);
1340 }
1341 
1342 static VALUE
1343 opt_equal_fallback(VALUE recv, VALUE obj, CALL_INFO ci, CALL_CACHE cc)
1344 {
1345  if (vm_method_cfunc_is(ci, cc, recv, rb_obj_equal)) {
1346  return recv == obj ? Qtrue : Qfalse;
1347  }
1348 
1349  return Qundef;
1350 }
1351 
1352 #define BUILTIN_CLASS_P(x, k) (!SPECIAL_CONST_P(x) && RBASIC_CLASS(x) == k)
1353 #define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
1354 
1355 /* 1: compare by identity, 0: not applicable, -1: redefined */
1356 static inline int
1357 comparable_by_identity(VALUE recv, VALUE obj)
1358 {
1359  if (FIXNUM_2_P(recv, obj)) {
1360  return (EQ_UNREDEFINED_P(INTEGER) != 0) * 2 - 1;
1361  }
1362  if (FLONUM_2_P(recv, obj)) {
1363  return (EQ_UNREDEFINED_P(FLOAT) != 0) * 2 - 1;
1364  }
1365  if (SYMBOL_P(recv) && SYMBOL_P(obj)) {
1366  return (EQ_UNREDEFINED_P(SYMBOL) != 0) * 2 - 1;
1367  }
1368  return 0;
1369 }
1370 
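/* opt_eq_func()/opt_eql_func() below are the fast paths of the opt_eq and
 * opt_eql instructions: identity comparison for Fixnum/Flonum/Symbol pairs,
 * direct Float/String comparison while == is unredefined, and otherwise a
 * fallback that either compares identity (when the method is still
 * rb_obj_equal) or returns Qundef so the caller issues a normal call. */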
1371 static
1372 #ifndef NO_BIG_INLINE
1373 inline
1374 #endif
1375 VALUE
1376 opt_eq_func(VALUE recv, VALUE obj, CALL_INFO ci, CALL_CACHE cc)
1377 {
1378  switch (comparable_by_identity(recv, obj)) {
1379  case 1:
1380  return (recv == obj) ? Qtrue : Qfalse;
1381  case -1:
1382  goto fallback;
1383  }
1384  if (0) {
1385  }
1386  else if (BUILTIN_CLASS_P(recv, rb_cFloat)) {
1387  if (EQ_UNREDEFINED_P(FLOAT)) {
1388  return rb_float_equal(recv, obj);
1389  }
1390  }
1391  else if (BUILTIN_CLASS_P(recv, rb_cString)) {
1392  if (EQ_UNREDEFINED_P(STRING)) {
1393  return rb_str_equal(recv, obj);
1394  }
1395  }
1396 
1397  fallback:
1398  return opt_equal_fallback(recv, obj, ci, cc);
1399 }
1400 
1401 static
1402 #ifndef NO_BIG_INLINE
1403 inline
1404 #endif
1405 VALUE
1406 opt_eql_func(VALUE recv, VALUE obj, CALL_INFO ci, CALL_CACHE cc)
1407 {
1408  switch (comparable_by_identity(recv, obj)) {
1409  case 1:
1410  return (recv == obj) ? Qtrue : Qfalse;
1411  case -1:
1412  goto fallback;
1413  }
1414  if (0) {
1415  }
1416  else if (BUILTIN_CLASS_P(recv, rb_cFloat)) {
1417  if (EQ_UNREDEFINED_P(FLOAT)) {
1418  return rb_float_eql(recv, obj);
1419  }
1420  }
1421  else if (BUILTIN_CLASS_P(recv, rb_cString)) {
1422  if (EQ_UNREDEFINED_P(STRING)) {
1423  return rb_str_eql(recv, obj);
1424  }
1425  }
1426 
1427  fallback:
1428  return opt_equal_fallback(recv, obj, ci, cc);
1429 }
1430 #undef BUILTIN_CLASS_P
1431 #undef EQ_UNREDEFINED_P
1432 
1433 VALUE
1434 rb_equal_opt(VALUE obj1, VALUE obj2)
1435 {
1436  struct rb_call_info ci;
1437  struct rb_call_cache cc;
1438 
1439  ci.mid = idEq;
1440  cc.method_state = 0;
1441  cc.class_serial = 0;
1442  cc.me = NULL;
1443  return opt_eq_func(obj1, obj2, &ci, &cc);
1444 }
1445 
1446 VALUE
1447 rb_eql_opt(VALUE obj1, VALUE obj2)
1448 {
1449  struct rb_call_info ci;
1450  struct rb_call_cache cc;
1451 
1452  ci.mid = idEqlP;
1453  cc.method_state = 0;
1454  cc.class_serial = 0;
1455  cc.me = NULL;
1456  return opt_eql_func(obj1, obj2, &ci, &cc);
1457 }
1458 
1459 static VALUE vm_call0(rb_thread_t*, VALUE, ID, int, const VALUE*, const rb_callable_method_entry_t *);
1460 
1461 static VALUE
1462 check_match(VALUE pattern, VALUE target, enum vm_check_match_type type)
1463 {
1464  switch (type) {
1465  case VM_CHECKMATCH_TYPE_WHEN:
1466  return pattern;
1467  case VM_CHECKMATCH_TYPE_RESCUE:
1468  if (!rb_obj_is_kind_of(pattern, rb_cModule)) {
1469  rb_raise(rb_eTypeError, "class or module required for rescue clause");
1470  }
1471  /* fall through */
1472  case VM_CHECKMATCH_TYPE_CASE: {
1473  const rb_callable_method_entry_t *me =
1475  if (me) {
1476  return vm_call0(GET_THREAD(), pattern, idEqq, 1, &target, me);
1477  }
1478  else {
1479  /* fallback to funcall (e.g. method_missing) */
1480  return rb_funcallv(pattern, idEqq, 1, &target);
1481  }
1482  }
1483  default:
1484  rb_bug("check_match: unreachable");
1485  }
1486 }
1487 
1488 
1489 #if defined(_MSC_VER) && _MSC_VER < 1300
1490 #define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
1491 #else
1492 #define CHECK_CMP_NAN(a, b) /* do nothing */
1493 #endif
1494 
1495 static inline VALUE
1496 double_cmp_lt(double a, double b)
1497 {
1498  CHECK_CMP_NAN(a, b);
1499  return a < b ? Qtrue : Qfalse;
1500 }
1501 
1502 static inline VALUE
1503 double_cmp_le(double a, double b)
1504 {
1505  CHECK_CMP_NAN(a, b);
1506  return a <= b ? Qtrue : Qfalse;
1507 }
1508 
1509 static inline VALUE
1510 double_cmp_gt(double a, double b)
1511 {
1512  CHECK_CMP_NAN(a, b);
1513  return a > b ? Qtrue : Qfalse;
1514 }
1515 
1516 static inline VALUE
1517 double_cmp_ge(double a, double b)
1518 {
1519  CHECK_CMP_NAN(a, b);
1520  return a >= b ? Qtrue : Qfalse;
1521 }
1522 
1523 static VALUE *
1524 vm_base_ptr(const rb_control_frame_t *cfp)
1525 {
1526  const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
1527 
1528  if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
1529  VALUE *bp = prev_cfp->sp + cfp->iseq->body->local_table_size + VM_ENV_DATA_SIZE;
1530  if (cfp->iseq->body->type == ISEQ_TYPE_METHOD) {
1531  /* adjust `self' */
1532  bp += 1;
1533  }
1534 #if VM_DEBUG_BP_CHECK
1535  if (bp != cfp->bp_check) {
1536  fprintf(stderr, "bp_check: %ld, bp: %ld\n",
1537  (long)(cfp->bp_check - GET_THREAD()->ec.vm_stack),
1538  (long)(bp - GET_THREAD()->ec.vm_stack));
1539  rb_bug("vm_base_ptr: unreachable");
1540  }
1541 #endif
1542  return bp;
1543  }
1544  else {
1545  return NULL;
1546  }
1547 }
1548 
1549 /* method call processes with call_info */
1550 
1551 #include "vm_args.c"
1552 
1553 static inline VALUE vm_call_iseq_setup_2(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc, int opt_pc, int param_size, int local_size);
1554 static inline VALUE vm_call_iseq_setup_normal(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc, int opt_pc, int param_size, int local_size);
1555 static inline VALUE vm_call_iseq_setup_tailcall(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc, int opt_pc);
1556 static VALUE vm_call_super_method(rb_thread_t *th, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc);
1557 static VALUE vm_call_method_nome(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc);
1558 static VALUE vm_call_method_each_type(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc);
1559 static inline VALUE vm_call_method(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc);
1560 
1561 static vm_call_handler vm_call_iseq_setup_func(const struct rb_call_info *ci, const int param_size, const int local_size);
1562 
1563 static rb_method_definition_t *method_definition_create(rb_method_type_t type, ID mid);
1564 static void method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts);
1565 static int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2);
1566 
1567 static const rb_iseq_t *
1568 def_iseq_ptr(rb_method_definition_t *def)
1569 {
1570 #if VM_CHECK_MODE > 0
1571  if (def->type != VM_METHOD_TYPE_ISEQ) rb_bug("def_iseq_ptr: not iseq (%d)", def->type);
1572 #endif
1573  return rb_iseq_check(def->body.iseq.iseqptr);
1574 }
1575 
1576 static VALUE
1577 vm_call_iseq_setup_tailcall_0start(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
1578 {
1579  return vm_call_iseq_setup_tailcall(th, cfp, calling, ci, cc, 0);
1580 }
1581 
1582 static VALUE
1583 vm_call_iseq_setup_normal_0start(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
1584 {
1585  const rb_iseq_t *iseq = def_iseq_ptr(cc->me->def);
1586  int param = iseq->body->param.size;
1587  int local = iseq->body->local_table_size;
1588  return vm_call_iseq_setup_normal(th, cfp, calling, ci, cc, 0, param, local);
1589 }
1590 
1591 static inline int
1592 simple_iseq_p(const rb_iseq_t *iseq)
1593 {
1594  return iseq->body->param.flags.has_opt == FALSE &&
1595  iseq->body->param.flags.has_rest == FALSE &&
1596  iseq->body->param.flags.has_post == FALSE &&
1597  iseq->body->param.flags.has_kw == FALSE &&
1598  iseq->body->param.flags.has_kwrest == FALSE &&
1599  iseq->body->param.flags.has_block == FALSE;
1600 }
1601 
1602 static inline int
1603 vm_callee_setup_arg(rb_thread_t *th, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc,
1604  const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
1605 {
1606  if (LIKELY(simple_iseq_p(iseq) && !(ci->flag & VM_CALL_KW_SPLAT))) {
1607  rb_control_frame_t *cfp = th->ec.cfp;
1608 
1609  CALLER_SETUP_ARG(cfp, calling, ci); /* splat arg */
1610 
1611  if (calling->argc != iseq->body->param.lead_num) {
1612  argument_arity_error(th, iseq, calling->argc, iseq->body->param.lead_num, iseq->body->param.lead_num);
1613  }
1614 
1615  CI_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
1616  (!IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
1617  !(METHOD_ENTRY_VISI(cc->me) == METHOD_VISI_PROTECTED)));
1618  return 0;
1619  }
1620  else {
1621  return setup_parameters_complex(th, iseq, calling, ci, argv, arg_setup_method);
1622  }
1623 }
1624 
1625 static VALUE
1626 vm_call_iseq_setup(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
1627 {
1628  const rb_iseq_t *iseq = def_iseq_ptr(cc->me->def);
1629  const int param_size = iseq->body->param.size;
1630  const int local_size = iseq->body->local_table_size;
1631  const int opt_pc = vm_callee_setup_arg(th, calling, ci, cc, def_iseq_ptr(cc->me->def), cfp->sp - calling->argc, param_size, local_size);
1632  return vm_call_iseq_setup_2(th, cfp, calling, ci, cc, opt_pc, param_size, local_size);
1633 }
1634 
1635 static inline VALUE
1636 vm_call_iseq_setup_2(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc,
1637  int opt_pc, int param_size, int local_size)
1638 {
1639  if (LIKELY(!(ci->flag & VM_CALL_TAILCALL))) {
1640  return vm_call_iseq_setup_normal(th, cfp, calling, ci, cc, opt_pc, param_size, local_size);
1641  }
1642  else {
1643  return vm_call_iseq_setup_tailcall(th, cfp, calling, ci, cc, opt_pc);
1644  }
1645 }
1646 
1647 static inline VALUE
1648 vm_call_iseq_setup_normal(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc,
1649  int opt_pc, int param_size, int local_size)
1650 {
1651  const rb_callable_method_entry_t *me = cc->me;
1652  const rb_iseq_t *iseq = def_iseq_ptr(me->def);
1653  VALUE *argv = cfp->sp - calling->argc;
1654  VALUE *sp = argv + param_size;
1655  cfp->sp = argv - 1 /* recv */;
1656 
1657  vm_push_frame(th, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
1658  calling->block_handler, (VALUE)me,
1659  iseq->body->iseq_encoded + opt_pc, sp,
1660  local_size - param_size,
1661  iseq->body->stack_max);
1662  return Qundef;
1663 }
1664 
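/* The tailcall variant below reuses the caller's slot in the control frame
 * stack: it copies the receiver and arguments, pops the current frame, and
 * pushes the callee's frame in its place (propagating VM_FRAME_FLAG_FINISH),
 * so tail calls do not grow the control frame stack. */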
1665 static inline VALUE
1666 vm_call_iseq_setup_tailcall(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc,
1667  int opt_pc)
1668 {
1669  unsigned int i;
1670  VALUE *argv = cfp->sp - calling->argc;
1671  const rb_callable_method_entry_t *me = cc->me;
1672  const rb_iseq_t *iseq = def_iseq_ptr(me->def);
1673  VALUE *src_argv = argv;
1674  VALUE *sp_orig, *sp;
1675  VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
1676 
1677  if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
1678  struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
1679  const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
1680  dst_captured->code.val = src_captured->code.val;
1681  if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
1682  calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
1683  }
1684  else {
1685  calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
1686  }
1687  }
1688 
1689  vm_pop_frame(th, cfp, cfp->ep);
1690  cfp = th->ec.cfp;
1691 
1692  sp_orig = sp = cfp->sp;
1693 
1694  /* push self */
1695  sp[0] = calling->recv;
1696  sp++;
1697 
1698  /* copy arguments */
1699  for (i=0; i < iseq->body->param.size; i++) {
1700  *sp++ = src_argv[i];
1701  }
1702 
1703  vm_push_frame(th, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
1704  calling->recv, calling->block_handler, (VALUE)me,
1705  iseq->body->iseq_encoded + opt_pc, sp,
1706  iseq->body->local_table_size - iseq->body->param.size,
1707  iseq->body->stack_max);
1708 
1709  cfp->sp = sp_orig;
1710  RUBY_VM_CHECK_INTS(th);
1711 
1712  return Qundef;
1713 }
1714 
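/* The call_cfunc_N() helpers are invoker stubs selected by a C method's
 * declared arity: -2 passes (recv, args_as_array), -1 passes
 * (argc, argv, recv), and 0..15 spread argv into positional arguments. */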
1715 static VALUE
1716 call_cfunc_m2(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1717 {
1718  return (*func)(recv, rb_ary_new4(argc, argv));
1719 }
1720 
1721 static VALUE
1722 call_cfunc_m1(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1723 {
1724  return (*func)(argc, argv, recv);
1725 }
1726 
1727 static VALUE
1728 call_cfunc_0(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1729 {
1730  return (*func)(recv);
1731 }
1732 
1733 static VALUE
1734 call_cfunc_1(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1735 {
1736  return (*func)(recv, argv[0]);
1737 }
1738 
1739 static VALUE
1740 call_cfunc_2(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1741 {
1742  return (*func)(recv, argv[0], argv[1]);
1743 }
1744 
1745 static VALUE
1746 call_cfunc_3(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1747 {
1748  return (*func)(recv, argv[0], argv[1], argv[2]);
1749 }
1750 
1751 static VALUE
1752 call_cfunc_4(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1753 {
1754  return (*func)(recv, argv[0], argv[1], argv[2], argv[3]);
1755 }
1756 
1757 static VALUE
1758 call_cfunc_5(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1759 {
1760  return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
1761 }
1762 
1763 static VALUE
1764 call_cfunc_6(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1765 {
1766  return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
1767 }
1768 
1769 static VALUE
1770 call_cfunc_7(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1771 {
1772  return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
1773 }
1774 
1775 static VALUE
1776 call_cfunc_8(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1777 {
1778  return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
1779 }
1780 
1781 static VALUE
1782 call_cfunc_9(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1783 {
1784  return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
1785 }
1786 
1787 static VALUE
1788 call_cfunc_10(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1789 {
1790  return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
1791 }
1792 
1793 static VALUE
1794 call_cfunc_11(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1795 {
1796  return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
1797 }
1798 
1799 static VALUE
1800 call_cfunc_12(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1801 {
1802  return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
1803 }
1804 
1805 static VALUE
1806 call_cfunc_13(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1807 {
1808  return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
1809 }
1810 
1811 static VALUE
1812 call_cfunc_14(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1813 {
1814  return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
1815 }
1816 
1817 static VALUE
1818 call_cfunc_15(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1819 {
1820  return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
1821 }
1822 
1823 #ifndef VM_PROFILE
1824 #define VM_PROFILE 0
1825 #endif
1826 
1827 #if VM_PROFILE
1828 enum {
1829  VM_PROFILE_R2C_CALL,
1830  VM_PROFILE_R2C_POPF,
1831  VM_PROFILE_C2C_CALL,
1832  VM_PROFILE_C2C_POPF,
1833  VM_PROFILE_COUNT
1834 };
1835 static int vm_profile_counter[VM_PROFILE_COUNT];
1836 #define VM_PROFILE_UP(x) (vm_profile_counter[VM_PROFILE_##x]++)
1837 #define VM_PROFILE_ATEXIT() atexit(vm_profile_show_result)
1838 static void
1839 vm_profile_show_result(void)
1840 {
1841  fprintf(stderr, "VM Profile results: \n");
1842  fprintf(stderr, "r->c call: %d\n", vm_profile_counter[VM_PROFILE_R2C_CALL]);
1843  fprintf(stderr, "r->c popf: %d\n", vm_profile_counter[VM_PROFILE_R2C_POPF]);
1844  fprintf(stderr, "c->c call: %d\n", vm_profile_counter[VM_PROFILE_C2C_CALL]);
1845  fprintf(stderr, "c->c popf: %d\n", vm_profile_counter[VM_PROFILE_C2C_POPF]);
1846 }
1847 #else
1848 #define VM_PROFILE_UP(x)
1849 #define VM_PROFILE_ATEXIT()
1850 #endif
1851 
1852 static inline int
1853 vm_cfp_consistent_p(rb_thread_t *th, const rb_control_frame_t *reg_cfp)
1854 {
1855  const int ov_flags = RAISED_STACKOVERFLOW;
1856  if (LIKELY(reg_cfp == th->ec.cfp + 1)) return TRUE;
1857  if (rb_thread_raised_p(th, ov_flags)) {
1858  rb_thread_raised_reset(th, ov_flags);
1859  return TRUE;
1860  }
1861  return FALSE;
1862 }
1863 
1864 #define CHECK_CFP_CONSISTENCY(func) \
1865  (LIKELY(vm_cfp_consistent_p(th, reg_cfp)) ? (void)0 : \
1866  rb_bug(func ": cfp consistency error (%p, %p)", reg_cfp, th->ec.cfp+1))
1867 
1868 static inline
1869 const rb_method_cfunc_t *
1870 vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
1871 {
1872 #if VM_DEBUG_VERIFY_METHOD_CACHE
1873  switch (me->def->type) {
1874  case VM_METHOD_TYPE_CFUNC:
1876  break;
1877 # define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
1878  METHOD_BUG(ISEQ);
1879  METHOD_BUG(ATTRSET);
1880  METHOD_BUG(IVAR);
1881  METHOD_BUG(BMETHOD);
1882  METHOD_BUG(ZSUPER);
1883  METHOD_BUG(UNDEF);
1884  METHOD_BUG(OPTIMIZED);
1885  METHOD_BUG(MISSING);
1886  METHOD_BUG(REFINED);
1887  METHOD_BUG(ALIAS);
1888 # undef METHOD_BUG
1889  default:
1890  rb_bug("wrong method type: %d", me->def->type);
1891  }
1892 #endif
1893  return &me->def->body.cfunc;
1894 }
1895 
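/* vm_call_cfunc_with_frame() brackets a C method call with a CFUNC control
 * frame and c-call/c-return event hooks, checks the declared arity, invokes
 * the function through its arity-specific invoker, and verifies cfp
 * consistency before popping the frame and returning the result. */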
1896 static VALUE
1897 vm_call_cfunc_with_frame(rb_thread_t *th, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
1898 {
1899  VALUE val;
1900  const rb_callable_method_entry_t *me = cc->me;
1901  const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);
1902  int len = cfunc->argc;
1903 
1904  VALUE recv = calling->recv;
1905  VALUE block_handler = calling->block_handler;
1906  int argc = calling->argc;
1907 
1908  RUBY_DTRACE_CMETHOD_ENTRY_HOOK(th, me->owner, me->def->original_id);
1909  EXEC_EVENT_HOOK(th, RUBY_EVENT_C_CALL, recv, me->def->original_id, ci->mid, me->owner, Qundef);
1910 
1911  vm_push_frame(th, NULL, VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL, recv,
1912  block_handler, (VALUE)me,
1913  0, th->ec.cfp->sp, 0, 0);
1914 
1915  if (len >= 0) rb_check_arity(argc, len, len);
1916 
1917  reg_cfp->sp -= argc + 1;
1918  VM_PROFILE_UP(R2C_CALL);
1919  val = (*cfunc->invoker)(cfunc->func, recv, argc, reg_cfp->sp + 1);
1920 
1921  CHECK_CFP_CONSISTENCY("vm_call_cfunc");
1922 
1923  rb_vm_pop_frame(th);
1924 
1925  EXEC_EVENT_HOOK(th, RUBY_EVENT_C_RETURN, recv, me->def->original_id, ci->mid, me->owner, val);
1926  RUBY_DTRACE_CMETHOD_RETURN_HOOK(th, me->owner, me->def->original_id);
1927 
1928  return val;
1929 }
1930 
1931 static VALUE
1932 vm_call_cfunc(rb_thread_t *th, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
1933 {
1934  CALLER_SETUP_ARG(reg_cfp, calling, ci);
1935  return vm_call_cfunc_with_frame(th, reg_cfp, calling, ci, cc);
1936 }
1937 
1938 static VALUE
1939 vm_call_ivar(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
1940 {
1941  cfp->sp -= 1;
1942  return vm_getivar(calling->recv, cc->me->def->body.attr.id, NULL, cc, 1);
1943 }
1944 
1945 static VALUE
1946 vm_call_attrset(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
1947 {
1948  VALUE val = *(cfp->sp - 1);
1949  cfp->sp -= 2;
1950  return vm_setivar(calling->recv, cc->me->def->body.attr.id, val, NULL, cc, 1);
1951 }
1952 
1953 static inline VALUE
1954 vm_call_bmethod_body(rb_thread_t *th, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc, const VALUE *argv)
1955 {
1956  rb_proc_t *proc;
1957  VALUE val;
1958 
1959  /* control block frame */
1960  th->passed_bmethod_me = cc->me;
1961  GetProcPtr(cc->me->def->body.proc, proc);
1962  val = vm_invoke_bmethod(th, proc, calling->recv, calling->argc, argv, calling->block_handler);
1963 
1964  return val;
1965 }
1966 
1967 static VALUE
1968 vm_call_bmethod(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
1969 {
1970  VALUE *argv;
1971  int argc;
1972 
1973  CALLER_SETUP_ARG(cfp, calling, ci);
1974  argc = calling->argc;
1975  argv = ALLOCA_N(VALUE, argc);
1976  MEMCPY(argv, cfp->sp - argc, VALUE, argc);
1977  cfp->sp += - argc - 1;
1978 
1979  return vm_call_bmethod_body(th, calling, ci, cc, argv);
1980 }
1981 
1982 static enum method_missing_reason
1983 ci_missing_reason(const struct rb_call_info *ci)
1984 {
1985  enum method_missing_reason stat = MISSING_NOENTRY;
1986  if (ci->flag & VM_CALL_VCALL) stat |= MISSING_VCALL;
1987  if (ci->flag & VM_CALL_FCALL) stat |= MISSING_FCALL;
1988  if (ci->flag & VM_CALL_SUPER) stat |= MISSING_SUPER;
1989  return stat;
1990 }
1991 
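/* Optimized path for send/__send__: pop the method-name symbol from the
 * operand stack, rewrite the call info to target that method (falling back
 * to method_missing when the name is not an interned ID), and re-enter the
 * ordinary dispatch via vm_call_method(). */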
1992 static VALUE
1993 vm_call_opt_send(rb_thread_t *th, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *orig_ci, struct rb_call_cache *orig_cc)
1994 {
1995  int i;
1996  VALUE sym;
1997  struct rb_call_info *ci;
1998  struct rb_call_info_with_kwarg ci_entry;
1999  struct rb_call_cache cc_entry, *cc;
2000 
2001  CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
2002 
2003  i = calling->argc - 1;
2004 
2005  if (calling->argc == 0) {
2006  rb_raise(rb_eArgError, "no method name given");
2007  }
2008 
2009  /* setup new ci */
2010  if (orig_ci->flag & VM_CALL_KWARG) {
2011  ci = (struct rb_call_info *)&ci_entry;
2012  ci_entry = *(struct rb_call_info_with_kwarg *)orig_ci;
2013  }
2014  else {
2015  ci = &ci_entry.ci;
2016  ci_entry.ci = *orig_ci;
2017  }
2018  ci->flag = ci->flag & ~VM_CALL_KWARG; /* TODO: delegate kw_arg without making a Hash object */
2019 
2020  /* setup new cc */
2021  cc_entry = *orig_cc;
2022  cc = &cc_entry;
2023 
2024  sym = TOPN(i);
2025 
2026  if (!(ci->mid = rb_check_id(&sym))) {
2027  if (rb_method_basic_definition_p(CLASS_OF(calling->recv), idMethodMissing)) {
2028  VALUE exc = make_no_method_exception(rb_eNoMethodError, 0, calling->recv,
2029  rb_long2int(calling->argc), &TOPN(i),
2030  ci->flag & (VM_CALL_FCALL|VM_CALL_VCALL));
2031  rb_exc_raise(exc);
2032  }
2033  TOPN(i) = rb_str_intern(sym);
2034  ci->mid = idMethodMissing;
2035  th->method_missing_reason = cc->aux.method_missing_reason = ci_missing_reason(ci);
2036  }
2037  else {
2038  /* shift arguments */
2039  if (i > 0) {
2040  MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
2041  }
2042  calling->argc -= 1;
2043  DEC_SP(1);
2044  }
2045 
2046  cc->me = rb_callable_method_entry_with_refinements(CLASS_OF(calling->recv),
2047  ci->mid, NULL);
2048  return vm_call_method(th, reg_cfp, calling, ci, cc);
2049 }
2050 
2051 static VALUE
2052 vm_call_opt_call(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
2053 {
2054  rb_proc_t *proc;
2055  int argc;
2056  VALUE *argv;
2057 
2058  CALLER_SETUP_ARG(cfp, calling, ci);
2059 
2060  argc = calling->argc;
2061  argv = ALLOCA_N(VALUE, argc);
2062  GetProcPtr(calling->recv, proc);
2063  MEMCPY(argv, cfp->sp - argc, VALUE, argc);
2064  cfp->sp -= argc + 1;
2065 
2066  return rb_vm_invoke_proc(th, proc, argc, argv, calling->block_handler);
2067 }
2068 
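/* Redirect an unresolved call to #method_missing: shift the arguments up by
 * one slot, put the original method name symbol in front of them, and
 * dispatch method_missing on the same receiver. */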
2069 static VALUE
2070 vm_call_method_missing(rb_thread_t *th, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *orig_ci, struct rb_call_cache *orig_cc)
2071 {
2072  VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
2073  struct rb_call_info ci_entry;
2074  const struct rb_call_info *ci;
2075  struct rb_call_cache cc_entry, *cc;
2076  unsigned int argc;
2077 
2078  CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
2079  argc = calling->argc+1;
2080 
2081  ci_entry.flag = VM_CALL_FCALL | VM_CALL_OPT_SEND;
2082  ci_entry.mid = idMethodMissing;
2083  ci_entry.orig_argc = argc;
2084  ci = &ci_entry;
2085 
2086  cc_entry = *orig_cc;
2087  cc_entry.me =
2088  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv),
2089  idMethodMissing, NULL);
2090  cc = &cc_entry;
2091 
2092  calling->argc = argc;
2093 
2094  /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
2095  CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
2096  if (argc > 1) {
2097  MEMMOVE(argv+1, argv, VALUE, argc-1);
2098  }
2099  argv[0] = ID2SYM(orig_ci->mid);
2100  INC_SP(1);
2101 
2101 
2102  th->method_missing_reason = orig_cc->aux.method_missing_reason;
2103  return vm_call_method(th, reg_cfp, calling, ci, cc);
2104 }
2105 
2106 static const rb_callable_method_entry_t *refined_method_callable_without_refinement(const rb_callable_method_entry_t *me);
2107 static VALUE
2108 vm_call_zsuper(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc, VALUE klass)
2109 {
2110  klass = RCLASS_SUPER(klass);
2111  cc->me = klass ? rb_callable_method_entry(klass, ci->mid) : NULL;
2112 
2113  if (!cc->me) {
2114  return vm_call_method_nome(th, cfp, calling, ci, cc);
2115  }
2116  if (cc->me->def->type == VM_METHOD_TYPE_REFINED &&
2117  cc->me->def->body.refined.orig_me) {
2118  cc->me = refined_method_callable_without_refinement(cc->me);
2119  }
2120  return vm_call_method_each_type(th, cfp, calling, ci, cc);
2121 }
2122 
2123 static inline VALUE
2124 find_refinement(VALUE refinements, VALUE klass)
2125 {
2126  if (NIL_P(refinements)) {
2127  return Qnil;
2128  }
2129  return rb_hash_lookup(refinements, klass);
2130 }
2131 
2132 PUREFUNC(static rb_control_frame_t * current_method_entry(rb_thread_t *th, rb_control_frame_t *cfp));
2133 static rb_control_frame_t *
2134 current_method_entry(rb_thread_t *th, rb_control_frame_t *cfp)
2135 {
2136  rb_control_frame_t *top_cfp = cfp;
2137 
2138  if (cfp->iseq && cfp->iseq->body->type == ISEQ_TYPE_BLOCK) {
2139  const rb_iseq_t *local_iseq = cfp->iseq->body->local_iseq;
2140 
2141  do {
2142  cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
2143  if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(th, cfp)) {
2144  /* TODO: orphan block */
2145  return top_cfp;
2146  }
2147  } while (cfp->iseq != local_iseq);
2148  }
2149  return cfp;
2150 }
2151 
2152 static VALUE
2153 find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
2154 {
2155  VALUE klass = current_class;
2156 
2157  /* for prepended Module, then start from cover class */
2158  if (RB_TYPE_P(klass, T_ICLASS) && FL_TEST(klass, RICLASS_IS_ORIGIN)) klass = RBASIC_CLASS(klass);
2159 
2160  while (RTEST(klass)) {
2161  VALUE owner = RB_TYPE_P(klass, T_ICLASS) ? RBASIC_CLASS(klass) : klass;
2162  if (owner == target_owner) {
2163  return klass;
2164  }
2165  klass = RCLASS_SUPER(klass);
2166  }
2167 
2168  return current_class; /* maybe module function */
2169 }
2170 
2171 static const rb_callable_method_entry_t *
2172 aliased_callable_method_entry(const rb_callable_method_entry_t *me)
2173 {
2174  const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
2175  const rb_callable_method_entry_t *cme;
2176 
2177  if (orig_me->defined_class == 0) {
2178  VALUE defined_class = find_defined_class_by_owner(me->defined_class, orig_me->owner);
2179  VM_ASSERT(RB_TYPE_P(orig_me->owner, T_MODULE));
2180  cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
2181 
2182  if (me->def->alias_count + me->def->complemented_count == 0) {
2183  RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
2184  }
2185  else {
2186  method_definition_set((rb_method_entry_t *)me,
2187  method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id),
2188  (void *)cme);
2189  }
2190  }
2191  else {
2192  cme = (const rb_callable_method_entry_t *)orig_me;
2193  }
2194 
2195  VM_ASSERT(callable_method_entry_p(cme));
2196  return cme;
2197 }
2198 
2199 static const rb_callable_method_entry_t *
2200 refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
2201 {
2202  const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
2203  const rb_callable_method_entry_t *cme;
2204 
2205  if (orig_me->defined_class == 0) {
2206  cme = NULL;
2207  rb_notimplement();
2208  }
2209  else {
2210  cme = (const rb_callable_method_entry_t *)orig_me;
2211  }
2212 
2213  VM_ASSERT(callable_method_entry_p(cme));
2214 
2215  if (UNDEFINED_METHOD_ENTRY_P(cme)) {
2216  cme = NULL;
2217  }
2218 
2219  return cme;
2220 }
2221 
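/* Dispatch on the resolved method definition type (iseq, cfunc, attr
 * reader/writer, bmethod, alias, refined, optimized, ...) and register the
 * matching fast-path handler in the call cache via CI_SET_FASTPATH so that
 * subsequent calls through this call site can skip this switch. */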
2222 static VALUE
2223 vm_call_method_each_type(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
2224 {
2225  switch (cc->me->def->type) {
2226  case VM_METHOD_TYPE_ISEQ:
2227  CI_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
2228  return vm_call_iseq_setup(th, cfp, calling, ci, cc);
2229 
2231  case VM_METHOD_TYPE_CFUNC:
2232  CI_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
2233  return vm_call_cfunc(th, cfp, calling, ci, cc);
2234 
2235  case VM_METHOD_TYPE_ATTRSET:
2236  CALLER_SETUP_ARG(cfp, calling, ci);
2237  rb_check_arity(calling->argc, 1, 1);
2238  cc->aux.index = 0;
2239  CI_SET_FASTPATH(cc, vm_call_attrset, !((ci->flag & VM_CALL_ARGS_SPLAT) || (ci->flag & VM_CALL_KWARG)));
2240  return vm_call_attrset(th, cfp, calling, ci, cc);
2241 
2242  case VM_METHOD_TYPE_IVAR:
2243  CALLER_SETUP_ARG(cfp, calling, ci);
2244  rb_check_arity(calling->argc, 0, 0);
2245  cc->aux.index = 0;
2246  CI_SET_FASTPATH(cc, vm_call_ivar, !(ci->flag & VM_CALL_ARGS_SPLAT));
2247  return vm_call_ivar(th, cfp, calling, ci, cc);
2248 
2249  case VM_METHOD_TYPE_MISSING:
2250  cc->aux.method_missing_reason = 0;
2251  CI_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
2252  return vm_call_method_missing(th, cfp, calling, ci, cc);
2253 
2254  case VM_METHOD_TYPE_BMETHOD:
2255  CI_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
2256  return vm_call_bmethod(th, cfp, calling, ci, cc);
2257 
2258  case VM_METHOD_TYPE_ALIAS:
2259  cc->me = aliased_callable_method_entry(cc->me);
2260  VM_ASSERT(cc->me != NULL);
2261  return vm_call_method_each_type(th, cfp, calling, ci, cc);
2262 
2263  case VM_METHOD_TYPE_OPTIMIZED:
2264  switch (cc->me->def->body.optimize_type) {
2265  case OPTIMIZED_METHOD_TYPE_SEND:
2266  CI_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
2267  return vm_call_opt_send(th, cfp, calling, ci, cc);
2268  case OPTIMIZED_METHOD_TYPE_CALL:
2269  CI_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
2270  return vm_call_opt_call(th, cfp, calling, ci, cc);
2271  default:
2272  rb_bug("vm_call_method: unsupported optimized method type (%d)",
2273  cc->me->def->body.optimize_type);
2274  }
2275 
2276  case VM_METHOD_TYPE_UNDEF:
2277  break;
2278 
2279  case VM_METHOD_TYPE_ZSUPER:
2280  return vm_call_zsuper(th, cfp, calling, ci, cc, RCLASS_ORIGIN(cc->me->owner));
2281 
2282  case VM_METHOD_TYPE_REFINED: {
2283  const rb_cref_t *cref = rb_vm_get_cref(cfp->ep);
2284  VALUE refinements = cref ? CREF_REFINEMENTS(cref) : Qnil;
2285  VALUE refinement;
2286  const rb_callable_method_entry_t *ref_me;
2287 
2288  refinement = find_refinement(refinements, cc->me->owner);
2289 
2290  if (NIL_P(refinement)) {
2291  goto no_refinement_dispatch;
2292  }
2293  ref_me = rb_callable_method_entry(refinement, ci->mid);
2294 
2295  if (ref_me) {
2296  if (cc->call == vm_call_super_method) {
2297  const rb_control_frame_t *top_cfp = current_method_entry(th, cfp);
2298  const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
2299  if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
2300  goto no_refinement_dispatch;
2301  }
2302  }
2303  cc->me = ref_me;
2304  if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
2305  return vm_call_method(th, cfp, calling, ci, cc);
2306  }
2307  }
2308  else {
2309  cc->me = NULL;
2310  return vm_call_method_nome(th, cfp, calling, ci, cc);
2311  }
2312 
2313  no_refinement_dispatch:
2314  if (cc->me->def->body.refined.orig_me) {
2315  cc->me = refined_method_callable_without_refinement(cc->me);
2316  }
2317  else {
2318  VALUE klass = RCLASS_SUPER(cc->me->owner);
2319  cc->me = klass ? rb_callable_method_entry(klass, ci->mid) : NULL;
2320  }
2321  return vm_call_method(th, cfp, calling, ci, cc);
2322  }
2323  }
2324 
2325  rb_bug("vm_call_method: unsupported method type (%d)", cc->me->def->type);
2326 }
2327 
2328 static VALUE
2329 vm_call_method_nome(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
2330 {
2331  /* method missing */
2332  const int stat = ci_missing_reason(ci);
2333 
2334  if (ci->mid == idMethodMissing) {
2335  rb_control_frame_t *reg_cfp = cfp;
2336  VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
2337  rb_raise_method_missing(th, calling->argc, argv, calling->recv, stat);
2338  }
2339  else {
2340  cc->aux.method_missing_reason = stat;
2341  CI_SET_FASTPATH(cc, vm_call_method_missing, 1);
2342  return vm_call_method_missing(th, cfp, calling, ci, cc);
2343  }
2344 }
2345 
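/* Main entry of method dispatch once the method entry has been looked up:
 * enforce visibility (public/private/protected) and hand calls without a
 * method entry over to the method_missing machinery. */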
2346 static inline VALUE
2347 vm_call_method(rb_thread_t *th, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
2348 {
2349  VM_ASSERT(callable_method_entry_p(cc->me));
2350 
2351  if (cc->me != NULL) {
2352  switch (METHOD_ENTRY_VISI(cc->me)) {
2353  case METHOD_VISI_PUBLIC: /* likely */
2354  return vm_call_method_each_type(th, cfp, calling, ci, cc);
2355 
2356  case METHOD_VISI_PRIVATE:
2357  if (!(ci->flag & VM_CALL_FCALL)) {
2358  enum method_missing_reason stat = MISSING_PRIVATE;
2359  if (ci->flag & VM_CALL_VCALL) stat |= MISSING_VCALL;
2360 
2361  cc->aux.method_missing_reason = stat;
2362  CI_SET_FASTPATH(cc, vm_call_method_missing, 1);
2363  return vm_call_method_missing(th, cfp, calling, ci, cc);
2364  }
2365  return vm_call_method_each_type(th, cfp, calling, ci, cc);
2366 
2367  case METHOD_VISI_PROTECTED:
2368  if (!(ci->flag & VM_CALL_OPT_SEND)) {
2369  if (!rb_obj_is_kind_of(cfp->self, cc->me->defined_class)) {
2370  cc->aux.method_missing_reason = MISSING_PROTECTED;
2371  return vm_call_method_missing(th, cfp, calling, ci, cc);
2372  }
2373  else {
2374  /* caching method info to dummy cc */
2375  struct rb_call_cache cc_entry;
2376  cc_entry = *cc;
2377  cc = &cc_entry;
2378 
2379  VM_ASSERT(cc->me != NULL);
2380  return vm_call_method_each_type(th, cfp, calling, ci, cc);
2381  }
2382  }
2383  return vm_call_method_each_type(th, cfp, calling, ci, cc);
2384 
2385  default:
2386  rb_bug("unreachable");
2387  }
2388  }
2389  else {
2390  return vm_call_method_nome(th, cfp, calling, ci, cc);
2391  }
2392 }
2393 
2394 static VALUE
2395 vm_call_general(rb_thread_t *th, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
2396 {
2397  return vm_call_method(th, reg_cfp, calling, ci, cc);
2398 }
2399 
2400 static VALUE
2401 vm_call_super_method(rb_thread_t *th, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
2402 {
2403  /* this check is required to distinguish with other functions. */
2404  if (cc->call != vm_call_super_method) rb_bug("bug");
2405  return vm_call_method(th, reg_cfp, calling, ci, cc);
2406 }
2407 
2408 /* super */
2409 
2410 static inline VALUE
2411 vm_search_normal_superclass(VALUE klass)
2412 {
2413  if (BUILTIN_TYPE(klass) == T_ICLASS &&
2414  FL_TEST(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
2415  klass = RBASIC(klass)->klass;
2416  }
2417  klass = RCLASS_ORIGIN(klass);
2418  return RCLASS_SUPER(klass);
2419 }
2420 
2421 static void
2422 vm_super_outside(void)
2423 {
2424  rb_raise(rb_eNoMethodError, "super called outside of method");
2425 }
2426 
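/* Resolve the callee of a super call: take the method entry of the current
 * frame, validate the receiver against the defining class (refinements and
 * prepended modules need extra care), then search the method again starting
 * from the superclass of the defining class. */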
2427 static void
2428 vm_search_super_method(rb_thread_t *th, rb_control_frame_t *reg_cfp,
2429  struct rb_calling_info *calling, struct rb_call_info *ci, struct rb_call_cache *cc)
2430 {
2431  VALUE current_defined_class, klass;
2432  VALUE sigval = TOPN(calling->argc);
2433  const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
2434 
2435  if (!me) {
2436  vm_super_outside();
2437  }
2438 
2439  current_defined_class = me->defined_class;
2440 
2441  if (!NIL_P(RCLASS_REFINED_CLASS(current_defined_class))) {
2442  current_defined_class = RCLASS_REFINED_CLASS(current_defined_class);
2443  }
2444 
2445  if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
2446  BUILTIN_TYPE(current_defined_class) != T_ICLASS && /* bound UnboundMethod */
2447  !FL_TEST(current_defined_class, RMODULE_INCLUDED_INTO_REFINEMENT) &&
2448  !rb_obj_is_kind_of(calling->recv, current_defined_class)) {
2449  VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
2450  RBASIC(current_defined_class)->klass : current_defined_class;
2451 
2452  rb_raise(rb_eTypeError,
2453  "self has wrong type to call super in this context: "
2454  "%"PRIsVALUE" (expected %"PRIsVALUE")",
2455  rb_obj_class(calling->recv), m);
2456  }
2457 
2458  if (me->def->type == VM_METHOD_TYPE_BMETHOD && !sigval) {
2459  rb_raise(rb_eRuntimeError,
2460  "implicit argument passing of super from method defined"
2461  " by define_method() is not supported."
2462  " Specify all arguments explicitly.");
2463  }
2464 
2465  ci->mid = me->def->original_id;
2466  klass = vm_search_normal_superclass(me->defined_class);
2467 
2468  if (!klass) {
2469  /* bound instance method of module */
2470  cc->aux.method_missing_reason = MISSING_SUPER;
2471  CI_SET_FASTPATH(cc, vm_call_method_missing, 1);
2472  }
2473  else {
2474  /* TODO: use inline cache */
2475  cc->me = rb_callable_method_entry(klass, ci->mid);
2476  CI_SET_FASTPATH(cc, vm_call_super_method, 1);
2477  }
2478 }
2479 
2480 /* yield */
2481 
2482 static inline int
2483 block_proc_is_lambda(const VALUE procval)
2484 {
2485  rb_proc_t *proc;
2486 
2487  if (procval) {
2488  GetProcPtr(procval, proc);
2489  return proc->is_lambda;
2490  }
2491  else {
2492  return 0;
2493  }
2494 }
2495 
2496 static VALUE
2497 vm_block_handler_to_proc(rb_thread_t *th, VALUE block_handler)
2498 {
2499  VALUE blockarg = Qnil;
2500 
2501  if (block_handler != VM_BLOCK_HANDLER_NONE) {
2502  switch (vm_block_handler_type(block_handler)) {
2503  case block_handler_type_proc:
2504  blockarg = block_handler;
2505  break;
2506  case block_handler_type_symbol:
2507  blockarg = rb_sym_to_proc(block_handler);
2508  break;
2509  case block_handler_type_iseq:
2510  case block_handler_type_ifunc:
2511  blockarg = rb_vm_make_proc(th, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
2512  break;
2513  }
2514  }
2515 
2516  return blockarg;
2517 }
2518 
2519 static VALUE
2520 vm_yield_with_cfunc(rb_thread_t *th,
2521  const struct rb_captured_block *captured,
2522  VALUE self, int argc, const VALUE *argv, VALUE block_handler)
2523 {
2524  int is_lambda = FALSE; /* TODO */
2525  VALUE val, arg, blockarg;
2526  const struct vm_ifunc *ifunc = captured->code.ifunc;
2527  const rb_callable_method_entry_t *me = th->passed_bmethod_me;
2528  th->passed_bmethod_me = NULL;
2529 
2530  if (is_lambda) {
2531  arg = rb_ary_new4(argc, argv);
2532  }
2533  else if (argc == 0) {
2534  arg = Qnil;
2535  }
2536  else {
2537  arg = argv[0];
2538  }
2539 
2540  blockarg = vm_block_handler_to_proc(th, block_handler);
2541 
2542  vm_push_frame(th, (const rb_iseq_t *)captured->code.ifunc,
2544  self,
2545  VM_GUARDED_PREV_EP(captured->ep),
2546  (VALUE)me,
2547  0, th->ec.cfp->sp, 0, 0);
2548  val = (*ifunc->func)(arg, ifunc->data, argc, argv, blockarg);
2549  rb_vm_pop_frame(th);
2550 
2551  return val;
2552 }
2553 
2554 static VALUE
2555 vm_yield_with_symbol(rb_thread_t *th, VALUE symbol, int argc, const VALUE *argv, VALUE block_handler)
2556 {
2557  return rb_sym_proc_call(SYM2ID(symbol), argc, argv, vm_block_handler_to_proc(th, block_handler));
2558 }
2559 
2560 static inline int
2561 vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
2562 {
2563  int i;
2564  long len = RARRAY_LEN(ary);
2565 
2566  CHECK_VM_STACK_OVERFLOW(cfp, iseq->body->param.lead_num);
2567 
2568  for (i=0; i<len && i<iseq->body->param.lead_num; i++) {
2569  argv[i] = RARRAY_AREF(ary, i);
2570  }
2571 
2572  return i;
2573 }
2574 
2575 static inline VALUE
2576 vm_callee_setup_block_arg_arg0_check(VALUE *argv)
2577 {
2578  VALUE ary, arg0 = argv[0];
2579  ary = rb_check_array_type(arg0);
2580 #if 0
2581  argv[0] = arg0;
2582 #else
2583  VM_ASSERT(argv[0] == arg0);
2584 #endif
2585  return ary;
2586 }
2587 
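/* Prepare block arguments for an iseq with only mandatory parameters: a
 * single array argument is splatted across the leading parameters, missing
 * arguments are padded with nil and surplus ones are truncated (non-lambda
 * block semantics); lambdas and complex parameter lists take the strict
 * path instead. */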
2588 static int
2589 vm_callee_setup_block_arg(rb_thread_t *th, struct rb_calling_info *calling, const struct rb_call_info *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
2590 {
2591  if (simple_iseq_p(iseq)) {
2592  rb_control_frame_t *cfp = th->ec.cfp;
2593  VALUE arg0;
2594 
2595  CALLER_SETUP_ARG(cfp, calling, ci); /* splat arg */
2596 
2597  if (arg_setup_type == arg_setup_block &&
2598  calling->argc == 1 &&
2599  iseq->body->param.flags.has_lead &&
2600  !iseq->body->param.flags.ambiguous_param0 &&
2601  !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
2602  calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
2603  }
2604 
2605  if (calling->argc != iseq->body->param.lead_num) {
2606  if (arg_setup_type == arg_setup_block) {
2607  if (calling->argc < iseq->body->param.lead_num) {
2608  int i;
2609  CHECK_VM_STACK_OVERFLOW(cfp, iseq->body->param.lead_num);
2610  for (i=calling->argc; i<iseq->body->param.lead_num; i++) argv[i] = Qnil;
2611  calling->argc = iseq->body->param.lead_num; /* fill rest parameters */
2612  }
2613  else if (calling->argc > iseq->body->param.lead_num) {
2614  calling->argc = iseq->body->param.lead_num; /* simply truncate arguments */
2615  }
2616  }
2617  else {
2618  argument_arity_error(th, iseq, calling->argc, iseq->body->param.lead_num, iseq->body->param.lead_num);
2619  }
2620  }
2621 
2622  return 0;
2623  }
2624  else {
2625  return setup_parameters_complex(th, iseq, calling, ci, argv, arg_setup_type);
2626  }
2627 }
2628 
2629 static int
2630 vm_yield_setup_args(rb_thread_t *th, const rb_iseq_t *iseq, const int argc, VALUE *argv, VALUE block_handler, enum arg_setup_type arg_setup_type)
2631 {
2632  struct rb_calling_info calling_entry, *calling;
2633  struct rb_call_info ci_entry, *ci;
2634 
2635  calling = &calling_entry;
2636  calling->argc = argc;
2637  calling->block_handler = block_handler;
2638 
2639  ci_entry.flag = 0;
2640  ci = &ci_entry;
2641 
2642  return vm_callee_setup_block_arg(th, calling, ci, iseq, argv, arg_setup_type);
2643 }
2644 
2645 /* ruby iseq -> ruby block */
2646 
2647 static VALUE
2648 vm_invoke_iseq_block(rb_thread_t *th, rb_control_frame_t *reg_cfp,
2649  struct rb_calling_info *calling, const struct rb_call_info *ci,
2650  int is_lambda, const struct rb_captured_block *captured)
2651 {
2652  const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
2653  const int arg_size = iseq->body->param.size;
2654  VALUE * const rsp = GET_SP() - calling->argc;
2655  int opt_pc = vm_callee_setup_block_arg(th, calling, ci, iseq, rsp, is_lambda ? arg_setup_method : arg_setup_block);
2656 
2657  SET_SP(rsp);
2658 
2659  vm_push_frame(th, iseq,
2660  VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
2661  captured->self,
2662  VM_GUARDED_PREV_EP(captured->ep), 0,
2663  iseq->body->iseq_encoded + opt_pc,
2664  rsp + arg_size,
2665  iseq->body->local_table_size - arg_size, iseq->body->stack_max);
2666 
2667  return Qundef;
2668 }
2669 
2670 static VALUE
2671 vm_invoke_symbol_block(rb_thread_t *th, rb_control_frame_t *reg_cfp,
2672  struct rb_calling_info *calling, const struct rb_call_info *ci,
2673  VALUE symbol)
2674 {
2675  VALUE val;
2676  int argc;
2677  CALLER_SETUP_ARG(th->ec.cfp, calling, ci);
2678  argc = calling->argc;
2679  val = vm_yield_with_symbol(th, symbol, argc, STACK_ADDR_FROM_TOP(argc), VM_BLOCK_HANDLER_NONE);
2680  POPN(argc);
2681  return val;
2682 }
2683 
2684 static VALUE
2685 vm_invoke_ifunc_block(rb_thread_t *th, rb_control_frame_t *reg_cfp,
2686  struct rb_calling_info *calling, const struct rb_call_info *ci,
2687  const struct rb_captured_block *captured)
2688 {
2689  VALUE val;
2690  int argc;
2691  CALLER_SETUP_ARG(th->ec.cfp, calling, ci);
2692  argc = calling->argc;
2693  val = vm_yield_with_cfunc(th, captured, captured->self, argc, STACK_ADDR_FROM_TOP(argc), VM_BLOCK_HANDLER_NONE);
2694  POPN(argc); /* TODO: should put before C/yield? */
2695  return val;
2696 }
2697 
2698 static VALUE
2699 vm_proc_to_block_handler(VALUE procval)
2700 {
2701  const struct rb_block *block = vm_proc_block(procval);
2702 
2703  switch (vm_block_type(block)) {
2704  case block_type_iseq:
2705  return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
2706  case block_type_ifunc:
2707  return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
2708  case block_type_symbol:
2709  return VM_BH_FROM_SYMBOL(block->as.symbol);
2710  case block_type_proc:
2711  return VM_BH_FROM_PROC(block->as.proc);
2712  }
2713  VM_UNREACHABLE(vm_yield_with_proc);
2714  return Qundef;
2715 }
2716 
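/* Implementation of `yield`: read the block handler of the current method
 * frame and invoke it according to its type (iseq block, C-level ifunc,
 * Proc or Symbol), raising a LocalJumpError when no block was given. */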
2717 static VALUE
2718 vm_invoke_block(rb_thread_t *th, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci)
2719 {
2720  VALUE block_handler = VM_CF_BLOCK_HANDLER(reg_cfp);
2721  VALUE type = GET_ISEQ()->body->local_iseq->body->type;
2722  int is_lambda = FALSE;
2723 
2724  if ((type != ISEQ_TYPE_METHOD && type != ISEQ_TYPE_CLASS) ||
2725  block_handler == VM_BLOCK_HANDLER_NONE) {
2726  rb_vm_localjump_error("no block given (yield)", Qnil, 0);
2727  }
2728 
2729  again:
2730  switch (vm_block_handler_type(block_handler)) {
2731  case block_handler_type_iseq:
2732  {
2733  const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
2734  return vm_invoke_iseq_block(th, reg_cfp, calling, ci, is_lambda, captured);
2735  }
2736  case block_handler_type_ifunc:
2737  {
2738  const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
2739  return vm_invoke_ifunc_block(th, reg_cfp, calling, ci, captured);
2740  }
2741  case block_handler_type_proc:
2742  is_lambda = block_proc_is_lambda(VM_BH_TO_PROC(block_handler));
2743  block_handler = vm_proc_to_block_handler(VM_BH_TO_PROC(block_handler));
2744  goto again;
2745  case block_handler_type_symbol:
2746  return vm_invoke_symbol_block(th, reg_cfp, calling, ci, VM_BH_TO_SYMBOL(block_handler));
2747  }
2748  VM_UNREACHABLE(vm_invoke_block: unreachable);
2749  return Qnil;
2750 }
2751 
2752 static VALUE
2753 vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
2754 {
2755  rb_thread_t *th = GET_THREAD();
2756  rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(th, th->ec.cfp);
2757  struct rb_captured_block *captured;
2758 
2759  if (cfp == 0) {
2760  rb_bug("vm_make_proc_with_iseq: unreachable");
2761  }
2762 
2763  captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
2764  captured->code.iseq = blockiseq;
2765 
2766  return rb_vm_make_proc(th, captured, rb_cProc);
2767 }
2768 
2769 static VALUE
2770 vm_once_exec(VALUE iseq)
2771 {
2772  VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
2773  return rb_proc_call_with_block(proc, 0, 0, Qnil);
2774 }
2775 
2776 static VALUE
2777 vm_once_clear(VALUE data)
2778 {
2779  union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
2780  is->once.running_thread = NULL;
2781  return Qnil;
2782 }
2783 
2786 {
2787  TOPN(0) = rb_struct_aref(GET_SELF(), TOPN(0));
2788  return reg_cfp;
2789 }
2790 
2793 {
2794  rb_struct_aset(GET_SELF(), TOPN(0), TOPN(1));
2795  return reg_cfp;
2796 }
2797 
2798 /* defined insn */
2799 
2800 static enum defined_type
2801 check_respond_to_missing(VALUE obj, VALUE v)
2802 {
2803  VALUE args[2];
2804  VALUE r;
2805 
2806  args[0] = obj; args[1] = Qfalse;
2807  r = rb_check_funcall(v, idRespond_to_missing, 2, args);
2808  if (r != Qundef && RTEST(r)) {
2809  return DEFINED_METHOD;
2810  }
2811  else {
2812  return 0;
2813  }
2814 }
2815 
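/* Back end of the `defined?` keyword: inspect the requested expression kind
 * (instance/global/class variable, constant, method, yield, super, ...) and
 * return a description string, Qtrue or Qnil accordingly. */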
2816 static VALUE
2817 vm_defined(rb_thread_t *th, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE needstr, VALUE v)
2818 {
2819  VALUE klass;
2820  enum defined_type expr_type = 0;
2821  enum defined_type type = (enum defined_type)op_type;
2822 
2823  switch (type) {
2824  case DEFINED_IVAR:
2825  if (rb_ivar_defined(GET_SELF(), SYM2ID(obj))) {
2826  expr_type = DEFINED_IVAR;
2827  }
2828  break;
2829  case DEFINED_IVAR2:
2830  klass = vm_get_cbase(GET_EP());
2831  break;
2832  case DEFINED_GVAR:
2833  if (rb_gvar_defined(rb_global_entry(SYM2ID(obj)))) {
2834  expr_type = DEFINED_GVAR;
2835  }
2836  break;
2837  case DEFINED_CVAR: {
2838  const rb_cref_t *cref = rb_vm_get_cref(GET_EP());
2839  klass = vm_get_cvar_base(cref, GET_CFP());
2840  if (rb_cvar_defined(klass, SYM2ID(obj))) {
2841  expr_type = DEFINED_CVAR;
2842  }
2843  break;
2844  }
2845  case DEFINED_CONST:
2846  klass = v;
2847  if (vm_get_ev_const(th, klass, SYM2ID(obj), 1)) {
2848  expr_type = DEFINED_CONST;
2849  }
2850  break;
2851  case DEFINED_FUNC:
2852  klass = CLASS_OF(v);
2853  if (rb_method_boundp(klass, SYM2ID(obj), 0)) {
2854  expr_type = DEFINED_METHOD;
2855  }
2856  else {
2857  expr_type = check_respond_to_missing(obj, v);
2858  }
2859  break;
2860  case DEFINED_METHOD:{
2861  VALUE klass = CLASS_OF(v);
2862  const rb_method_entry_t *me = rb_method_entry(klass, SYM2ID(obj));
2863 
2864  if (me) {
2865  switch (METHOD_ENTRY_VISI(me)) {
2866  case METHOD_VISI_PRIVATE:
2867  break;
2868  case METHOD_VISI_PROTECTED:
2869  if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(klass))) {
2870  break;
2871  }
2872  case METHOD_VISI_PUBLIC:
2873  expr_type = DEFINED_METHOD;
2874  break;
2875  default:
2876  rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
2877  }
2878  }
2879  else {
2880  expr_type = check_respond_to_missing(obj, v);
2881  }
2882  break;
2883  }
2884  case DEFINED_YIELD:
2885  if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
2886  expr_type = DEFINED_YIELD;
2887  }
2888  break;
2889  case DEFINED_ZSUPER:
2890  {
2891  const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());
2892 
2893  if (me) {
2894  VALUE klass = vm_search_normal_superclass(me->defined_class);
2895  ID id = me->def->original_id;
2896 
2897  if (rb_method_boundp(klass, id, 0)) {
2898  expr_type = DEFINED_ZSUPER;
2899  }
2900  }
2901  }
2902  break;
2903  case DEFINED_REF:{
2904  if (vm_getspecial(th, GET_LEP(), Qfalse, FIX2INT(obj)) != Qnil) {
2905  expr_type = DEFINED_GVAR;
2906  }
2907  break;
2908  }
2909  default:
2910  rb_bug("unimplemented defined? type (VM)");
2911  break;
2912  }
2913 
2914  if (expr_type != 0) {
2915  if (needstr != Qfalse) {
2916  return rb_iseq_defined_string(expr_type);
2917  }
2918  else {
2919  return Qtrue;
2920  }
2921  }
2922  else {
2923  return Qnil;
2924  }
2925 }
2926 
2927 static const VALUE *
2928 vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
2929 {
2930  rb_num_t i;
2931  const VALUE *ep = reg_ep;
2932  for (i = 0; i < lv; i++) {
2933  ep = GET_PREV_EP(ep);
2934  }
2935  return ep;
2936 }
2937 
2938 static VALUE
2939 vm_get_special_object(const VALUE *const reg_ep,
2940  enum vm_special_object_type type)
2941 {
2942  switch (type) {
2943  case VM_SPECIAL_OBJECT_VMCORE:
2944  return rb_mRubyVMFrozenCore;
2945  case VM_SPECIAL_OBJECT_CBASE:
2946  return vm_get_cbase(reg_ep);
2947  case VM_SPECIAL_OBJECT_CONST_BASE:
2948  return vm_get_const_base(reg_ep);
2949  default:
2950  rb_bug("putspecialobject insn: unknown value_type %d", type);
2951  }
2952 }
2953 
2954 static void
2955 vm_freezestring(VALUE str, VALUE debug)
2956 {
2957  if (!NIL_P(debug)) {
2958  rb_ivar_set(str, id_debug_created_info, debug);
2959  }
2960  rb_str_freeze(str);
2961 }
2962 
2963 static VALUE
2964 vm_concat_array(VALUE ary1, VALUE ary2st)
2965 {
2966  const VALUE ary2 = ary2st;
2967  VALUE tmp1 = rb_check_convert_type_with_id(ary1, T_ARRAY, "Array", idTo_a);
2968  VALUE tmp2 = rb_check_convert_type_with_id(ary2, T_ARRAY, "Array", idTo_a);
2969 
2970  if (NIL_P(tmp1)) {
2971  tmp1 = rb_ary_new3(1, ary1);
2972  }
2973 
2974  if (NIL_P(tmp2)) {
2975  tmp2 = rb_ary_new3(1, ary2);
2976  }
2977 
2978  if (tmp1 == ary1) {
2979  tmp1 = rb_ary_dup(ary1);
2980  }
2981  return rb_ary_concat(tmp1, tmp2);
2982 }
2983 
2984 static VALUE
2985 vm_splat_array(VALUE flag, VALUE ary)
2986 {
2987  VALUE tmp = rb_check_convert_type_with_id(ary, T_ARRAY, "Array", idTo_a);
2988  if (NIL_P(tmp)) {
2989  return rb_ary_new3(1, ary);
2990  }
2991  else if (RTEST(flag)) {
2992  return rb_ary_dup(tmp);
2993  }
2994  else {
2995  return tmp;
2996  }
2997 }
2998 
2999 static VALUE
3000 vm_check_match(VALUE target, VALUE pattern, rb_num_t flag)
3001 {
3002  enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
3003 
3004  if (flag & VM_CHECKMATCH_ARRAY) {
3005  long i;
3006  const long n = RARRAY_LEN(pattern);
3007 
3008  for (i = 0; i < n; i++) {
3009  VALUE v = RARRAY_AREF(pattern, i);
3010  VALUE c = check_match(v, target, type);
3011 
3012  if (RTEST(c)) {
3013  return c;
3014  }
3015  }
3016  return Qfalse;
3017  }
3018  else {
3019  return check_match(pattern, target, type);
3020  }
3021 }
3022 
3023 static VALUE
3024 vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
3025 {
3026  const VALUE kw_bits = *(ep - bits);
3027 
3028  if (FIXNUM_P(kw_bits)) {
3029  int b = FIX2INT(kw_bits);
3030  return (b & (0x01 << idx)) ? Qfalse : Qtrue;
3031  }
3032  else {
3033  VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
3034  return rb_hash_has_key(kw_bits, INT2FIX(idx));
3035  }
3036 }
3037 
3038 static void
3039 vm_dtrace(rb_event_flag_t flag, rb_thread_t *th)
3040 {
3041  if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
3042  RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
3043  RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
3044  RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
3045 
3046  switch (flag) {
3047  case RUBY_EVENT_CALL:
3048  RUBY_DTRACE_METHOD_ENTRY_HOOK(th, 0, 0);
3049  return;
3050  case RUBY_EVENT_C_CALL:
3051  RUBY_DTRACE_CMETHOD_ENTRY_HOOK(th, 0, 0);
3052  return;
3053  case RUBY_EVENT_RETURN:
3054  RUBY_DTRACE_METHOD_RETURN_HOOK(th, 0, 0);
3055  return;
3056  case RUBY_EVENT_C_RETURN:
3057  RUBY_DTRACE_CMETHOD_RETURN_HOOK(th, 0, 0);
3058  return;
3059  }
3060  }
3061 }
3062 
3063 static VALUE
3064 vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
3065 {
3066  VALUE ns;
3067 
3068  if ((ns = vm_search_const_defined_class(cbase, id)) == 0) {
3069  return ns;
3070  }
3071  else if (VM_DEFINECLASS_SCOPED_P(flags)) {
3072  return rb_public_const_get_at(ns, id);
3073  }
3074  else {
3075  return rb_const_get_at(ns, id);
3076  }
3077 }
3078 
3079 static VALUE
3080 vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
3081 {
3082  if (!RB_TYPE_P(klass, T_CLASS)) {
3083  rb_raise(rb_eTypeError, "%"PRIsVALUE" is not a class", rb_id2str(id));
3084  }
3085  else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
3086  VALUE tmp = rb_class_real(RCLASS_SUPER(klass));
3087 
3088  if (tmp != super) {
3089  rb_raise(rb_eTypeError,
3090  "superclass mismatch for class %"PRIsVALUE"",
3091  rb_id2str(id));
3092  }
3093  else {
3094  return klass;
3095  }
3096  }
3097  else {
3098  return klass;
3099  }
3100 }
3101 
3102 static VALUE
3103 vm_check_if_module(ID id, VALUE mod)
3104 {
3105  if (!RB_TYPE_P(mod, T_MODULE)) {
3106  rb_raise(rb_eTypeError, "%"PRIsVALUE" is not a module", rb_id2str(id));
3107  }
3108  else {
3109  return mod;
3110  }
3111 }
3112 
3113 static VALUE
3114 vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
3115 {
3116  /* new class declaration */
3117  VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
3118  VALUE c = rb_define_class_id(id, s);
3119 
3120  rb_set_class_path_string(c, cbase, rb_id2str(id));
3121  rb_const_set(cbase, id, c);
3122  rb_class_inherited(s, c);
3123  return c;
3124 }
3125 
3126 static VALUE
3127 vm_declare_module(ID id, VALUE cbase)
3128 {
3129  /* new module declaration */
3130  VALUE mod = rb_define_module_id(id);
3131  rb_set_class_path_string(mod, cbase, rb_id2str(id));
3132  rb_const_set(cbase, id, mod);
3133  return mod;
3134 }
3135 
3136 static VALUE
3137 vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
3138 {
3139  VALUE klass;
3140 
3141  if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
3142  rb_raise(rb_eTypeError,
3143  "superclass must be a Class (%"PRIsVALUE" given)",
3144  rb_obj_class(super));
3145  }
3146 
3147  vm_check_if_namespace(cbase);
3148 
3149  /* find klass */
3150  rb_autoload_load(cbase, id);
3151  if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
3152  return vm_check_if_class(id, flags, super, klass);
3153  }
3154  else {
3155  return vm_declare_class(id, flags, cbase, super);
3156  }
3157 }
3158 
3159 static VALUE
3160 vm_define_module(ID id, rb_num_t flags, VALUE cbase)
3161 {
3162  VALUE mod;
3163 
3164  vm_check_if_namespace(cbase);
3165  if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
3166  return vm_check_if_module(id, mod);
3167  }
3168  else {
3169  return vm_declare_module(id, cbase);
3170  }
3171 }
3172 
3173 static VALUE
3174 vm_find_or_create_class_by_id(ID id,
3175  rb_num_t flags,
3176  VALUE cbase,
3177  VALUE super)
3178 {
3179  rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);
3180 
3181  switch (type) {
3182  case VM_DEFINECLASS_TYPE_CLASS:
3183  /* classdef returns class scope value */
3184  return vm_define_class(id, flags, cbase, super);
3185 
3186  case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
3187  /* classdef returns class scope value */
3188  return rb_singleton_class(cbase);
3189 
3190  case VM_DEFINECLASS_TYPE_MODULE:
3191  /* classdef returns class scope value */
3192  return vm_define_module(id, flags, cbase);
3193 
3194  default:
3195  rb_bug("unknown defineclass type: %d", (int)type);
3196  }
3197 }
3198 
3199 /* this macro is mandatory to use OPTIMIZED_CMP. What a design! */
3200 #define id_cmp idCmp
3201 
3202 static VALUE
3203 vm_opt_newarray_max(rb_num_t num, const VALUE *ptr)
3204 {
3205  if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
3206  if (num == 0) {
3207  return Qnil;
3208  }
3209  else {
3210  struct cmp_opt_data cmp_opt = { 0, 0 };
3211  VALUE result = Qundef;
3212  rb_num_t i = num - 1;
3213  result = ptr[i];
3214  while (i-- > 0) {
3215  const VALUE v = ptr[i];
3216  if (result == Qundef || OPTIMIZED_CMP(v, result, cmp_opt) > 0) {
3217  result = v;
3218  }
3219  }
3220  return result == Qundef ? Qnil : result;
3221  }
3222  }
3223  else {
3224  VALUE ary = rb_ary_new4(num, ptr);
3225  return rb_funcall(ary, idMax, 0);
3226  }
3227 }
3228 
3229 static VALUE
3230 vm_opt_newarray_min(rb_num_t num, const VALUE *ptr)
3231 {
3232  if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
3233  if (num == 0) {
3234  return Qnil;
3235  }
3236  else {
3237  struct cmp_opt_data cmp_opt = { 0, 0 };
3238  VALUE result = Qundef;
3239  rb_num_t i = num - 1;
3240  result = ptr[i];
3241  while (i-- > 0) {
3242  const VALUE v = ptr[i];
3243  if (result == Qundef || OPTIMIZED_CMP(v, result, cmp_opt) < 0) {
3244  result = v;
3245  }
3246  }
3247  return result == Qundef ? Qnil : result;
3248  }
3249  }
3250  else {
3251  VALUE ary = rb_ary_new4(num, ptr);
3252  return rb_funcall(ary, idMin, 0);
3253  }
3254 }
3255 
3256 #undef id_cmp
3257 
3258 static VALUE
3259 vm_ic_hit_p(IC ic, const VALUE *reg_ep)
3260 {
3261  if (ic->ic_serial == GET_GLOBAL_CONSTANT_STATE() &&
3262  (ic->ic_cref == NULL || ic->ic_cref == rb_vm_get_cref(reg_ep))) {
3263  return ic->ic_value.value;
3264  }
3265  else {
3266  return Qnil;
3267  }
3268 }
3269 
3270 static void
3271 vm_ic_update(IC ic, VALUE val, const VALUE *reg_ep)
3272 {
3273  VM_ASSERT(ic->ic_value.value != Qundef);
3274  ic->ic_value.value = val;
3275  ic->ic_serial = GET_GLOBAL_CONSTANT_STATE() - ruby_vm_const_missing_count;
3276  ic->ic_cref = vm_get_const_key_cref(reg_ep);
3277  ruby_vm_const_missing_count = 0;
3278 }
3279 
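/* Support for the `once` instruction (e.g. /.../o regexps): evaluate the
 * body at most once per iseq, cache the result in the inline storage entry
 * and make other threads wait until the running thread has finished. */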
3280 static VALUE
3281 vm_once_dispatch(ISEQ iseq, IC ic, rb_thread_t *th)
3282 {
3283  rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);
3284  union iseq_inline_storage_entry *const is = (union iseq_inline_storage_entry *)ic;
3285 
3286  again:
3287  if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
3288  return is->once.value;
3289  }
3290  else if (is->once.running_thread == NULL) {
3291  VALUE val;
3292  is->once.running_thread = th;
3293  val = is->once.value = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
3294  /* is->once.running_thread is cleared by vm_once_clear() */
3295  is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
3296  rb_iseq_add_mark_object(th->ec.cfp->iseq, val);
3297  return val;
3298  }
3299  else if (is->once.running_thread == th) {
3300  /* recursive once */
3301  return vm_once_exec((VALUE)iseq);
3302  }
3303  else {
3304  /* waiting for finish */
3305  RUBY_VM_CHECK_INTS(th);
3306  rb_thread_schedule();
3307  goto again;
3308  }
3309 }
3310 
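/* Fast path of case/when whose when clauses are all literals: look the key
 * up in a precomputed hash of branch offsets, normalizing integral Float
 * keys to Integer first; a return value of 0 falls back to ordinary ===
 * dispatch. */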
3311 static OFFSET
3312 vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
3313 {
3314  switch (OBJ_BUILTIN_TYPE(key)) {
3315  case -1:
3316  case T_FLOAT:
3317  case T_SYMBOL:
3318  case T_BIGNUM:
3319  case T_STRING:
3320  if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
3321  SYMBOL_REDEFINED_OP_FLAG |
3322  INTEGER_REDEFINED_OP_FLAG |
3323  FLOAT_REDEFINED_OP_FLAG |
3324  NIL_REDEFINED_OP_FLAG |
3325  TRUE_REDEFINED_OP_FLAG |
3326  FALSE_REDEFINED_OP_FLAG |
3327  STRING_REDEFINED_OP_FLAG)) {
3328  st_data_t val;
3329  if (RB_FLOAT_TYPE_P(key)) {
3330  double kval = RFLOAT_VALUE(key);
3331  if (!isinf(kval) && modf(kval, &kval) == 0.0) {
3332  key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
3333  }
3334  }
3335  if (st_lookup(RHASH_TBL_RAW(hash), key, &val)) {
3336  return FIX2INT((VALUE)val);
3337  }
3338  else {
3339  return else_offset;
3340  }
3341  }
3342  }
3343  return 0;
3344 }
3345 
3346 NORETURN(static void
3347  vm_stack_consistency_error(rb_thread_t *,
3348  const rb_control_frame_t *,
3349  const VALUE *));
3350 static void
3351 vm_stack_consistency_error(rb_thread_t *th,
3352  const rb_control_frame_t *cfp,
3353  const VALUE *bp)
3354 {
3355  const ptrdiff_t nsp = VM_SP_CNT(th, cfp->sp);
3356  const ptrdiff_t nbp = VM_SP_CNT(th, bp);
3357  static const char stack_consistency_error[] =
3358  "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
3359 #if defined RUBY_DEVEL
3360  VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
3361  rb_str_cat_cstr(mesg, "\n");
3362  rb_str_append(mesg, rb_iseq_disasm(cfp->iseq));
3363  rb_bug("%s", StringValueCStr(mesg));
3364 #else
3365  rb_bug(stack_consistency_error, nsp, nbp);
3366 #endif
3367 }
3368 
3368 
3369 enum binop_operands_type {
3370  bot_float,
3371  bot_fixnum,
3372  bot_flonum,
3373  bot_others
3374 };
3375 
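/* Classify the operands of an optimized binary operator as Fixnum pair,
 * flonum pair, Float pair or anything else, provided the corresponding core
 * method has not been redefined.  The vm_opt_* helpers below use the result
 * to select an inline fast path and return Qundef to request a full method
 * call. */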
3376 static enum binop_operands_type
3377 vm_opt_binop_dispatch(VALUE recv, VALUE obj, enum ruby_basic_operators BOP)
3378 {
3379  if (FIXNUM_2_P(recv, obj) &&
3380  BASIC_OP_UNREDEFINED_P(BOP, INTEGER_REDEFINED_OP_FLAG)) {
3381  return bot_fixnum;
3382  }
3383  else if (FLONUM_2_P(recv, obj) &&
3384  BASIC_OP_UNREDEFINED_P(BOP, FLOAT_REDEFINED_OP_FLAG)) {
3385  return bot_flonum;
3386  }
3387  else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
3388  return bot_others;
3389  }
3390  else if (RBASIC_CLASS(recv) == rb_cFloat &&
3391  RBASIC_CLASS(obj) == rb_cFloat &&
3392  BASIC_OP_UNREDEFINED_P(BOP, FLOAT_REDEFINED_OP_FLAG)) {
3393  return bot_float;
3394  }
3395  else {
3396  return bot_others;
3397  }
3398 }
3399 
3400 static VALUE
3401 vm_opt_plus(VALUE recv, VALUE obj)
3402 {
3403  switch (vm_opt_binop_dispatch(recv, obj, BOP_PLUS)) {
3404  case bot_float:
3405  case bot_flonum: return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
3406  case bot_fixnum: return rb_fix_plus_fix(recv, obj);
3407  default:
3408  if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
3409  return Qundef;
3410  }
3411  else if (RBASIC_CLASS(recv) == rb_cString &&
3412  RBASIC_CLASS(obj) == rb_cString &&
3413  BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
3414  return rb_str_plus(recv, obj);
3415  }
3416  else if (RBASIC_CLASS(recv) == rb_cArray &&
3417  BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
3418  return rb_ary_plus(recv, obj);
3419  }
3420  else {
3421  return Qundef;
3422  }
3423  }
3424 }
3425 
3426 static VALUE
3427 vm_opt_minus(VALUE recv, VALUE obj)
3428 {
3429  switch (vm_opt_binop_dispatch(recv, obj, BOP_MINUS)) {
3430  case bot_float:
3431  case bot_flonum: return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
3432  case bot_fixnum: return rb_fix_minus_fix(recv, obj);
3433  default: return Qundef;
3434  }
3435 }
3436 
3437 static VALUE
3438 vm_opt_mult(VALUE recv, VALUE obj)
3439 {
3440  switch (vm_opt_binop_dispatch(recv, obj, BOP_MULT)) {
3441  case bot_float:
3442  case bot_flonum: return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
3443  case bot_fixnum: return rb_fix_mul_fix(recv, obj);
3444  default: return Qundef;
3445  }
3446 }
3447 
3448 static VALUE
3449 vm_opt_div(VALUE recv, VALUE obj)
3450 {
3451  switch (vm_opt_binop_dispatch(recv, obj, BOP_DIV)) {
3452  case bot_float:
3453  case bot_flonum:
3454  return DBL2NUM(RFLOAT_VALUE(recv) / RFLOAT_VALUE(obj));
3455  case bot_fixnum:
3456  return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
3457  default:
3458  return Qundef;
3459  }
3460 }
3461 
3462 static VALUE
3463 vm_opt_mod(VALUE recv, VALUE obj)
3464 {
3465  switch (vm_opt_binop_dispatch(recv, obj, BOP_MOD)) {
3466  case bot_float:
3467  case bot_flonum:
3468  return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
3469  case bot_fixnum:
3470  return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
3471  default:
3472  return Qundef;
3473  }
3474 }
3475 
3476 static VALUE
3477 vm_opt_neq(CALL_INFO ci, CALL_CACHE cc,
3478  CALL_INFO ci_eq, CALL_CACHE cc_eq,
3479  VALUE recv, VALUE obj)
3480 {
3481  if (vm_method_cfunc_is(ci, cc, recv, rb_obj_not_equal)) {
3482  VALUE val = opt_eq_func(recv, obj, ci_eq, cc_eq);
3483 
3484  if (val != Qundef) {
3485  return RTEST(val) ? Qfalse : Qtrue;
3486  }
3487  }
3488 
3489  return Qundef;
3490 }
3491 
3492 static VALUE
3493 vm_opt_lt(VALUE recv, VALUE obj)
3494 {
3495  switch (vm_opt_binop_dispatch(recv, obj, BOP_LT)) {
3496  case bot_float:
3497  CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
3498  /* FALLTHROUGH */
3499  case bot_flonum:
3500  /* flonum is not NaN */
3501  return RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
3502  case bot_fixnum:
3503  return (SIGNED_VALUE)recv < (SIGNED_VALUE)obj ? Qtrue : Qfalse;
3504  default:
3505  return Qundef;
3506  }
3507 }
3508 
3509 static VALUE
3510 vm_opt_le(VALUE recv, VALUE obj)
3511 {
3512  switch (vm_opt_binop_dispatch(recv, obj, BOP_LT)) {
3513  case bot_float:
3514  CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
3515  /* FALLTHROUGH */
3516  case bot_flonum:
3517  /* flonum is not NaN */
3518  return RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
3519  case bot_fixnum:
3520  return (SIGNED_VALUE)recv <= (SIGNED_VALUE)obj ? Qtrue : Qfalse;
3521  default:
3522  return Qundef;
3523  }
3524 }
3525 
3526 static VALUE
3527 vm_opt_gt(VALUE recv, VALUE obj)
3528 {
3529  switch (vm_opt_binop_dispatch(recv, obj, BOP_LT)) {
3530  case bot_float:
3531  CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
3532  /* FALLTHROUGH */
3533  case bot_flonum:
3534  /* flonum is not NaN */
3535  return RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
3536  case bot_fixnum:
3537  return (SIGNED_VALUE)recv > (SIGNED_VALUE)obj ? Qtrue : Qfalse;
3538  default:
3539  return Qundef;
3540  }
3541 }
3542 
3543 static VALUE
3544 vm_opt_ge(VALUE recv, VALUE obj)
3545 {
3546  switch (vm_opt_binop_dispatch(recv, obj, BOP_LT)) {
3547  case bot_float:
3548  CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
3549  /* FALLTHROUGH */
3550  case bot_flonum:
3551  /* flonum is not NaN */
3552  return RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
3553  case bot_fixnum:
3554  return (SIGNED_VALUE)recv >= (SIGNED_VALUE)obj ? Qtrue : Qfalse;
3555  default:
3556  return Qundef;
3557  }
3558 }
3559 
3560 
3561 static VALUE
3562 vm_opt_ltlt(VALUE recv, VALUE obj)
3563 {
3564  if (SPECIAL_CONST_P(recv)) {
3565  return Qundef;
3566  }
3567  else if (RBASIC_CLASS(recv) == rb_cString &&
3568  BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
3569  return rb_str_concat(recv, obj);
3570  }
3571  else if (RBASIC_CLASS(recv) == rb_cArray &&
3572  BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
3573  return rb_ary_push(recv, obj);
3574  }
3575  else {
3576  return Qundef;
3577  }
3578 }
3579 
3580 static VALUE
3581 vm_opt_aref(VALUE recv, VALUE obj)
3582 {
3583  if (SPECIAL_CONST_P(recv)) {
3584  return Qundef;
3585  }
3586  else if (RBASIC_CLASS(recv) == rb_cArray &&
3587  BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG) &&
3588  FIXNUM_P(obj)) {
3589  return rb_ary_entry(recv, FIX2LONG(obj));
3590  }
3591  else if (RBASIC_CLASS(recv) == rb_cHash &&
3592  BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
3593  return rb_hash_aref(recv, obj);
3594  }
3595  else {
3596  return Qundef;
3597  }
3598 }
3599 
3600 static VALUE
3601 vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
3602 {
3603  if (SPECIAL_CONST_P(recv)) {
3604  return Qundef;
3605  }
3606  else if (RBASIC_CLASS(recv) == rb_cArray &&
3607  BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
3608  FIXNUM_P(obj)) {
3609  rb_ary_store(recv, FIX2LONG(obj), set);
3610  return set;
3611  }
3612  else if (RBASIC_CLASS(recv) == rb_cHash &&
3613  BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
3614  rb_hash_aset(recv, obj, set);
3615  return set;
3616  }
3617  else {
3618  return Qundef;
3619  }
3620 }
3621 
3622 static VALUE
3623 vm_opt_aref_with(VALUE recv, VALUE key)
3624 {
3625  if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
3626  BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
3627  rb_hash_compare_by_id_p(recv) == Qfalse) {
3628  return rb_hash_aref(recv, key);
3629  }
3630  else {
3631  return Qundef;
3632  }
3633 }
3634 
3635 static VALUE
3636 vm_opt_aset_with(VALUE recv, VALUE key, VALUE val)
3637 {
3638  if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
3639  BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
3640  rb_hash_compare_by_id_p(recv) == Qfalse) {
3641  return rb_hash_aset(recv, key, val);
3642  }
3643  else {
3644  return Qundef;
3645  }
3646 }
3647 
3648 static VALUE
3649 vm_opt_length(VALUE recv, int bop)
3650 {
3651  if (SPECIAL_CONST_P(recv)) {
3652  return Qundef;
3653  }
3654  else if (RBASIC_CLASS(recv) == rb_cString &&
3655  BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
3656  if (bop == BOP_EMPTY_P) {
3657  return LONG2NUM(RSTRING_LEN(recv));
3658  }
3659  else {
3660  return rb_str_length(recv);
3661  }
3662  }
3663  else if (RBASIC_CLASS(recv) == rb_cArray &&
3664  BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
3665  return LONG2NUM(RARRAY_LEN(recv));
3666  }
3667  else if (RBASIC_CLASS(recv) == rb_cHash &&
3668  BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
3669  return INT2FIX(RHASH_SIZE(recv));
3670  }
3671  else {
3672  return Qundef;
3673  }
3674 }
3675 
3676 static VALUE
3677 vm_opt_empty_p(VALUE recv)
3678 {
3679  switch (vm_opt_length(recv, BOP_EMPTY_P)) {
3680  case Qundef: return Qundef;
3681  case INT2FIX(0): return Qtrue;
3682  default: return Qfalse;
3683  }
3684 }
3685 
3686 static VALUE
3687 vm_opt_succ(VALUE recv)
3688 {
3689  if (FIXNUM_P(recv) &&
3690  BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
3691  /* fixnum + INT2FIX(1) */
3692  if (recv == LONG2FIX(FIXNUM_MAX)) {
3693  return LONG2NUM(FIXNUM_MAX + 1);
3694  }
3695  else {
3696  return recv - 1 + INT2FIX(1);
3697  }
3698  }
3699  else if (SPECIAL_CONST_P(recv)) {
3700  return Qundef;
3701  }
3702  else if (RBASIC_CLASS(recv) == rb_cString &&
3703  BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
3704  return rb_str_succ(recv);
3705  }
3706  else {
3707  return Qundef;
3708  }
3709 }
3710 
3711 static VALUE
3712 vm_opt_not(CALL_INFO ci, CALL_CACHE cc, VALUE recv)
3713 {
3714  if (vm_method_cfunc_is(ci, cc, recv, rb_obj_not)) {
3715  return RTEST(recv) ? Qfalse : Qtrue;
3716  }
3717  else {
3718  return Qundef;
3719  }
3720 }
3721 
3722 static VALUE
3723 vm_opt_regexpmatch1(VALUE recv, VALUE obj)
3724 {
3725  if (BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
3726  return rb_reg_match(recv, obj);
3727  }
3728  else {
3729  return rb_funcall(recv, idEqTilde, 1, obj);
3730  }
3731 }
3732 
3733 static VALUE
3734 vm_opt_regexpmatch2(VALUE recv, VALUE obj)
3735 {
3736  if (CLASS_OF(recv) == rb_cString &&
3737  BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
3738  return rb_reg_match(obj, recv);
3739  }
3740  else {
3741  return Qundef;
3742  }
3743 }
rb_serial_t method_state
Definition: vm_core.h:244
#define PRIdPTRDIFF
Definition: ruby.h:159
#define TOPN(n)
Definition: vm_insnhelper.h:40
void rb_vm_pop_frame(rb_thread_t *th)
#define VM_ENV_DATA_INDEX_SPECVAL
Definition: vm_core.h:1050
unsigned long rb_num_t
Definition: vm_core.h:149
rb_method_type_t
Definition: method.h:101
#define VM_SP_CNT(th, sp)
Definition: vm_exec.h:160
void rb_threadptr_stack_overflow(rb_thread_t *th, int crit)
Definition: vm_insnhelper.c:57
#define RCLASS_REFINED_CLASS(c)
Definition: internal.h:795
#define VM_CALL_SUPER
Definition: vm_core.h:971
Definition: vm_core.h:188
VALUE rb_const_get(VALUE, ID)
Definition: variable.c:2292
VALUE rb_ary_to_ary(VALUE obj)
Definition: array.c:1553
attr_writer or attr_accessor
Definition: method.h:104
class reference
Definition: internal.h:840
#define STRING_REDEFINED_OP_FLAG
Definition: vm_core.h:608
union rb_call_cache::@134 aux
#define RSTRING_LEN(str)
Definition: ruby.h:971
#define RARRAY_CONST_PTR(a)
Definition: ruby.h:1021
#define CI_SET_FASTPATH(cc, func, enabled)
int rb_during_gc(void)
Definition: gc.c:6735
VALUE(* invoker)(VALUE(*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
Definition: method.h:130
union rb_block::@142 as
#define TRUE
Definition: nkf.h:175
struct rb_iseq_constant_body::@135 param
parameter information
defined_type
Definition: iseq.h:240
VALUE rb_sprintf(const char *format,...)
Definition: sprintf.c:1452
const rb_callable_method_entry_t * rb_callable_method_entry(VALUE klass, ID id)
Definition: vm_method.c:833
#define RICLASS_IS_ORIGIN
Definition: internal.h:798
#define RUBY_DTRACE_CMETHOD_ENTRY_ENABLED()
Definition: probes.h:36
#define VM_ASSERT(expr)
Definition: vm_core.h:53
const rb_method_entry_t * rb_method_entry(VALUE klass, ID id)
Definition: vm_method.c:793
void rb_raise_method_missing(rb_thread_t *th, int argc, const VALUE *argv, VALUE obj, int call_status)
Definition: vm_eval.c:728
#define VM_CHECKMATCH_TYPE_MASK
Definition: vm_core.h:944
VALUE(* func)(ANYARGS)
Definition: method.h:129
#define MEMMOVE(p1, p2, type, n)
Definition: ruby.h:1662
VALUE proc
Definition: vm_core.h:657
#define RHASH_SIZE(hsh)
Definition: fbuffer.h:8
#define VMDEBUG
VM Debug Level.
Definition: vm_core.h:37
#define VM_ENV_DATA_SIZE
Definition: vm_core.h:1047
VALUE rb_ivar_set(VALUE, ID, VALUE)
Definition: variable.c:1315
#define T_IMEMO
Definition: ruby.h:511
#define PRIsVALUE
Definition: ruby.h:135
unsigned long ID
Definition: ruby.h:86
#define VM_CHECK_MODE
Definition: vm_core.h:20
#define Qnil
Definition: ruby.h:438
void rb_const_set(VALUE, ID, VALUE)
Definition: variable.c:2573
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
Definition: eval.c:615
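A minimal sketch of building and raising an exception object (the helper name is invented; rb_exc_new3 and rb_eRuntimeError are listed elsewhere in this index):

#include "ruby.h"

static void
raise_runtime_error(const char *msg)
{
    /* build a RuntimeError instance, then raise it; rb_exc_raise does not return */
    VALUE exc = rb_exc_new3(rb_eRuntimeError, rb_str_new_cstr(msg));
    rb_exc_raise(exc);
}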
rb_control_frame_t *FUNC_FASTCALL() rb_vm_opt_struct_aref(rb_thread_t *th, rb_control_frame_t *reg_cfp)
VALUE rb_sym_to_proc(VALUE sym)
Definition: proc.c:1198
IFUNC (Internal FUNCtion)
Definition: internal.h:917
#define METHOD_ENTRY_VISI(me)
Definition: method.h:67
#define BUILTIN_TYPE(x)
Definition: ruby.h:518
#define debug(x)
Definition: _sdbm.c:51
int rb_autoloading_value(VALUE mod, ID id, VALUE *value)
Definition: variable.c:2015
unsigned long VALUE
Definition: ruby.h:85
method_missing_reason
Definition: vm_core.h:205
#define EXEC_EVENT_HOOK(th_, flag_, self_, id_, called_id_, klass_, data_)
Definition: vm_core.h:1686
#define RBASIC(obj)
Definition: ruby.h:1197
Definition: iseq.h:148
const VALUE defined_class
Definition: method.h:53
#define REGEXP_REDEFINED_OP_FLAG
Definition: vm_core.h:614
VALUE rb_eTypeError
Definition: error.c:801
#define FIX2INT(x)
Definition: ruby.h:686
VALUE ruby_vm_special_exception_copy(VALUE exc)
Definition: vm_insnhelper.c:26
ID called_id
Definition: method.h:63
#define VM_CALL_OPT_SEND
Definition: vm_core.h:972
#define VM_CHECKMATCH_ARRAY
Definition: vm_core.h:945
#define VM_ENV_DATA_INDEX_FLAGS
Definition: vm_core.h:1051
#define rb_ary_new3
Definition: intern.h:91
VALUE rb_check_funcall(VALUE, ID, int, const VALUE *)
Definition: vm_eval.c:389
const struct vm_ifunc * ifunc
Definition: vm_core.h:634
CREF (Class REFerence)
Definition: method.h:41
void rb_const_warn_if_deprecated(const rb_const_entry_t *ce, VALUE klass, ID id)
Definition: variable.c:2216
Definition: iseq.h:153
VALUE rb_equal_opt(VALUE obj1, VALUE obj2)
rb_serial_t ic_serial
Definition: vm_core.h:189
enum rb_iseq_constant_body::iseq_type type
#define FIXABLE(f)
Definition: ruby.h:368
union iseq_inline_cache_entry::@132 ic_value
const void * block_code
Definition: vm_core.h:668
#define RUBY_VM_END_CONTROL_FRAME(th)
Definition: vm_core.h:1240
#define RB_FLOAT_TYPE_P(obj)
Definition: ruby.h:523
Kernel::send, Proc::call, etc.
Definition: method.h:111
const rb_callable_method_entry_t * passed_bmethod_me
Definition: vm_core.h:798
#define GET_PREV_EP(ep)
struct rb_captured_block captured
Definition: vm_core.h:655
#define LONG2NUM(x)
Definition: ruby.h:1573
#define rb_funcallv
Definition: console.c:21
register unsigned int len
Definition: zonetab.h:51
VALUE rb_str_freeze(VALUE)
Definition: string.c:2549
int8_t is_lambda
Definition: vm_core.h:914
VALUE rb_mRubyVMFrozenCore
Definition: vm.c:316
#define rb_exc_new3
Definition: intern.h:244
void rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr)
#define SYMBOL_REDEFINED_OP_FLAG
Definition: vm_core.h:612
#define RB_OBJ_WRITE(a, slot, b)
Definition: eval_intern.h:175
VALUE rb_iseq_disasm(const rb_iseq_t *iseq)
Definition: iseq.c:1515
const rb_cref_t * ic_cref
Definition: vm_core.h:190
int rb_const_defined_at(VALUE, ID)
Definition: variable.c:2543
rb_const_entry_t * rb_const_lookup(VALUE klass, ID id)
Definition: variable.c:3128
#define RFLOAT_VALUE(v)
Definition: ruby.h:933
#define TAG_RETRY
Definition: vm_core.h:168
#define INT2FIX(i)
Definition: ruby.h:232
#define UNLIMITED_ARGUMENTS
Definition: intern.h:44
VALUE rb_hash_compare_by_id_p(VALUE hash)
Definition: hash.c:2913
unsigned int start
Definition: iseq.h:172
#define TAG_THROW
Definition: vm_core.h:171
#define RCLASS_SUPER(c)
Definition: classext.h:16
const struct iseq_catch_table * catch_table
Definition: vm_core.h:394
const rb_callable_method_entry_t * rb_method_entry_complement_defined_class(const rb_method_entry_t *src_me, ID called_id, VALUE defined_class)
Definition: vm_method.c:405
#define RARRAY_AREF(a, i)
Definition: ruby.h:1033
rb_serial_t class_serial
Definition: vm_core.h:245
size_t index
Definition: vm_core.h:192
VALUE rb_ary_plus(VALUE x, VALUE y)
Definition: array.c:3635
#define RBASIC_CLASS(obj)
Definition: ruby.h:878
#define ANYARGS
Definition: defines.h:173
struct rb_call_info ci
Definition: vm_core.h:229
#define INC_SP(x)
#define rb_thread_raised_reset(th, f)
Definition: eval_intern.h:286
VALUE rb_eRuntimeError
Definition: error.c:800
#define FLOAT_REDEFINED_OP_FLAG
Definition: vm_core.h:607
VALUE rb_check_array_type(VALUE ary)
Definition: array.c:651
VALUE rb_struct_aref(VALUE, VALUE)
Definition: struct.c:896
VALUE rb_hash_aref(VALUE hash, VALUE key)
Definition: hash.c:831
void rb_error_arity(int argc, int min, int max)
#define FUNC_FASTCALL(x)
Definition: vm_core.h:998
struct rb_iseq_struct * local_iseq
Definition: vm_core.h:398
VALUE block_handler
Definition: vm_core.h:234
VALUE rb_eFatal
Definition: error.c:798
struct rb_vm_tag * tag
Definition: vm_core.h:746
#define RHASH_TBL_RAW(h)
Definition: internal.h:1268
void rb_iseq_add_mark_object(const rb_iseq_t *iseq, VALUE obj)
Definition: iseq.c:280
unsigned long lindex_t
Definition: vm_exec.h:16
#define LONG2FIX(i)
Definition: ruby.h:234
#define RTEST(v)
Definition: ruby.h:450
#define T_STRING
Definition: ruby.h:496
void rb_warning(const char *fmt,...)
Definition: error.c:267
const VALUE * pc
Definition: vm_core.h:663
VALUE CDHASH
Definition: vm_core.h:995
int rb_method_basic_definition_p(VALUE, ID)
Definition: vm_method.c:1879
VALUE rb_str_cat_cstr(VALUE, const char *)
Definition: string.c:2756
Definition: id.h:95
#define RB_BUILTIN_TYPE(x)
Definition: ruby.h:517
struct rb_thread_struct * running_thread
Definition: vm_core.h:199
#define INTEGER_REDEFINED_OP_FLAG
Definition: vm_core.h:606
#define GET_EP()
Definition: vm_insnhelper.h:93
#define VM_UNREACHABLE(func)
Definition: vm_core.h:54
const char * rb_obj_info(VALUE obj)
Definition: gc.c:9442
VALUE rb_ary_dup(VALUE ary)
Definition: array.c:1930
#define CHECK(sub)
Definition: compile.c:442
VALUE rb_public_const_get_at(VALUE klass, ID id)
Definition: variable.c:2316
VALUE(* func)(ANYARGS)
Definition: internal.h:920
VALUE rb_cArray
Definition: array.c:26
VALUE rb_ary_concat(VALUE x, VALUE y)
Definition: array.c:3703
#define VM_CALL_TAILCALL
Definition: vm_core.h:970
#define rb_thread_raised_p(th, f)
Definition: eval_intern.h:287
const rb_iseq_t * iseq
Definition: vm_core.h:633
void rb_gc_writebarrier_remember(VALUE obj)
Definition: gc.c:6041
const void * data
Definition: internal.h:921
#define T_CLASS
Definition: ruby.h:492
#define ARRAY_REDEFINED_OP_FLAG
Definition: vm_core.h:609
const struct rb_iseq_struct * parent_iseq
Definition: vm_core.h:397
#define RB_DEBUG_COUNTER_INC(type)
#define EQ_UNREDEFINED_P(t)
VALUE rb_const_get_at(VALUE, ID)
Definition: variable.c:2298
VALUE rb_reg_match_pre(VALUE)
Definition: re.c:1723
#define TAG_RETURN
Definition: vm_core.h:165
rb_execution_context_t ec
Definition: vm_core.h:790
#define VM_CALL_KW_SPLAT
Definition: vm_core.h:969
#define ID2SYM(x)
Definition: ruby.h:383
#define GET_SP()
Definition: vm_insnhelper.h:98
imemo_type
Definition: internal.h:838
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
Definition: eval.c:631
VALUE rb_define_class_id(ID id, VALUE super)
Defines a new class.
Definition: class.c:599
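A rough, simplified sketch of how these calls can combine when a class is defined under a namespace (the variable and function names are illustrative, and this is not the exact code in this file):

#include "ruby.h"

static VALUE
declare_class_under(VALUE cbase, ID id, VALUE super)
{
    VALUE c = rb_define_class_id(id, super);            /* new class object */
    rb_set_class_path_string(c, cbase, rb_id2str(id));  /* give it a name path */
    rb_const_set(cbase, id, c);                          /* bind the constant */
    rb_class_inherited(super, c);                        /* notify Class::inherited */
    return c;
}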
uint32_t rb_event_flag_t
Definition: ruby.h:2116
Definition: id.h:99
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v))
#define RUBY_DTRACE_METHOD_RETURN_ENABLED()
Definition: probes.h:166
C method.
Definition: method.h:103
#define NIL_REDEFINED_OP_FLAG
Definition: vm_core.h:615
#define rb_check_frozen(obj)
Definition: intern.h:271
#define GET_ISEQ()
void rb_vm_env_write(const VALUE *ep, int index, VALUE v)
VALUE rb_str_intern(VALUE)
Definition: symbol.c:661
rb_control_frame_t *FUNC_FASTCALL() rb_vm_opt_struct_aset(rb_thread_t *th, rb_control_frame_t *reg_cfp)
#define TAG_BREAK
Definition: vm_core.h:166
#define SPECIAL_CONST_P(x)
Definition: ruby.h:1242
VALUE rb_public_const_get_from(VALUE klass, ID id)
Definition: variable.c:2304
#define CHECK_VM_STACK_OVERFLOW(cfp, margin)
Definition: vm_core.h:1565
Definition: id.h:94
int rb_method_boundp(VALUE, ID, int)
Definition: vm_method.c:1068
unsigned int size
Definition: vm_core.h:352
#define SYMBOL_P(x)
Definition: ruby.h:382
#define stat(path, st)
Definition: win32.h:183
#define mod(x, y)
Definition: date_strftime.c:28
#define GET_GLOBAL_CONSTANT_STATE()
#define TAG_RAISE
Definition: vm_core.h:170
#define NULL
Definition: _sdbm.c:102
struct rb_global_entry * rb_global_entry(ID)
Definition: variable.c:482
#define FIX2LONG(x)
Definition: ruby.h:363
#define Qundef
Definition: ruby.h:439
#define T_ICLASS
Definition: ruby.h:493
#define RCLASS_SERIAL(c)
Definition: internal.h:796
VALUE rb_class_real(VALUE cl)
Looks up the nearest ancestor of cl, skipping singleton classes or module inclusions.
Definition: object.c:251
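Illustrative sketch (helper name invented): rb_class_real strips singleton classes and included-module wrappers, which is what obj.class reports.

#include "ruby.h"

static VALUE
real_class_of(VALUE obj)
{
    /* CLASS_OF(obj) may be a singleton class or an iclass;
       rb_class_real walks up to the nearest ordinary class */
    return rb_class_real(CLASS_OF(obj));
}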
attr_reader or attr_accessor
Definition: method.h:105
#define BUILTIN_CLASS_P(x, k)
void rb_gc_verify_internal_consistency(void)
Definition: gc.c:5379
#define ruby_verbose
Definition: ruby.h:1813
VALUE rb_str_append(VALUE, VALUE)
Definition: string.c:2900
#define CHECK_VM_STACK_OVERFLOW0(cfp, sp, margin)
Definition: vm_core.h:1563
#define bp()
Definition: vm_debug.h:25
VALUE rb_autoload_load(VALUE, ID)
Definition: variable.c:2151
void rb_set_class_path_string(VALUE, VALUE, VALUE)
Definition: variable.c:344
VALUE rb_reg_nth_match(int, VALUE)
Definition: re.c:1679
VALUE rb_attr_get(VALUE, ID)
Definition: variable.c:1224
char ** argv
Definition: ruby.c:188
#define DBL2NUM(dbl)
Definition: ruby.h:934
VALUE rb_eql_opt(VALUE obj1, VALUE obj2)
#define SIGNED_VALUE
Definition: ruby.h:87
#define LIKELY(x)
Definition: internal.h:42
rb_control_frame_t * rb_vm_push_frame(rb_execution_context_t *ec, const rb_iseq_t *iseq, VALUE type, VALUE self, VALUE specval, VALUE cref_or_me, const VALUE *pc, VALUE *sp, int local_size, int stack_max)