1 /**********************************************************************
3 compile.c - ruby node tree -> VM instruction sequence
6 created at: 04/01/01 03:42:15 JST
8 Copyright (C) 2004-2007 Koichi Sasada
10 **********************************************************************/
12 #include "ruby/3/config.h"
23 #include "internal/array.h"
24 #include "internal/compile.h"
25 #include "internal/complex.h"
26 #include "internal/encoding.h"
27 #include "internal/error.h"
28 #include "internal/hash.h"
29 #include "internal/numeric.h"
30 #include "internal/object.h"
31 #include "internal/re.h"
32 #include "internal/symbol.h"
33 #include "internal/thread.h"
34 #include "internal/variable.h"
37 #include "ruby/util.h"
39 #include "vm_callinfo.h"
44 #include "insns_info.inc"
46 #undef RUBY_UNTYPED_DATA_WARNING
47 #define RUBY_UNTYPED_DATA_WARNING 0
49 #define FIXNUM_INC(n, i) ((n)+(INT2FIX(i)&~FIXNUM_FLAG))
50 #define FIXNUM_OR(n, i) ((n)|INT2FIX(i))
52 typedef struct iseq_link_element
{
60 struct iseq_link_element
*next
;
61 struct iseq_link_element
*prev
;
64 typedef struct iseq_link_anchor
{
76 typedef struct iseq_label_data
{
84 unsigned int rescued
: 2;
85 unsigned int unremovable
: 1;
88 typedef struct iseq_insn_data
{
90 enum ruby_vminsn_type insn_id
;
96 rb_event_flag_t events
;
100 typedef struct iseq_adjust_data
{
106 typedef struct iseq_trace_data
{
108 rb_event_flag_t event
;
112 struct ensure_range
{
115 struct ensure_range
*next
;
118 struct iseq_compile_data_ensure_node_stack
{
119 const NODE
*ensure_node
;
120 struct iseq_compile_data_ensure_node_stack
*prev
;
121 struct ensure_range
*erange
;
124 const ID rb_iseq_shared_exc_local_tbl
[] = {idERROR_INFO
};
127 * debug function(macro) interface depend on CPDEBUG
128 * if it is less than 0, runtime option is in effect.
133 * 2: show node important parameters
135 * 5: show other parameters
136 * 10: show every AST array
144 #define compile_debug CPDEBUG
146 #define compile_debug ISEQ_COMPILE_DATA(iseq)->option->debug_level
151 #define compile_debug_print_indent(level) \
152 ruby_debug_print_indent((level), compile_debug, gl_node_level * 2)
154 #define debugp(header, value) (void) \
155 (compile_debug_print_indent(1) && \
156 ruby_debug_print_value(1, compile_debug, (header), (value)))
158 #define debugi(header, id) (void) \
159 (compile_debug_print_indent(1) && \
160 ruby_debug_print_id(1, compile_debug, (header), (id)))
162 #define debugp_param(header, value) (void) \
163 (compile_debug_print_indent(1) && \
164 ruby_debug_print_value(1, compile_debug, (header), (value)))
166 #define debugp_verbose(header, value) (void) \
167 (compile_debug_print_indent(2) && \
168 ruby_debug_print_value(2, compile_debug, (header), (value)))
170 #define debugp_verbose_node(header, value) (void) \
171 (compile_debug_print_indent(10) && \
172 ruby_debug_print_value(10, compile_debug, (header), (value)))
174 #define debug_node_start(node) ((void) \
175 (compile_debug_print_indent(1) && \
176 (ruby_debug_print_node(1, CPDEBUG, "", (const NODE *)(node)), gl_node_level)), \
179 #define debug_node_end() gl_node_level --
183 #define debugi(header, id) ((void)0)
184 #define debugp(header, value) ((void)0)
185 #define debugp_verbose(header, value) ((void)0)
186 #define debugp_verbose_node(header, value) ((void)0)
187 #define debugp_param(header, value) ((void)0)
188 #define debug_node_start(node) ((void)0)
189 #define debug_node_end() ((void)0)
192 #if CPDEBUG > 1 || CPDEBUG < 0
194 #define printf ruby_debug_printf
195 #define debugs if (compile_debug_print_indent(1)) ruby_debug_printf
196 #define debug_compile(msg, v) ((void)(compile_debug_print_indent(1) && fputs((msg), stderr)), (v))
198 #define debugs if(0)printf
199 #define debug_compile(msg, v) (v)
202 #define LVAR_ERRINFO (1)
204 /* create new label */
205 #define NEW_LABEL(l) new_label_body(iseq, (l))
206 #define LABEL_FORMAT "<L%03d>"
208 #define NEW_ISEQ(node, name, type, line_no) \
209 new_child_iseq(iseq, (node), rb_fstring(name), 0, (type), (line_no))
211 #define NEW_CHILD_ISEQ(node, name, type, line_no) \
212 new_child_iseq(iseq, (node), rb_fstring(name), iseq, (type), (line_no))
214 /* add instructions */
215 #define ADD_SEQ(seq1, seq2) \
216 APPEND_LIST((seq1), (seq2))
218 /* add an instruction */
219 #define ADD_INSN(seq, line, insn) \
220 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (line), BIN(insn), 0))
222 /* insert an instruction before next */
223 #define INSERT_BEFORE_INSN(next, line, insn) \
224 ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) new_insn_body(iseq, (line), BIN(insn), 0))
226 /* insert an instruction after prev */
227 #define INSERT_AFTER_INSN(prev, line, insn) \
228 ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) new_insn_body(iseq, (line), BIN(insn), 0))
230 /* add an instruction with some operands (1, 2, 3, 5) */
231 #define ADD_INSN1(seq, line, insn, op1) \
232 ADD_ELEM((seq), (LINK_ELEMENT *) \
233 new_insn_body(iseq, (line), BIN(insn), 1, (VALUE)(op1)))
235 /* insert an instruction with some operands (1, 2, 3, 5) before next */
236 #define INSERT_BEFORE_INSN1(next, line, insn, op1) \
237 ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) \
238 new_insn_body(iseq, (line), BIN(insn), 1, (VALUE)(op1)))
240 /* insert an instruction with some operands (1, 2, 3, 5) after prev */
241 #define INSERT_AFTER_INSN1(prev, line, insn, op1) \
242 ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) \
243 new_insn_body(iseq, (line), BIN(insn), 1, (VALUE)(op1)))
245 #define LABEL_REF(label) ((label)->refcnt++)
247 /* add an instruction with label operand (alias of ADD_INSN1) */
248 #define ADD_INSNL(seq, line, insn, label) (ADD_INSN1(seq, line, insn, label), LABEL_REF(label))
250 #define ADD_INSN2(seq, line, insn, op1, op2) \
251 ADD_ELEM((seq), (LINK_ELEMENT *) \
252 new_insn_body(iseq, (line), BIN(insn), 2, (VALUE)(op1), (VALUE)(op2)))
254 #define ADD_INSN3(seq, line, insn, op1, op2, op3) \
255 ADD_ELEM((seq), (LINK_ELEMENT *) \
256 new_insn_body(iseq, (line), BIN(insn), 3, (VALUE)(op1), (VALUE)(op2), (VALUE)(op3)))
258 /* Specific Insn factory */
259 #define ADD_SEND(seq, line, id, argc) \
260 ADD_SEND_R((seq), (line), (id), (argc), NULL, (VALUE)INT2FIX(0), NULL)
262 #define ADD_SEND_WITH_FLAG(seq, line, id, argc, flag) \
263 ADD_SEND_R((seq), (line), (id), (argc), NULL, (VALUE)(flag), NULL)
265 #define ADD_SEND_WITH_BLOCK(seq, line, id, argc, block) \
266 ADD_SEND_R((seq), (line), (id), (argc), (block), (VALUE)INT2FIX(0), NULL)
268 #define ADD_CALL_RECEIVER(seq, line) \
269 ADD_INSN((seq), (line), putself)
271 #define ADD_CALL(seq, line, id, argc) \
272 ADD_SEND_R((seq), (line), (id), (argc), NULL, (VALUE)INT2FIX(VM_CALL_FCALL), NULL)
274 #define ADD_CALL_WITH_BLOCK(seq, line, id, argc, block) \
275 ADD_SEND_R((seq), (line), (id), (argc), (block), (VALUE)INT2FIX(VM_CALL_FCALL), NULL)
277 #define ADD_SEND_R(seq, line, id, argc, block, flag, keywords) \
278 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_send(iseq, (line), (id), (VALUE)(argc), (block), (VALUE)(flag), (keywords)))
280 #define ADD_TRACE(seq, event) \
281 ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), 0))
282 #define ADD_TRACE_WITH_DATA(seq, event, data) \
283 ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), (data)))
286 #define DECL_BRANCH_BASE(branches, first_line, first_column, last_line, last_column, type) \
288 if (ISEQ_COVERAGE(iseq) && \
289 ISEQ_BRANCH_COVERAGE(iseq) && \
290 (first_line) > 0) { \
291 VALUE structure = RARRAY_AREF(ISEQ_BRANCH_COVERAGE(iseq), 0); \
292 branches = rb_ary_tmp_new(5); \
293 rb_ary_push(structure, branches); \
294 rb_ary_push(branches, ID2SYM(rb_intern(type))); \
295 rb_ary_push(branches, INT2FIX(first_line)); \
296 rb_ary_push(branches, INT2FIX(first_column)); \
297 rb_ary_push(branches, INT2FIX(last_line)); \
298 rb_ary_push(branches, INT2FIX(last_column)); \
301 #define ADD_TRACE_BRANCH_COVERAGE(seq, first_line, first_column, last_line, last_column, type, branches) \
303 if (ISEQ_COVERAGE(iseq) && \
304 ISEQ_BRANCH_COVERAGE(iseq) && \
305 (first_line) > 0) { \
306 VALUE counters = RARRAY_AREF(ISEQ_BRANCH_COVERAGE(iseq), 1); \
307 long counter_idx = RARRAY_LEN(counters); \
308 rb_ary_push(counters, INT2FIX(0)); \
309 rb_ary_push(branches, ID2SYM(rb_intern(type))); \
310 rb_ary_push(branches, INT2FIX(first_line)); \
311 rb_ary_push(branches, INT2FIX(first_column)); \
312 rb_ary_push(branches, INT2FIX(last_line)); \
313 rb_ary_push(branches, INT2FIX(last_column)); \
314 rb_ary_push(branches, INT2FIX(counter_idx)); \
315 ADD_TRACE_WITH_DATA(seq, RUBY_EVENT_COVERAGE_BRANCH, counter_idx); \
316 ADD_INSN(seq, last_line, nop); \
320 static void iseq_add_getlocal(rb_iseq_t
*iseq
, LINK_ANCHOR
*const seq
, int line
, int idx
, int level
);
321 static void iseq_add_setlocal(rb_iseq_t
*iseq
, LINK_ANCHOR
*const seq
, int line
, int idx
, int level
);
323 #define ADD_GETLOCAL(seq, line, idx, level) iseq_add_getlocal(iseq, (seq), (line), (idx), (level))
324 #define ADD_SETLOCAL(seq, line, idx, level) iseq_add_setlocal(iseq, (seq), (line), (idx), (level))
327 #define ADD_LABEL(seq, label) \
328 ADD_ELEM((seq), (LINK_ELEMENT *) (label))
330 #define APPEND_LABEL(seq, before, label) \
331 APPEND_ELEM((seq), (before), (LINK_ELEMENT *) (label))
333 #define ADD_ADJUST(seq, line, label) \
334 ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), (line)))
336 #define ADD_ADJUST_RESTORE(seq, label) \
337 ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), -1))
339 #define LABEL_UNREMOVABLE(label) \
340 ((label) ? (LABEL_REF(label), (label)->unremovable=1) : 0)
341 #define ADD_CATCH_ENTRY(type, ls, le, iseqv, lc) do { \
342 VALUE _e = rb_ary_new3(5, (type), \
343 (VALUE)(ls) | 1, (VALUE)(le) | 1, \
344 (VALUE)(iseqv), (VALUE)(lc) | 1); \
345 LABEL_UNREMOVABLE(ls); \
348 if (NIL_P(ISEQ_COMPILE_DATA(iseq)->catch_table_ary)) \
349 RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->catch_table_ary, rb_ary_tmp_new(3)); \
350 rb_ary_push(ISEQ_COMPILE_DATA(iseq)->catch_table_ary, freeze_hide_obj(_e)); \
354 #define COMPILE(anchor, desc, node) \
355 (debug_compile("== " desc "\n", \
356 iseq_compile_each(iseq, (anchor), (node), 0)))
358 /* compile node, this node's value will be popped */
359 #define COMPILE_POPPED(anchor, desc, node) \
360 (debug_compile("== " desc "\n", \
361 iseq_compile_each(iseq, (anchor), (node), 1)))
363 /* compile node, which is popped when 'popped' is true */
364 #define COMPILE_(anchor, desc, node, popped) \
365 (debug_compile("== " desc "\n", \
366 iseq_compile_each(iseq, (anchor), (node), (popped))))
368 #define COMPILE_RECV(anchor, desc, node) \
369 (private_recv_p(node) ? \
370 (ADD_INSN(anchor, nd_line(node), putself), VM_CALL_FCALL) : \
371 COMPILE(anchor, desc, node->nd_recv) ? 0 : -1)
373 #define OPERAND_AT(insn, idx) \
374 (((INSN*)(insn))->operands[(idx)])
376 #define INSN_OF(insn) \
377 (((INSN*)(insn))->insn_id)
379 #define IS_INSN(link) ((link)->type == ISEQ_ELEMENT_INSN)
380 #define IS_LABEL(link) ((link)->type == ISEQ_ELEMENT_LABEL)
381 #define IS_ADJUST(link) ((link)->type == ISEQ_ELEMENT_ADJUST)
382 #define IS_TRACE(link) ((link)->type == ISEQ_ELEMENT_TRACE)
383 #define IS_INSN_ID(iobj, insn) (INSN_OF(iobj) == BIN(insn))
384 #define IS_NEXT_INSN_ID(link, insn) \
385 ((link)->next && IS_INSN((link)->next) && IS_INSN_ID((link)->next, insn))
389 NORETURN(static void append_compile_error(const rb_iseq_t
*iseq
, int line
, const char *fmt
, ...));
393 append_compile_error(const rb_iseq_t
*iseq
, int line
, const char *fmt
, ...)
395 VALUE err_info
= ISEQ_COMPILE_DATA(iseq
)->err_info
;
396 VALUE file
= rb_iseq_path(iseq
);
397 VALUE err
= err_info
== Qtrue
? Qfalse
: err_info
;
401 err
= rb_syntax_error_append(err
, file
, line
, -1, NULL
, fmt
, args
);
403 if (NIL_P(err_info
)) {
404 RB_OBJ_WRITE(iseq
, &ISEQ_COMPILE_DATA(iseq
)->err_info
, err
);
407 else if (!err_info
) {
408 RB_OBJ_WRITE(iseq
, &ISEQ_COMPILE_DATA(iseq
)->err_info
, Qtrue
);
411 if (SPECIAL_CONST_P(err
)) err
= rb_eSyntaxError
;
418 compile_bug(rb_iseq_t
*iseq
, int line
, const char *fmt
, ...)
422 rb_report_bug_valist(rb_iseq_path(iseq
), line
, fmt
, args
);
428 #define COMPILE_ERROR append_compile_error
430 #define ERROR_ARGS_AT(n) iseq, nd_line(n),
431 #define ERROR_ARGS ERROR_ARGS_AT(node)
433 #define EXPECT_NODE(prefix, node, ndtype, errval) \
435 const NODE *error_node = (node); \
436 enum node_type error_type = nd_type(error_node); \
437 if (error_type != (ndtype)) { \
438 COMPILE_ERROR(ERROR_ARGS_AT(error_node) \
439 prefix ": " #ndtype " is expected, but %s", \
440 ruby_node_name(error_type)); \
445 #define EXPECT_NODE_NONULL(prefix, parent, ndtype, errval) \
447 COMPILE_ERROR(ERROR_ARGS_AT(parent) \
448 prefix ": must be " #ndtype ", but 0"); \
452 #define UNKNOWN_NODE(prefix, node, errval) \
454 const NODE *error_node = (node); \
455 COMPILE_ERROR(ERROR_ARGS_AT(error_node) prefix ": unknown node (%s)", \
456 ruby_node_name(nd_type(error_node))); \
463 #define CHECK(sub) if (!(sub)) {BEFORE_RETURN;return COMPILE_NG;}
464 #define NO_CHECK(sub) (void)(sub)
465 #define BEFORE_RETURN
467 /* leave name uninitialized so that compiler warn if INIT_ANCHOR is
469 #define DECL_ANCHOR(name) \
470 LINK_ANCHOR name[1] = {{{ISEQ_ELEMENT_ANCHOR,},}}
471 #define INIT_ANCHOR(name) \
472 (name->last = &name->anchor)
/* Hide a compile-time internal object from Ruby code by clearing its
 * class pointer (used for e.g. catch-table arrays).
 * NOTE(review): the extraction dropped lines here — the return type,
 * function braces, and presumably a freeze/return step are not visible.
 * Confirm against the complete source. */
475 freeze_hide_obj(VALUE obj
)
478 RBASIC_CLEAR_CLASS(obj
);
482 #include "optinsn.inc"
483 #if OPT_INSTRUCTIONS_UNIFICATION
484 #include "optunifs.inc"
489 #define ISEQ_ARG iseq,
490 #define ISEQ_ARG_DECLARE rb_iseq_t *iseq,
493 #define ISEQ_ARG_DECLARE
497 #define gl_node_level ISEQ_COMPILE_DATA(iseq)->node_level
500 static void dump_disasm_list_with_cursor(const LINK_ELEMENT
*link
, const LINK_ELEMENT
*curr
, const LABEL
*dest
);
501 static void dump_disasm_list(const LINK_ELEMENT
*elem
);
503 static int insn_data_length(INSN
*iobj
);
504 static int calc_sp_depth(int depth
, INSN
*iobj
);
506 static INSN
*new_insn_body(rb_iseq_t
*iseq
, int line_no
, enum ruby_vminsn_type insn_id
, int argc
, ...);
507 static LABEL
*new_label_body(rb_iseq_t
*iseq
, long line
);
508 static ADJUST
*new_adjust_body(rb_iseq_t
*iseq
, LABEL
*label
, int line
);
509 static TRACE
*new_trace_body(rb_iseq_t
*iseq
, rb_event_flag_t event
, long data
);
512 static int iseq_compile_each(rb_iseq_t
*iseq
, LINK_ANCHOR
*anchor
, const NODE
*n
, int);
513 static int iseq_setup(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
);
514 static int iseq_setup_insn(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
);
515 static int iseq_optimize(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
);
516 static int iseq_insns_unification(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
);
518 static int iseq_set_local_table(rb_iseq_t
*iseq
, const ID
*tbl
);
519 static int iseq_set_exception_local_table(rb_iseq_t
*iseq
);
520 static int iseq_set_arguments(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
, const NODE
*const node
);
522 static int iseq_set_sequence_stackcaching(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
);
523 static int iseq_set_sequence(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
);
524 static int iseq_set_exception_table(rb_iseq_t
*iseq
);
525 static int iseq_set_optargs_table(rb_iseq_t
*iseq
);
527 static int compile_defined_expr(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, VALUE needstr
);
530 * To make Array to LinkedList, use link_anchor
534 verify_list(ISEQ_ARG_DECLARE
const char *info
, LINK_ANCHOR
*const anchor
)
538 LINK_ELEMENT
*list
, *plist
;
540 if (!compile_debug
) return;
542 list
= anchor
->anchor
.next
;
543 plist
= &anchor
->anchor
;
545 if (plist
!= list
->prev
) {
552 if (anchor
->last
!= plist
&& anchor
->last
!= 0) {
557 rb_bug("list verify error: %08x (%s)", flag
, info
);
562 #define verify_list(info, anchor) verify_list(iseq, (info), (anchor))
566 verify_call_cache(rb_iseq_t
*iseq
)
569 // fprintf(stderr, "ci_size:%d\t", iseq->body->ci_size); rp(iseq);
571 VALUE
*original
= rb_iseq_original_iseq(iseq
);
573 while (i
< iseq
->body
->iseq_size
) {
574 VALUE insn
= original
[i
];
575 const char *types
= insn_op_types(insn
);
577 for (int j
=0; types
[j
]; j
++) {
578 if (types
[j
] == TS_CALLDATA
) {
579 struct rb_call_data
*cd
= (struct rb_call_data
*)original
[i
+j
+1];
580 const struct rb_callinfo
*ci
= cd
->ci
;
581 const struct rb_callcache
*cc
= cd
->cc
;
582 if (cc
!= vm_cc_empty()) {
584 rb_bug("call cache is not initialized by vm_cc_empty()");
591 for (unsigned int i
=0; i
<iseq
->body
->ci_size
; i
++) {
592 struct rb_call_data
*cd
= &iseq
->body
->call_data
[i
];
593 const struct rb_callinfo
*ci
= cd
->ci
;
594 const struct rb_callcache
*cc
= cd
->cc
;
595 if (cc
!= NULL
&& cc
!= vm_cc_empty()) {
597 rb_bug("call cache is not initialized by vm_cc_empty()");
604 * elem1, elem2 => elem1, elem2, elem
/* Append `elem` at the tail of the list rooted at `anchor`:
 * elem1, elem2 => elem1, elem2, elem.
 * NOTE(review): lines are missing from this extraction — the update of
 * anchor->last to point at the new element, and the function braces,
 * are not visible.  Confirm against the complete source. */
607 ADD_ELEM(ISEQ_ARG_DECLARE LINK_ANCHOR
*const anchor
, LINK_ELEMENT
*elem
)
609 elem
->prev
= anchor
->last
;
610 anchor
->last
->next
= elem
;
612 verify_list("add", anchor
);
616 * elem1, before, elem2 => elem1, before, elem, elem2
619 APPEND_ELEM(ISEQ_ARG_DECLARE LINK_ANCHOR
*const anchor
, LINK_ELEMENT
*before
, LINK_ELEMENT
*elem
)
622 elem
->next
= before
->next
;
623 elem
->next
->prev
= elem
;
625 if (before
== anchor
->last
) anchor
->last
= elem
;
626 verify_list("add", anchor
);
629 #define ADD_ELEM(anchor, elem) ADD_ELEM(iseq, (anchor), (elem))
630 #define APPEND_ELEM(anchor, before, elem) APPEND_ELEM(iseq, (anchor), (before), (elem))
633 #define ISEQ_LAST_LINE(iseq) (ISEQ_COMPILE_DATA(iseq)->last_line)
/* st_foreach callback: report a compile error for any label that was
 * referenced but never defined (its link.next is still unset).
 * `arg` is the iseq threaded through st_foreach as opaque data.
 * NOTE(review): the tail of this function (return of the st_foreach
 * continuation status, closing braces) is missing from this extraction
 * — confirm against the complete source. */
636 validate_label(st_data_t name
, st_data_t label
, st_data_t arg
)
638 rb_iseq_t
*iseq
= (rb_iseq_t
*)arg
;
639 LABEL
*lobj
= (LABEL
*)label
;
640 if (!lobj
->link
.next
) {
642 COMPILE_ERROR(iseq
, lobj
->position
,
643 "%"PRIsVALUE
": undefined label",
644 rb_sym2str((VALUE
)name
));
651 validate_labels(rb_iseq_t
*iseq
, st_table
*labels_table
)
653 st_foreach(labels_table
, validate_label
, (st_data_t
)iseq
);
654 st_free_table(labels_table
);
658 rb_iseq_compile_callback(rb_iseq_t
*iseq
, const struct rb_iseq_new_with_callback_callback_func
* ifunc
)
663 (*ifunc
->func
)(iseq
, ret
, ifunc
->data
);
665 ADD_INSN(ret
, ISEQ_COMPILE_DATA(iseq
)->last_line
, leave
);
667 CHECK(iseq_setup_insn(iseq
, ret
));
668 return iseq_setup(iseq
, ret
);
672 rb_iseq_compile_node(rb_iseq_t
*iseq
, const NODE
*node
)
677 if (IMEMO_TYPE_P(node
, imemo_ifunc
)) {
678 rb_raise(rb_eArgError
, "unexpected imemo_ifunc");
682 NO_CHECK(COMPILE(ret
, "nil", node
));
683 iseq_set_local_table(iseq
, 0);
685 /* assume node is T_NODE */
686 else if (nd_type(node
) == NODE_SCOPE
) {
687 /* iseq type of top, method, class, block */
688 iseq_set_local_table(iseq
, node
->nd_tbl
);
689 iseq_set_arguments(iseq
, ret
, node
->nd_args
);
691 switch (iseq
->body
->type
) {
692 case ISEQ_TYPE_BLOCK
:
694 LABEL
*start
= ISEQ_COMPILE_DATA(iseq
)->start_label
= NEW_LABEL(0);
695 LABEL
*end
= ISEQ_COMPILE_DATA(iseq
)->end_label
= NEW_LABEL(0);
697 start
->rescued
= LABEL_RESCUE_BEG
;
698 end
->rescued
= LABEL_RESCUE_END
;
700 ADD_TRACE(ret
, RUBY_EVENT_B_CALL
);
701 ADD_INSN (ret
, FIX2INT(iseq
->body
->location
.first_lineno
), nop
);
702 ADD_LABEL(ret
, start
);
703 CHECK(COMPILE(ret
, "block body", node
->nd_body
));
705 ADD_TRACE(ret
, RUBY_EVENT_B_RETURN
);
706 ISEQ_COMPILE_DATA(iseq
)->last_line
= iseq
->body
->location
.code_location
.end_pos
.lineno
;
708 /* wide range catch handler must put at last */
709 ADD_CATCH_ENTRY(CATCH_TYPE_REDO
, start
, end
, NULL
, start
);
710 ADD_CATCH_ENTRY(CATCH_TYPE_NEXT
, start
, end
, NULL
, end
);
713 case ISEQ_TYPE_CLASS
:
715 ADD_TRACE(ret
, RUBY_EVENT_CLASS
);
716 CHECK(COMPILE(ret
, "scoped node", node
->nd_body
));
717 ADD_TRACE(ret
, RUBY_EVENT_END
);
718 ISEQ_COMPILE_DATA(iseq
)->last_line
= nd_line(node
);
721 case ISEQ_TYPE_METHOD
:
723 ADD_TRACE(ret
, RUBY_EVENT_CALL
);
724 CHECK(COMPILE(ret
, "scoped node", node
->nd_body
));
725 ADD_TRACE(ret
, RUBY_EVENT_RETURN
);
726 ISEQ_COMPILE_DATA(iseq
)->last_line
= nd_line(node
);
730 CHECK(COMPILE(ret
, "scoped node", node
->nd_body
));
737 #define INVALID_ISEQ_TYPE(type) \
738 ISEQ_TYPE_##type: m = #type; goto invalid_iseq_type
739 switch (iseq
->body
->type
) {
740 case INVALID_ISEQ_TYPE(METHOD
);
741 case INVALID_ISEQ_TYPE(CLASS
);
742 case INVALID_ISEQ_TYPE(BLOCK
);
743 case INVALID_ISEQ_TYPE(EVAL
);
744 case INVALID_ISEQ_TYPE(MAIN
);
745 case INVALID_ISEQ_TYPE(TOP
);
746 #undef INVALID_ISEQ_TYPE /* invalid iseq types end */
747 case ISEQ_TYPE_RESCUE
:
748 iseq_set_exception_local_table(iseq
);
749 CHECK(COMPILE(ret
, "rescue", node
));
751 case ISEQ_TYPE_ENSURE
:
752 iseq_set_exception_local_table(iseq
);
753 CHECK(COMPILE_POPPED(ret
, "ensure", node
));
755 case ISEQ_TYPE_PLAIN
:
756 CHECK(COMPILE(ret
, "ensure", node
));
759 COMPILE_ERROR(ERROR_ARGS
"unknown scope: %d", iseq
->body
->type
);
762 COMPILE_ERROR(ERROR_ARGS
"compile/ISEQ_TYPE_%s should not be reached", m
);
767 if (iseq
->body
->type
== ISEQ_TYPE_RESCUE
|| iseq
->body
->type
== ISEQ_TYPE_ENSURE
) {
768 ADD_GETLOCAL(ret
, 0, LVAR_ERRINFO
, 0);
769 ADD_INSN1(ret
, 0, throw, INT2FIX(0) /* continue throw */ );
772 ADD_INSN(ret
, ISEQ_COMPILE_DATA(iseq
)->last_line
, leave
);
776 if (ISEQ_COMPILE_DATA(iseq
)->labels_table
) {
777 st_table
*labels_table
= ISEQ_COMPILE_DATA(iseq
)->labels_table
;
778 ISEQ_COMPILE_DATA(iseq
)->labels_table
= 0;
779 validate_labels(iseq
, labels_table
);
782 CHECK(iseq_setup_insn(iseq
, ret
));
783 return iseq_setup(iseq
, ret
);
787 rb_iseq_translate_threaded_code(rb_iseq_t
*iseq
)
789 #if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
790 const void * const *table
= rb_vm_get_insns_address_table();
792 VALUE
*encoded
= (VALUE
*)iseq
->body
->iseq_encoded
;
794 for (i
= 0; i
< iseq
->body
->iseq_size
; /* */ ) {
795 int insn
= (int)iseq
->body
->iseq_encoded
[i
];
796 int len
= insn_len(insn
);
797 encoded
[i
] = (VALUE
)table
[insn
];
800 FL_SET((VALUE
)iseq
, ISEQ_TRANSLATED
);
806 rb_iseq_original_iseq(const rb_iseq_t
*iseq
) /* cold path */
808 VALUE
*original_code
;
810 if (ISEQ_ORIGINAL_ISEQ(iseq
)) return ISEQ_ORIGINAL_ISEQ(iseq
);
811 original_code
= ISEQ_ORIGINAL_ISEQ_ALLOC(iseq
, iseq
->body
->iseq_size
);
812 MEMCPY(original_code
, iseq
->body
->iseq_encoded
, VALUE
, iseq
->body
->iseq_size
);
814 #if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
818 for (i
= 0; i
< iseq
->body
->iseq_size
; /* */ ) {
819 const void *addr
= (const void *)original_code
[i
];
820 const int insn
= rb_vm_insn_addr2insn(addr
);
822 original_code
[i
] = insn
;
827 return original_code
;
830 /*********************************************/
831 /* definition of data structure for compiler */
832 /*********************************************/
835 * On 32-bit SPARC, GCC by default generates SPARC V7 code that may require
836 * 8-byte word alignment. On the other hand, Oracle Solaris Studio seems to
837 * generate SPARCV8PLUS code with unaligned memory access instructions.
838 * That is why the STRICT_ALIGNMENT is defined only with GCC.
840 #if defined(__sparc) && SIZEOF_VOIDP == 4 && defined(__GNUC__)
841 #define STRICT_ALIGNMENT
845 * Some OpenBSD platforms (including sparc64) require strict alignment.
847 #if defined(__OpenBSD__)
848 #include <sys/endian.h>
849 #ifdef __STRICT_ALIGNMENT
850 #define STRICT_ALIGNMENT
854 #ifdef STRICT_ALIGNMENT
855 #if defined(HAVE_TRUE_LONG_LONG) && SIZEOF_LONG_LONG > SIZEOF_VALUE
856 #define ALIGNMENT_SIZE SIZEOF_LONG_LONG
858 #define ALIGNMENT_SIZE SIZEOF_VALUE
860 #define PADDING_SIZE_MAX ((size_t)((ALIGNMENT_SIZE) - 1))
861 #define ALIGNMENT_SIZE_MASK PADDING_SIZE_MAX
862 /* Note: ALIGNMENT_SIZE == (2 ** N) is expected. */
864 #define PADDING_SIZE_MAX 0
865 #endif /* STRICT_ALIGNMENT */
867 #ifdef STRICT_ALIGNMENT
868 /* calculate padding size for aligned memory access */
870 calc_padding(void *ptr
, size_t size
)
875 mis
= (size_t)ptr
& ALIGNMENT_SIZE_MASK
;
877 padding
= ALIGNMENT_SIZE
- mis
;
880 * On 32-bit sparc or equivalents, when a single VALUE is requested
881 * and padding == sizeof(VALUE), it is clear that no padding is needed.
883 #if ALIGNMENT_SIZE > SIZEOF_VALUE
884 if (size
== sizeof(VALUE
) && padding
== sizeof(VALUE
)) {
891 #endif /* STRICT_ALIGNMENT */
894 compile_data_alloc_with_arena(struct iseq_compile_data_storage
**arena
, size_t size
)
897 struct iseq_compile_data_storage
*storage
= *arena
;
898 #ifdef STRICT_ALIGNMENT
899 size_t padding
= calc_padding((void *)&storage
->buff
[storage
->pos
], size
);
901 const size_t padding
= 0; /* expected to be optimized by compiler */
902 #endif /* STRICT_ALIGNMENT */
904 if (size
>= INT_MAX
- padding
) rb_memerror();
905 if (storage
->pos
+ size
+ padding
> storage
->size
) {
906 unsigned int alloc_size
= storage
->size
;
908 while (alloc_size
< size
+ PADDING_SIZE_MAX
) {
909 if (alloc_size
>= INT_MAX
/ 2) rb_memerror();
912 storage
->next
= (void *)ALLOC_N(char, alloc_size
+
913 offsetof(struct iseq_compile_data_storage
, buff
));
914 storage
= *arena
= storage
->next
;
917 storage
->size
= alloc_size
;
918 #ifdef STRICT_ALIGNMENT
919 padding
= calc_padding((void *)&storage
->buff
[storage
->pos
], size
);
920 #endif /* STRICT_ALIGNMENT */
923 #ifdef STRICT_ALIGNMENT
924 storage
->pos
+= (int)padding
;
925 #endif /* STRICT_ALIGNMENT */
927 ptr
= (void *)&storage
->buff
[storage
->pos
];
928 storage
->pos
+= (int)size
;
933 compile_data_alloc(rb_iseq_t
*iseq
, size_t size
)
935 struct iseq_compile_data_storage
** arena
= &ISEQ_COMPILE_DATA(iseq
)->node
.storage_current
;
936 return compile_data_alloc_with_arena(arena
, size
);
940 compile_data_alloc2(rb_iseq_t
*iseq
, size_t x
, size_t y
)
942 size_t size
= rb_size_mul_or_raise(x
, y
, rb_eRuntimeError
);
943 return compile_data_alloc(iseq
, size
);
/* Zero-initializing variant of compile_data_alloc2: arena-allocate
 * x*y bytes (overflow-checked, raising RuntimeError).
 * NOTE(review): the tail of this function is missing from this
 * extraction — presumably it zeroes `p` and returns it; confirm
 * against the complete source. */
947 compile_data_calloc2(rb_iseq_t
*iseq
, size_t x
, size_t y
)
949 size_t size
= rb_size_mul_or_raise(x
, y
, rb_eRuntimeError
);
950 void *p
= compile_data_alloc(iseq
, size
);
956 compile_data_alloc_insn(rb_iseq_t
*iseq
)
958 struct iseq_compile_data_storage
** arena
= &ISEQ_COMPILE_DATA(iseq
)->insn
.storage_current
;
959 return (INSN
*)compile_data_alloc_with_arena(arena
, sizeof(INSN
));
963 compile_data_alloc_label(rb_iseq_t
*iseq
)
965 return (LABEL
*)compile_data_alloc(iseq
, sizeof(LABEL
));
969 compile_data_alloc_adjust(rb_iseq_t
*iseq
)
971 return (ADJUST
*)compile_data_alloc(iseq
, sizeof(ADJUST
));
975 compile_data_alloc_trace(rb_iseq_t
*iseq
)
977 return (TRACE
*)compile_data_alloc(iseq
, sizeof(TRACE
));
981 * elem1, elemX => elem1, elem2, elemX
984 ELEM_INSERT_NEXT(LINK_ELEMENT
*elem1
, LINK_ELEMENT
*elem2
)
986 elem2
->next
= elem1
->next
;
990 elem2
->next
->prev
= elem2
;
995 * elem1, elemX => elemX, elem2, elem1
998 ELEM_INSERT_PREV(LINK_ELEMENT
*elem1
, LINK_ELEMENT
*elem2
)
1000 elem2
->prev
= elem1
->prev
;
1001 elem2
->next
= elem1
;
1002 elem1
->prev
= elem2
;
1004 elem2
->prev
->next
= elem2
;
1009 * elemX, elem1, elemY => elemX, elem2, elemY
1012 ELEM_REPLACE(LINK_ELEMENT
*elem1
, LINK_ELEMENT
*elem2
)
1014 elem2
->prev
= elem1
->prev
;
1015 elem2
->next
= elem1
->next
;
1017 elem1
->prev
->next
= elem2
;
1020 elem1
->next
->prev
= elem2
;
1025 ELEM_REMOVE(LINK_ELEMENT
*elem
)
1027 elem
->prev
->next
= elem
->next
;
1029 elem
->next
->prev
= elem
->prev
;
1033 static LINK_ELEMENT
*
1034 FIRST_ELEMENT(const LINK_ANCHOR
*const anchor
)
1036 return anchor
->anchor
.next
;
1039 static LINK_ELEMENT
*
1040 LAST_ELEMENT(LINK_ANCHOR
*const anchor
)
1042 return anchor
->last
;
1045 static LINK_ELEMENT
*
1046 POP_ELEMENT(ISEQ_ARG_DECLARE LINK_ANCHOR
*const anchor
)
1048 LINK_ELEMENT
*elem
= anchor
->last
;
1049 anchor
->last
= anchor
->last
->prev
;
1050 anchor
->last
->next
= 0;
1051 verify_list("pop", anchor
);
1055 #define POP_ELEMENT(anchor) POP_ELEMENT(iseq, (anchor))
1058 static LINK_ELEMENT
*
1059 ELEM_FIRST_INSN(LINK_ELEMENT
*elem
)
1062 switch (elem
->type
) {
1063 case ISEQ_ELEMENT_INSN
:
1064 case ISEQ_ELEMENT_ADJUST
:
1074 LIST_INSN_SIZE_ONE(const LINK_ANCHOR
*const anchor
)
1076 LINK_ELEMENT
*first_insn
= ELEM_FIRST_INSN(FIRST_ELEMENT(anchor
));
1077 if (first_insn
!= NULL
&&
1078 ELEM_FIRST_INSN(first_insn
->next
) == NULL
) {
1087 LIST_INSN_SIZE_ZERO(const LINK_ANCHOR
*const anchor
)
1089 if (ELEM_FIRST_INSN(FIRST_ELEMENT(anchor
)) == NULL
) {
1101 * anc1: e1, e2, e3, e4, e5
1102 * anc2: e4, e5 (broken)
1105 APPEND_LIST(ISEQ_ARG_DECLARE LINK_ANCHOR
*const anc1
, LINK_ANCHOR
*const anc2
)
1107 if (anc2
->anchor
.next
) {
1108 anc1
->last
->next
= anc2
->anchor
.next
;
1109 anc2
->anchor
.next
->prev
= anc1
->last
;
1110 anc1
->last
= anc2
->last
;
1112 verify_list("append", anc1
);
1115 #define APPEND_LIST(anc1, anc2) APPEND_LIST(iseq, (anc1), (anc2))
1120 debug_list(ISEQ_ARG_DECLARE LINK_ANCHOR
*const anchor
)
1122 LINK_ELEMENT
*list
= FIRST_ELEMENT(anchor
);
1124 printf("anch: %p, frst: %p, last: %p\n", &anchor
->anchor
,
1125 anchor
->anchor
.next
, anchor
->last
);
1127 printf("curr: %p, next: %p, prev: %p, type: %d\n", list
, list
->next
,
1128 list
->prev
, FIX2INT(list
->type
));
1133 dump_disasm_list(anchor
->anchor
.next
);
1134 verify_list("debug list", anchor
);
1137 #define debug_list(anc) debug_list(iseq, (anc))
1140 #define debug_list(anc) ((void)0)
/* Allocate and initialize a TRACE list element firing `event`.
 * NOTE(review): lines are missing from this extraction — presumably
 * the `data` parameter is stored on the trace and the trace is
 * returned; confirm against the complete source. */
1144 new_trace_body(rb_iseq_t
*iseq
, rb_event_flag_t event
, long data
)
1146 TRACE
*trace
= compile_data_alloc_trace(iseq
);
1148 trace
->link
.type
= ISEQ_ELEMENT_TRACE
;
1149 trace
->link
.next
= NULL
;
1150 trace
->event
= event
;
/* Allocate and initialize a LABEL element: assigns the next label
 * number from the compile data and clears refcount/rescue/removability
 * state.
 * NOTE(review): lines are missing from this extraction — presumably
 * the label position is set from `line` and the label is returned;
 * confirm against the complete source. */
1157 new_label_body(rb_iseq_t
*iseq
, long line
)
1159 LABEL
*labelobj
= compile_data_alloc_label(iseq
);
1161 labelobj
->link
.type
= ISEQ_ELEMENT_LABEL
;
1162 labelobj
->link
.next
= 0;
1164 labelobj
->label_no
= ISEQ_COMPILE_DATA(iseq
)->label_no
++;
1165 labelobj
->sc_state
= 0;
1167 labelobj
->refcnt
= 0;
1169 labelobj
->rescued
= LABEL_RESCUE_NONE
;
1170 labelobj
->unremovable
= 0;
/* Allocate and initialize an ADJUST element targeting `label`.  The
 * referenced label is pinned via LABEL_UNREMOVABLE so later
 * optimization passes cannot delete it.
 * NOTE(review): the return statement and closing brace are missing
 * from this extraction — confirm against the complete source. */
1175 new_adjust_body(rb_iseq_t
*iseq
, LABEL
*label
, int line
)
1177 ADJUST
*adjust
= compile_data_alloc_adjust(iseq
);
1178 adjust
->link
.type
= ISEQ_ELEMENT_ADJUST
;
1179 adjust
->link
.next
= 0;
1180 adjust
->label
= label
;
1181 adjust
->line_no
= line
;
1182 LABEL_UNREMOVABLE(label
);
1187 new_insn_core(rb_iseq_t
*iseq
, int line_no
,
1188 int insn_id
, int argc
, VALUE
*argv
)
1190 INSN
*iobj
= compile_data_alloc_insn(iseq
);
1192 /* printf("insn_id: %d, line: %d\n", insn_id, line_no); */
1194 iobj
->link
.type
= ISEQ_ELEMENT_INSN
;
1195 iobj
->link
.next
= 0;
1196 iobj
->insn_id
= insn_id
;
1197 iobj
->insn_info
.line_no
= line_no
;
1198 iobj
->insn_info
.events
= 0;
1199 iobj
->operands
= argv
;
1200 iobj
->operand_size
= argc
;
1206 new_insn_body(rb_iseq_t
*iseq
, int line_no
, enum ruby_vminsn_type insn_id
, int argc
, ...)
1208 VALUE
*operands
= 0;
1212 va_init_list(argv
, argc
);
1213 operands
= compile_data_alloc2(iseq
, sizeof(VALUE
), argc
);
1214 for (i
= 0; i
< argc
; i
++) {
1215 VALUE v
= va_arg(argv
, VALUE
);
1220 return new_insn_core(iseq
, line_no
, insn_id
, argc
, operands
);
1223 static const struct rb_callinfo
*
1224 new_callinfo(rb_iseq_t
*iseq
, ID mid
, int argc
, unsigned int flag
, struct rb_callinfo_kwarg
*kw_arg
, int has_blockiseq
)
1226 VM_ASSERT(argc
>= 0);
1228 if (!(flag
& (VM_CALL_ARGS_SPLAT
| VM_CALL_ARGS_BLOCKARG
| VM_CALL_KW_SPLAT
)) &&
1229 kw_arg
== NULL
&& !has_blockiseq
) {
1230 flag
|= VM_CALL_ARGS_SIMPLE
;
1234 flag
|= VM_CALL_KWARG
;
1235 argc
+= kw_arg
->keyword_len
;
1238 // fprintf(stderr, "[%d] id:%s\t", (int)iseq->body->ci_size, rb_id2name(mid)); rp(iseq);
1239 iseq
->body
->ci_size
++;
1240 const struct rb_callinfo
*ci
= vm_ci_new(mid
, flag
, argc
, kw_arg
);
1241 RB_OBJ_WRITTEN(iseq
, Qundef
, ci
);
1246 new_insn_send(rb_iseq_t
*iseq
, int line_no
, ID id
, VALUE argc
, const rb_iseq_t
*blockiseq
, VALUE flag
, struct rb_callinfo_kwarg
*keywords
)
1248 VALUE
*operands
= compile_data_calloc2(iseq
, sizeof(VALUE
), 2);
1249 operands
[0] = (VALUE
)new_callinfo(iseq
, id
, FIX2INT(argc
), FIX2INT(flag
), keywords
, blockiseq
!= NULL
);
1250 operands
[1] = (VALUE
)blockiseq
;
1251 return new_insn_core(iseq
, line_no
, BIN(send
), 2, operands
);
1255 new_child_iseq(rb_iseq_t
*iseq
, const NODE
*const node
,
1256 VALUE name
, const rb_iseq_t
*parent
, enum iseq_type type
, int line_no
)
1258 rb_iseq_t
*ret_iseq
;
1262 ast
.compile_option
= 0;
1263 ast
.line_count
= -1;
1265 debugs("[new_child_iseq]> ---------------------------------------\n");
1266 ret_iseq
= rb_iseq_new_with_opt(&ast
, name
,
1267 rb_iseq_path(iseq
), rb_iseq_realpath(iseq
),
1268 INT2FIX(line_no
), parent
, type
, ISEQ_COMPILE_DATA(iseq
)->option
);
1269 debugs("[new_child_iseq]< ---------------------------------------\n");
1274 new_child_iseq_with_callback(rb_iseq_t
*iseq
, const struct rb_iseq_new_with_callback_callback_func
*ifunc
,
1275 VALUE name
, const rb_iseq_t
*parent
, enum iseq_type type
, int line_no
)
1277 rb_iseq_t
*ret_iseq
;
1279 debugs("[new_child_iseq_with_callback]> ---------------------------------------\n");
1280 ret_iseq
= rb_iseq_new_with_callback(ifunc
, name
,
1281 rb_iseq_path(iseq
), rb_iseq_realpath(iseq
),
1282 INT2FIX(line_no
), parent
, type
, ISEQ_COMPILE_DATA(iseq
)->option
);
1283 debugs("[new_child_iseq_with_callback]< ---------------------------------------\n");
1288 set_catch_except_p(struct rb_iseq_constant_body
*body
)
1290 body
->catch_except_p
= TRUE
;
1291 if (body
->parent_iseq
!= NULL
) {
1292 set_catch_except_p(body
->parent_iseq
->body
);
1296 /* Set body->catch_except_p to TRUE if the ISeq may catch an exception. If it is FALSE,
1297 JIT-ed code may be optimized. If we are extremely conservative, we should set TRUE
1298 if catch table exists. But we want to optimize while loop, which always has catch
1299 table entries for break/next/redo.
1301 So this function sets TRUE for limited ISeqs with break/next/redo catch table entries
1302 whose child ISeq would really raise an exception. */
1304 update_catch_except_flags(struct rb_iseq_constant_body
*body
)
1309 const struct iseq_catch_table
*ct
= body
->catch_table
;
1311 /* This assumes that a block has parent_iseq which may catch an exception from the block, and that
1312 BREAK/NEXT/REDO catch table entries are used only when `throw` insn is used in the block. */
1314 while (pos
< body
->iseq_size
) {
1315 #if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
1316 insn
= rb_vm_insn_addr2insn((void *)body
->iseq_encoded
[pos
]);
1318 insn
= (int)body
->iseq_encoded
[pos
];
1320 if (insn
== BIN(throw)) {
1321 set_catch_except_p(body
);
1324 pos
+= insn_len(insn
);
1330 for (i
= 0; i
< ct
->size
; i
++) {
1331 const struct iseq_catch_table_entry
*entry
=
1332 UNALIGNED_MEMBER_PTR(ct
, entries
[i
]);
1333 if (entry
->type
!= CATCH_TYPE_BREAK
1334 && entry
->type
!= CATCH_TYPE_NEXT
1335 && entry
->type
!= CATCH_TYPE_REDO
) {
1336 body
->catch_except_p
= TRUE
;
1343 iseq_insert_nop_between_end_and_cont(rb_iseq_t
*iseq
)
1345 VALUE catch_table_ary
= ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
;
1346 if (NIL_P(catch_table_ary
)) return;
1347 unsigned int i
, tlen
= (unsigned int)RARRAY_LEN(catch_table_ary
);
1348 const VALUE
*tptr
= RARRAY_CONST_PTR_TRANSIENT(catch_table_ary
);
1349 for (i
= 0; i
< tlen
; i
++) {
1350 const VALUE
*ptr
= RARRAY_CONST_PTR_TRANSIENT(tptr
[i
]);
1351 LINK_ELEMENT
*end
= (LINK_ELEMENT
*)(ptr
[2] & ~1);
1352 LINK_ELEMENT
*cont
= (LINK_ELEMENT
*)(ptr
[4] & ~1);
1354 for (e
= end
; e
&& (IS_LABEL(e
) || IS_TRACE(e
)); e
= e
->next
) {
1356 INSN
*nop
= new_insn_core(iseq
, 0, BIN(nop
), 0, 0);
1357 ELEM_INSERT_NEXT(end
, &nop
->link
);
1365 iseq_setup_insn(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
1367 if (RTEST(ISEQ_COMPILE_DATA(iseq
)->err_info
))
1370 /* debugs("[compile step 2] (iseq_array_to_linkedlist)\n"); */
1372 if (compile_debug
> 5)
1373 dump_disasm_list(FIRST_ELEMENT(anchor
));
1375 debugs("[compile step 3.1 (iseq_optimize)]\n");
1376 iseq_optimize(iseq
, anchor
);
1378 if (compile_debug
> 5)
1379 dump_disasm_list(FIRST_ELEMENT(anchor
));
1381 if (ISEQ_COMPILE_DATA(iseq
)->option
->instructions_unification
) {
1382 debugs("[compile step 3.2 (iseq_insns_unification)]\n");
1383 iseq_insns_unification(iseq
, anchor
);
1384 if (compile_debug
> 5)
1385 dump_disasm_list(FIRST_ELEMENT(anchor
));
1388 if (ISEQ_COMPILE_DATA(iseq
)->option
->stack_caching
) {
1389 debugs("[compile step 3.3 (iseq_set_sequence_stackcaching)]\n");
1390 iseq_set_sequence_stackcaching(iseq
, anchor
);
1391 if (compile_debug
> 5)
1392 dump_disasm_list(FIRST_ELEMENT(anchor
));
1395 debugs("[compile step 3.4 (iseq_insert_nop_between_end_and_cont)]\n");
1396 iseq_insert_nop_between_end_and_cont(iseq
);
1402 iseq_setup(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
1404 if (RTEST(ISEQ_COMPILE_DATA(iseq
)->err_info
))
1407 debugs("[compile step 4.1 (iseq_set_sequence)]\n");
1408 if (!iseq_set_sequence(iseq
, anchor
)) return COMPILE_NG
;
1409 if (compile_debug
> 5)
1410 dump_disasm_list(FIRST_ELEMENT(anchor
));
1412 debugs("[compile step 4.2 (iseq_set_exception_table)]\n");
1413 if (!iseq_set_exception_table(iseq
)) return COMPILE_NG
;
1415 debugs("[compile step 4.3 (set_optargs_table)] \n");
1416 if (!iseq_set_optargs_table(iseq
)) return COMPILE_NG
;
1418 debugs("[compile step 5 (iseq_translate_threaded_code)] \n");
1419 if (!rb_iseq_translate_threaded_code(iseq
)) return COMPILE_NG
;
1421 update_catch_except_flags(iseq
->body
);
1423 if (compile_debug
> 1) {
1424 VALUE str
= rb_iseq_disasm(iseq
);
1425 printf("%s\n", StringValueCStr(str
));
1427 verify_call_cache(iseq
);
1428 debugs("[compile step: finish]\n");
1434 iseq_set_exception_local_table(rb_iseq_t
*iseq
)
1436 iseq
->body
->local_table_size
= numberof(rb_iseq_shared_exc_local_tbl
);
1437 iseq
->body
->local_table
= rb_iseq_shared_exc_local_tbl
;
1442 get_lvar_level(const rb_iseq_t
*iseq
)
1445 while (iseq
!= iseq
->body
->local_iseq
) {
1447 iseq
= iseq
->body
->parent_iseq
;
1453 get_dyna_var_idx_at_raw(const rb_iseq_t
*iseq
, ID id
)
1457 for (i
= 0; i
< iseq
->body
->local_table_size
; i
++) {
1458 if (iseq
->body
->local_table
[i
] == id
) {
1466 get_local_var_idx(const rb_iseq_t
*iseq
, ID id
)
1468 int idx
= get_dyna_var_idx_at_raw(iseq
->body
->local_iseq
, id
);
1471 COMPILE_ERROR(iseq
, ISEQ_LAST_LINE(iseq
),
1472 "get_local_var_idx: %d", idx
);
1479 get_dyna_var_idx(const rb_iseq_t
*iseq
, ID id
, int *level
, int *ls
)
1481 int lv
= 0, idx
= -1;
1482 const rb_iseq_t
*const topmost_iseq
= iseq
;
1485 idx
= get_dyna_var_idx_at_raw(iseq
, id
);
1489 iseq
= iseq
->body
->parent_iseq
;
1494 COMPILE_ERROR(topmost_iseq
, ISEQ_LAST_LINE(topmost_iseq
),
1495 "get_dyna_var_idx: -1");
1499 *ls
= iseq
->body
->local_table_size
;
1504 iseq_local_block_param_p(const rb_iseq_t
*iseq
, unsigned int idx
, unsigned int level
)
1506 const struct rb_iseq_constant_body
*body
;
1508 iseq
= iseq
->body
->parent_iseq
;
1512 if (body
->local_iseq
== iseq
&& /* local variables */
1513 body
->param
.flags
.has_block
&&
1514 body
->local_table_size
- body
->param
.block_start
== idx
) {
1523 iseq_block_param_id_p(const rb_iseq_t
*iseq
, ID id
, int *pidx
, int *plevel
)
1526 int idx
= get_dyna_var_idx(iseq
, id
, &level
, &ls
);
1527 if (iseq_local_block_param_p(iseq
, ls
- idx
, level
)) {
1538 iseq_add_getlocal(rb_iseq_t
*iseq
, LINK_ANCHOR
*const seq
, int line
, int idx
, int level
)
1540 if (iseq_local_block_param_p(iseq
, idx
, level
)) {
1541 ADD_INSN2(seq
, line
, getblockparam
, INT2FIX((idx
) + VM_ENV_DATA_SIZE
- 1), INT2FIX(level
));
1544 ADD_INSN2(seq
, line
, getlocal
, INT2FIX((idx
) + VM_ENV_DATA_SIZE
- 1), INT2FIX(level
));
1549 iseq_add_setlocal(rb_iseq_t
*iseq
, LINK_ANCHOR
*const seq
, int line
, int idx
, int level
)
1551 if (iseq_local_block_param_p(iseq
, idx
, level
)) {
1552 ADD_INSN2(seq
, line
, setblockparam
, INT2FIX((idx
) + VM_ENV_DATA_SIZE
- 1), INT2FIX(level
));
1555 ADD_INSN2(seq
, line
, setlocal
, INT2FIX((idx
) + VM_ENV_DATA_SIZE
- 1), INT2FIX(level
));
1562 iseq_calc_param_size(rb_iseq_t
*iseq
)
1564 struct rb_iseq_constant_body
*const body
= iseq
->body
;
1565 if (body
->param
.flags
.has_opt
||
1566 body
->param
.flags
.has_post
||
1567 body
->param
.flags
.has_rest
||
1568 body
->param
.flags
.has_block
||
1569 body
->param
.flags
.has_kw
||
1570 body
->param
.flags
.has_kwrest
) {
1572 if (body
->param
.flags
.has_block
) {
1573 body
->param
.size
= body
->param
.block_start
+ 1;
1575 else if (body
->param
.flags
.has_kwrest
) {
1576 body
->param
.size
= body
->param
.keyword
->rest_start
+ 1;
1578 else if (body
->param
.flags
.has_kw
) {
1579 body
->param
.size
= body
->param
.keyword
->bits_start
+ 1;
1581 else if (body
->param
.flags
.has_post
) {
1582 body
->param
.size
= body
->param
.post_start
+ body
->param
.post_num
;
1584 else if (body
->param
.flags
.has_rest
) {
1585 body
->param
.size
= body
->param
.rest_start
+ 1;
1587 else if (body
->param
.flags
.has_opt
) {
1588 body
->param
.size
= body
->param
.lead_num
+ body
->param
.opt_num
;
1595 body
->param
.size
= body
->param
.lead_num
;
1600 iseq_set_arguments_keywords(rb_iseq_t
*iseq
, LINK_ANCHOR
*const optargs
,
1601 const struct rb_args_info
*args
, int arg_size
)
1603 const NODE
*node
= args
->kw_args
;
1604 struct rb_iseq_constant_body
*const body
= iseq
->body
;
1605 struct rb_iseq_param_keyword
*keyword
;
1606 const VALUE default_values
= rb_ary_tmp_new(1);
1607 const VALUE complex_mark
= rb_str_tmp_new(0);
1608 int kw
= 0, rkw
= 0, di
= 0, i
;
1610 body
->param
.flags
.has_kw
= TRUE
;
1611 body
->param
.keyword
= keyword
= ZALLOC_N(struct rb_iseq_param_keyword
, 1);
1615 node
= node
->nd_next
;
1618 keyword
->bits_start
= arg_size
++;
1620 node
= args
->kw_args
;
1622 const NODE
*val_node
= node
->nd_body
->nd_value
;
1625 if (val_node
== NODE_SPECIAL_REQUIRED_KEYWORD
) {
1629 switch (nd_type(val_node
)) {
1631 dv
= val_node
->nd_lit
;
1643 NO_CHECK(COMPILE_POPPED(optargs
, "kwarg", node
)); /* nd_type(node) == NODE_KW_ARG */
1647 keyword
->num
= ++di
;
1648 rb_ary_push(default_values
, dv
);
1651 node
= node
->nd_next
;
1656 if (args
->kw_rest_arg
->nd_vid
!= 0) {
1657 keyword
->rest_start
= arg_size
++;
1658 body
->param
.flags
.has_kwrest
= TRUE
;
1660 keyword
->required_num
= rkw
;
1661 keyword
->table
= &body
->local_table
[keyword
->bits_start
- keyword
->num
];
1664 VALUE
*dvs
= ALLOC_N(VALUE
, RARRAY_LEN(default_values
));
1666 for (i
= 0; i
< RARRAY_LEN(default_values
); i
++) {
1667 VALUE dv
= RARRAY_AREF(default_values
, i
);
1668 if (dv
== complex_mark
) dv
= Qundef
;
1669 if (!SPECIAL_CONST_P(dv
)) {
1670 RB_OBJ_WRITTEN(iseq
, Qundef
, dv
);
1675 keyword
->default_values
= dvs
;
1681 iseq_set_arguments(rb_iseq_t
*iseq
, LINK_ANCHOR
*const optargs
, const NODE
*const node_args
)
1683 debugs("iseq_set_arguments: %s\n", node_args
? "" : "0");
1686 struct rb_iseq_constant_body
*const body
= iseq
->body
;
1687 struct rb_args_info
*args
= node_args
->nd_ainfo
;
1693 EXPECT_NODE("iseq_set_arguments", node_args
, NODE_ARGS
, COMPILE_NG
);
1695 body
->param
.flags
.ruby2_keywords
= args
->ruby2_keywords
;
1696 body
->param
.lead_num
= arg_size
= (int)args
->pre_args_num
;
1697 if (body
->param
.lead_num
> 0) body
->param
.flags
.has_lead
= TRUE
;
1698 debugs(" - argc: %d\n", body
->param
.lead_num
);
1700 rest_id
= args
->rest_arg
;
1701 if (rest_id
== NODE_SPECIAL_EXCESSIVE_COMMA
) {
1705 block_id
= args
->block_arg
;
1707 if (args
->opt_args
) {
1708 const NODE
*node
= args
->opt_args
;
1710 VALUE labels
= rb_ary_tmp_new(1);
1715 label
= NEW_LABEL(nd_line(node
));
1716 rb_ary_push(labels
, (VALUE
)label
| 1);
1717 ADD_LABEL(optargs
, label
);
1718 NO_CHECK(COMPILE_POPPED(optargs
, "optarg", node
->nd_body
));
1719 node
= node
->nd_next
;
1724 label
= NEW_LABEL(nd_line(node_args
));
1725 rb_ary_push(labels
, (VALUE
)label
| 1);
1726 ADD_LABEL(optargs
, label
);
1728 opt_table
= ALLOC_N(VALUE
, i
+1);
1730 MEMCPY(opt_table
, RARRAY_CONST_PTR_TRANSIENT(labels
), VALUE
, i
+1);
1731 for (j
= 0; j
< i
+1; j
++) {
1734 rb_ary_clear(labels
);
1736 body
->param
.flags
.has_opt
= TRUE
;
1737 body
->param
.opt_num
= i
;
1738 body
->param
.opt_table
= opt_table
;
1743 body
->param
.rest_start
= arg_size
++;
1744 body
->param
.flags
.has_rest
= TRUE
;
1745 assert(body
->param
.rest_start
!= -1);
1748 if (args
->first_post_arg
) {
1749 body
->param
.post_start
= arg_size
;
1750 body
->param
.post_num
= args
->post_args_num
;
1751 body
->param
.flags
.has_post
= TRUE
;
1752 arg_size
+= args
->post_args_num
;
1754 if (body
->param
.flags
.has_rest
) { /* TODO: why that? */
1755 body
->param
.post_start
= body
->param
.rest_start
+ 1;
1759 if (args
->kw_args
) {
1760 arg_size
= iseq_set_arguments_keywords(iseq
, optargs
, args
, arg_size
);
1762 else if (args
->kw_rest_arg
) {
1763 struct rb_iseq_param_keyword
*keyword
= ZALLOC_N(struct rb_iseq_param_keyword
, 1);
1764 keyword
->rest_start
= arg_size
++;
1765 body
->param
.keyword
= keyword
;
1766 body
->param
.flags
.has_kwrest
= TRUE
;
1768 else if (args
->no_kwarg
) {
1769 body
->param
.flags
.accepts_no_kwarg
= TRUE
;
1773 body
->param
.block_start
= arg_size
++;
1774 body
->param
.flags
.has_block
= TRUE
;
1777 iseq_calc_param_size(iseq
);
1778 body
->param
.size
= arg_size
;
1780 if (args
->pre_init
) { /* m_init */
1781 NO_CHECK(COMPILE_POPPED(optargs
, "init arguments (m)", args
->pre_init
));
1783 if (args
->post_init
) { /* p_init */
1784 NO_CHECK(COMPILE_POPPED(optargs
, "init arguments (p)", args
->post_init
));
1787 if (body
->type
== ISEQ_TYPE_BLOCK
) {
1788 if (body
->param
.flags
.has_opt
== FALSE
&&
1789 body
->param
.flags
.has_post
== FALSE
&&
1790 body
->param
.flags
.has_rest
== FALSE
&&
1791 body
->param
.flags
.has_kw
== FALSE
&&
1792 body
->param
.flags
.has_kwrest
== FALSE
) {
1794 if (body
->param
.lead_num
== 1 && last_comma
== 0) {
1796 body
->param
.flags
.ambiguous_param0
= TRUE
;
1806 iseq_set_local_table(rb_iseq_t
*iseq
, const ID
*tbl
)
1811 size
= (unsigned int)*tbl
;
1819 ID
*ids
= (ID
*)ALLOC_N(ID
, size
);
1820 MEMCPY(ids
, tbl
, ID
, size
);
1821 iseq
->body
->local_table
= ids
;
1823 iseq
->body
->local_table_size
= size
;
1825 debugs("iseq_set_local_table: %u\n", iseq
->body
->local_table_size
);
1830 cdhash_cmp(VALUE val
, VALUE lit
)
1837 else if ((tlit
= OBJ_BUILTIN_TYPE(lit
)) == -1) {
1840 else if ((tval
= OBJ_BUILTIN_TYPE(val
)) == -1) {
1843 else if (tlit
!= tval
) {
1846 else if (tlit
== T_SYMBOL
) {
1849 else if (tlit
== T_STRING
) {
1850 return rb_str_hash_cmp(lit
, val
);
1852 else if (tlit
== T_BIGNUM
) {
1853 long x
= FIX2LONG(rb_big_cmp(lit
, val
));
1855 /* Given lit and val are both Bignum, x must be -1, 0, 1.
1856 * There is no need to call rb_fix2int here. */
1857 RUBY_ASSERT((x
== 1) || (x
== 0) || (x
== -1));
1860 else if (tlit
== T_FLOAT
) {
1861 return rb_float_cmp(lit
, val
);
1864 UNREACHABLE_RETURN(-1);
1869 cdhash_hash(VALUE a
)
1871 switch (OBJ_BUILTIN_TYPE(a
)) {
1874 return (st_index_t
)a
;
1876 return rb_str_hash(a
);
1878 return FIX2LONG(rb_big_hash(a
));
1880 return rb_dbl_long_hash(RFLOAT_VALUE(a
));
1882 UNREACHABLE_RETURN(0);
1886 static const struct st_hash_type cdhash_type
= {
1891 struct cdhash_set_label_struct
{
1898 cdhash_set_label_i(VALUE key
, VALUE val
, VALUE ptr
)
1900 struct cdhash_set_label_struct
*data
= (struct cdhash_set_label_struct
*)ptr
;
1901 LABEL
*lobj
= (LABEL
*)(val
& ~1);
1902 rb_hash_aset(data
->hash
, key
, INT2FIX(lobj
->position
- (data
->pos
+data
->len
)));
1908 get_ivar_ic_value(rb_iseq_t
*iseq
,ID id
)
1911 struct rb_id_table
*tbl
= ISEQ_COMPILE_DATA(iseq
)->ivar_cache_table
;
1913 if (rb_id_table_lookup(tbl
,id
,&val
)) {
1918 tbl
= rb_id_table_create(1);
1919 ISEQ_COMPILE_DATA(iseq
)->ivar_cache_table
= tbl
;
1921 val
= INT2FIX(iseq
->body
->is_size
++);
1922 rb_id_table_insert(tbl
,id
,val
);
1926 #define BADINSN_DUMP(anchor, list, dest) \
1927 dump_disasm_list_with_cursor(FIRST_ELEMENT(anchor), list, dest)
1929 #define BADINSN_ERROR \
1930 (xfree(generated_iseq), \
1931 xfree(insns_info), \
1932 BADINSN_DUMP(anchor, list, NULL), \
1936 fix_sp_depth(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
1938 int stack_max
= 0, sp
= 0, line
= 0;
1941 for (list
= FIRST_ELEMENT(anchor
); list
; list
= list
->next
) {
1942 if (list
->type
== ISEQ_ELEMENT_LABEL
) {
1943 LABEL
*lobj
= (LABEL
*)list
;
1948 for (list
= FIRST_ELEMENT(anchor
); list
; list
= list
->next
) {
1949 switch (list
->type
) {
1950 case ISEQ_ELEMENT_INSN
:
1955 INSN
*iobj
= (INSN
*)list
;
1958 sp
= calc_sp_depth(sp
, iobj
);
1960 BADINSN_DUMP(anchor
, list
, NULL
);
1961 COMPILE_ERROR(iseq
, iobj
->insn_info
.line_no
,
1962 "argument stack underflow (%d)", sp
);
1965 if (sp
> stack_max
) {
1969 line
= iobj
->insn_info
.line_no
;
1970 /* fprintf(stderr, "insn: %-16s, sp: %d\n", insn_name(iobj->insn_id), sp); */
1971 operands
= iobj
->operands
;
1972 insn
= iobj
->insn_id
;
1973 types
= insn_op_types(insn
);
1974 len
= insn_len(insn
);
1977 if (iobj
->operand_size
!= len
- 1) {
1978 /* printf("operand size miss! (%d, %d)\n", iobj->operand_size, len); */
1979 BADINSN_DUMP(anchor
, list
, NULL
);
1980 COMPILE_ERROR(iseq
, iobj
->insn_info
.line_no
,
1981 "operand size miss! (%d for %d)",
1982 iobj
->operand_size
, len
- 1);
1986 for (j
= 0; types
[j
]; j
++) {
1987 if (types
[j
] == TS_OFFSET
) {
1988 /* label(destination position) */
1989 LABEL
*lobj
= (LABEL
*)operands
[j
];
1991 BADINSN_DUMP(anchor
, list
, NULL
);
1992 COMPILE_ERROR(iseq
, iobj
->insn_info
.line_no
,
1993 "unknown label: "LABEL_FORMAT
, lobj
->label_no
);
1996 if (lobj
->sp
== -1) {
2003 case ISEQ_ELEMENT_LABEL
:
2005 LABEL
*lobj
= (LABEL
*)list
;
2006 if (lobj
->sp
== -1) {
2014 case ISEQ_ELEMENT_TRACE
:
2019 case ISEQ_ELEMENT_ADJUST
:
2021 ADJUST
*adjust
= (ADJUST
*)list
;
2024 sp
= adjust
->label
? adjust
->label
->sp
: 0;
2025 if (adjust
->line_no
!= -1 && orig_sp
- sp
< 0) {
2026 BADINSN_DUMP(anchor
, list
, NULL
);
2027 COMPILE_ERROR(iseq
, adjust
->line_no
,
2028 "iseq_set_sequence: adjust bug %d < %d",
2035 BADINSN_DUMP(anchor
, list
, NULL
);
2036 COMPILE_ERROR(iseq
, line
, "unknown list type: %d", list
->type
);
2044 add_insn_info(struct iseq_insn_info_entry
*insns_info
, unsigned int *positions
,
2045 int insns_info_index
, int code_index
, const INSN
*iobj
)
2047 if (insns_info_index
== 0 ||
2048 insns_info
[insns_info_index
-1].line_no
!= iobj
->insn_info
.line_no
||
2049 insns_info
[insns_info_index
-1].events
!= iobj
->insn_info
.events
) {
2050 insns_info
[insns_info_index
].line_no
= iobj
->insn_info
.line_no
;
2051 insns_info
[insns_info_index
].events
= iobj
->insn_info
.events
;
2052 positions
[insns_info_index
] = code_index
;
2059 add_adjust_info(struct iseq_insn_info_entry
*insns_info
, unsigned int *positions
,
2060 int insns_info_index
, int code_index
, const ADJUST
*adjust
)
2062 if (insns_info_index
> 0 ||
2063 insns_info
[insns_info_index
-1].line_no
!= adjust
->line_no
) {
2064 insns_info
[insns_info_index
].line_no
= adjust
->line_no
;
2065 insns_info
[insns_info_index
].events
= 0;
2066 positions
[insns_info_index
] = code_index
;
2073 ruby insn object list -> raw instruction sequence
2076 iseq_set_sequence(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
2078 VALUE iseqv
= (VALUE
)iseq
;
2079 struct iseq_insn_info_entry
*insns_info
;
2080 struct rb_iseq_constant_body
*const body
= iseq
->body
;
2081 unsigned int *positions
;
2083 VALUE
*generated_iseq
;
2084 rb_event_flag_t events
= 0;
2087 int insn_num
, code_index
, insns_info_index
, sp
= 0;
2088 int stack_max
= fix_sp_depth(iseq
, anchor
);
2090 if (stack_max
< 0) return COMPILE_NG
;
2092 /* fix label position */
2093 insn_num
= code_index
= 0;
2094 for (list
= FIRST_ELEMENT(anchor
); list
; list
= list
->next
) {
2095 switch (list
->type
) {
2096 case ISEQ_ELEMENT_INSN
:
2098 INSN
*iobj
= (INSN
*)list
;
2100 sp
= calc_sp_depth(sp
, iobj
);
2102 events
= iobj
->insn_info
.events
|= events
;
2103 if (ISEQ_COVERAGE(iseq
)) {
2104 if (ISEQ_LINE_COVERAGE(iseq
) && (events
& RUBY_EVENT_COVERAGE_LINE
) &&
2105 !(rb_get_coverage_mode() & COVERAGE_TARGET_ONESHOT_LINES
)) {
2106 int line
= iobj
->insn_info
.line_no
;
2108 RARRAY_ASET(ISEQ_LINE_COVERAGE(iseq
), line
- 1, INT2FIX(0));
2111 if (ISEQ_BRANCH_COVERAGE(iseq
) && (events
& RUBY_EVENT_COVERAGE_BRANCH
)) {
2112 while (RARRAY_LEN(ISEQ_PC2BRANCHINDEX(iseq
)) <= code_index
) {
2113 rb_ary_push(ISEQ_PC2BRANCHINDEX(iseq
), Qnil
);
2115 RARRAY_ASET(ISEQ_PC2BRANCHINDEX(iseq
), code_index
, INT2FIX(data
));
2118 code_index
+= insn_data_length(iobj
);
2123 case ISEQ_ELEMENT_LABEL
:
2125 LABEL
*lobj
= (LABEL
*)list
;
2126 lobj
->position
= code_index
;
2130 case ISEQ_ELEMENT_TRACE
:
2132 TRACE
*trace
= (TRACE
*)list
;
2133 events
|= trace
->event
;
2134 if (trace
->event
& RUBY_EVENT_COVERAGE_BRANCH
) data
= trace
->data
;
2137 case ISEQ_ELEMENT_ADJUST
:
2139 ADJUST
*adjust
= (ADJUST
*)list
;
2140 if (adjust
->line_no
!= -1) {
2142 sp
= adjust
->label
? adjust
->label
->sp
: 0;
2143 if (orig_sp
- sp
> 0) {
2144 if (orig_sp
- sp
> 1) code_index
++; /* 1 operand */
2145 code_index
++; /* insn */
2155 /* make instruction sequence */
2156 generated_iseq
= ALLOC_N(VALUE
, code_index
);
2157 insns_info
= ALLOC_N(struct iseq_insn_info_entry
, insn_num
);
2158 positions
= ALLOC_N(unsigned int, insn_num
);
2159 body
->is_entries
= ZALLOC_N(union iseq_inline_storage_entry
, body
->is_size
);
2160 body
->call_data
= ZALLOC_N(struct rb_call_data
, body
->ci_size
);
2161 ISEQ_COMPILE_DATA(iseq
)->ci_index
= 0;
2163 list
= FIRST_ELEMENT(anchor
);
2164 insns_info_index
= code_index
= sp
= 0;
2167 switch (list
->type
) {
2168 case ISEQ_ELEMENT_INSN
:
2173 INSN
*iobj
= (INSN
*)list
;
2176 sp
= calc_sp_depth(sp
, iobj
);
2177 /* fprintf(stderr, "insn: %-16s, sp: %d\n", insn_name(iobj->insn_id), sp); */
2178 operands
= iobj
->operands
;
2179 insn
= iobj
->insn_id
;
2180 generated_iseq
[code_index
] = insn
;
2181 types
= insn_op_types(insn
);
2182 len
= insn_len(insn
);
2184 for (j
= 0; types
[j
]; j
++) {
2185 char type
= types
[j
];
2186 /* printf("--> [%c - (%d-%d)]\n", type, k, j); */
2190 /* label(destination position) */
2191 LABEL
*lobj
= (LABEL
*)operands
[j
];
2192 generated_iseq
[code_index
+ 1 + j
] = lobj
->position
- (code_index
+ len
);
2197 VALUE map
= operands
[j
];
2198 struct cdhash_set_label_struct data
;
2200 data
.pos
= code_index
;
2202 rb_hash_foreach(map
, cdhash_set_label_i
, (VALUE
)&data
);
2204 rb_hash_rehash(map
);
2205 freeze_hide_obj(map
);
2206 generated_iseq
[code_index
+ 1 + j
] = map
;
2207 RB_OBJ_WRITTEN(iseq
, Qundef
, map
);
2208 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
2212 case TS_NUM
: /* ulong */
2213 generated_iseq
[code_index
+ 1 + j
] = FIX2INT(operands
[j
]);
2215 case TS_VALUE
: /* VALUE */
2216 case TS_ISEQ
: /* iseq */
2218 VALUE v
= operands
[j
];
2219 generated_iseq
[code_index
+ 1 + j
] = v
;
2220 /* to mark ruby object */
2221 if (!SPECIAL_CONST_P(v
)) {
2222 RB_OBJ_WRITTEN(iseq
, Qundef
, v
);
2223 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
2227 case TS_ISE
: /* inline storage entry */
2228 /* Treated as an IC, but may contain a markable VALUE */
2229 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
2231 case TS_IC
: /* inline cache */
2232 case TS_IVC
: /* inline ivar cache */
2234 unsigned int ic_index
= FIX2UINT(operands
[j
]);
2235 IC ic
= (IC
)&body
->is_entries
[ic_index
];
2236 if (UNLIKELY(ic_index
>= body
->is_size
)) {
2237 BADINSN_DUMP(anchor
, &iobj
->link
, 0);
2238 COMPILE_ERROR(iseq
, iobj
->insn_info
.line_no
,
2239 "iseq_set_sequence: ic_index overflow: index: %d, size: %d",
2240 ic_index
, body
->is_size
);
2242 generated_iseq
[code_index
+ 1 + j
] = (VALUE
)ic
;
2247 const struct rb_callinfo
*source_ci
= (const struct rb_callinfo
*)operands
[j
];
2248 struct rb_call_data
*cd
= &body
->call_data
[ISEQ_COMPILE_DATA(iseq
)->ci_index
++];
2249 assert(ISEQ_COMPILE_DATA(iseq
)->ci_index
<= body
->ci_size
);
2251 cd
->cc
= vm_cc_empty();
2252 generated_iseq
[code_index
+ 1 + j
] = (VALUE
)cd
;
2255 case TS_ID
: /* ID */
2256 generated_iseq
[code_index
+ 1 + j
] = SYM2ID(operands
[j
]);
2260 struct rb_global_entry
*entry
=
2261 (struct rb_global_entry
*)(operands
[j
] & (~1));
2262 generated_iseq
[code_index
+ 1 + j
] = (VALUE
)entry
;
2266 generated_iseq
[code_index
+ 1 + j
] = operands
[j
];
2269 generated_iseq
[code_index
+ 1 + j
] = operands
[j
];
2272 BADINSN_ERROR(iseq
, iobj
->insn_info
.line_no
,
2273 "unknown operand type: %c", type
);
2277 if (add_insn_info(insns_info
, positions
, insns_info_index
, code_index
, iobj
)) insns_info_index
++;
2281 case ISEQ_ELEMENT_LABEL
:
2283 LABEL
*lobj
= (LABEL
*)list
;
2287 case ISEQ_ELEMENT_ADJUST
:
2289 ADJUST
*adjust
= (ADJUST
*)list
;
2292 if (adjust
->label
) {
2293 sp
= adjust
->label
->sp
;
2299 if (adjust
->line_no
!= -1) {
2300 const int diff
= orig_sp
- sp
;
2302 if (add_adjust_info(insns_info
, positions
, insns_info_index
, code_index
, adjust
)) insns_info_index
++;
2305 generated_iseq
[code_index
++] = BIN(adjuststack
);
2306 generated_iseq
[code_index
++] = orig_sp
- sp
;
2308 else if (diff
== 1) {
2309 generated_iseq
[code_index
++] = BIN(pop
);
2311 else if (diff
< 0) {
2312 int label_no
= adjust
->label
? adjust
->label
->label_no
: -1;
2313 xfree(generated_iseq
);
2317 COMPILE_ERROR(iseq
, adjust
->line_no
,
2318 "iseq_set_sequence: adjust bug to %d %d < %d",
2319 label_no
, orig_sp
, sp
);
2332 body
->iseq_encoded
= (void *)generated_iseq
;
2333 body
->iseq_size
= code_index
;
2334 body
->stack_max
= stack_max
;
2336 /* get rid of memory leak when REALLOC failed */
2337 body
->insns_info
.body
= insns_info
;
2338 body
->insns_info
.positions
= positions
;
2340 REALLOC_N(insns_info
, struct iseq_insn_info_entry
, insns_info_index
);
2341 body
->insns_info
.body
= insns_info
;
2342 REALLOC_N(positions
, unsigned int, insns_info_index
);
2343 body
->insns_info
.positions
= positions
;
2344 body
->insns_info
.size
= insns_info_index
;
2350 label_get_position(LABEL
*lobj
)
2352 return lobj
->position
;
2356 label_get_sp(LABEL
*lobj
)
2362 iseq_set_exception_table(rb_iseq_t
*iseq
)
2364 const VALUE
*tptr
, *ptr
;
2365 unsigned int tlen
, i
;
2366 struct iseq_catch_table_entry
*entry
;
2368 if (NIL_P(ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
)) goto no_catch_table
;
2369 tlen
= (int)RARRAY_LEN(ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
);
2370 tptr
= RARRAY_CONST_PTR_TRANSIENT(ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
);
2373 struct iseq_catch_table
*table
= xmalloc(iseq_catch_table_bytes(tlen
));
2376 for (i
= 0; i
< table
->size
; i
++) {
2377 ptr
= RARRAY_CONST_PTR_TRANSIENT(tptr
[i
]);
2378 entry
= UNALIGNED_MEMBER_PTR(table
, entries
[i
]);
2379 entry
->type
= (enum catch_type
)(ptr
[0] & 0xffff);
2380 entry
->start
= label_get_position((LABEL
*)(ptr
[1] & ~1));
2381 entry
->end
= label_get_position((LABEL
*)(ptr
[2] & ~1));
2382 entry
->iseq
= (rb_iseq_t
*)ptr
[3];
2383 RB_OBJ_WRITTEN(iseq
, Qundef
, entry
->iseq
);
2387 LABEL
*lobj
= (LABEL
*)(ptr
[4] & ~1);
2388 entry
->cont
= label_get_position(lobj
);
2389 entry
->sp
= label_get_sp(lobj
);
2391 /* TODO: Dirty Hack! Fix me */
2392 if (entry
->type
== CATCH_TYPE_RESCUE
||
2393 entry
->type
== CATCH_TYPE_BREAK
||
2394 entry
->type
== CATCH_TYPE_NEXT
) {
2402 iseq
->body
->catch_table
= table
;
2403 RB_OBJ_WRITE(iseq
, &ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
, 0); /* free */
2407 iseq
->body
->catch_table
= NULL
;
2414 * set optional argument table
2415 * def foo(a, b=expr1, c=expr2)
2423 iseq_set_optargs_table(rb_iseq_t
*iseq
)
2426 VALUE
*opt_table
= (VALUE
*)iseq
->body
->param
.opt_table
;
2428 if (iseq
->body
->param
.flags
.has_opt
) {
2429 for (i
= 0; i
< iseq
->body
->param
.opt_num
+ 1; i
++) {
2430 opt_table
[i
] = label_get_position((LABEL
*)opt_table
[i
]);
2436 static LINK_ELEMENT
*
2437 get_destination_insn(INSN
*iobj
)
2439 LABEL
*lobj
= (LABEL
*)OPERAND_AT(iobj
, 0);
2441 rb_event_flag_t events
= 0;
2443 list
= lobj
->link
.next
;
2445 switch (list
->type
) {
2446 case ISEQ_ELEMENT_INSN
:
2447 case ISEQ_ELEMENT_ADJUST
:
2449 case ISEQ_ELEMENT_LABEL
:
2452 case ISEQ_ELEMENT_TRACE
:
2454 TRACE
*trace
= (TRACE
*)list
;
2455 events
|= trace
->event
;
2463 if (list
&& IS_INSN(list
)) {
2464 INSN
*iobj
= (INSN
*)list
;
2465 iobj
->insn_info
.events
|= events
;
2470 static LINK_ELEMENT
*
2471 get_next_insn(INSN
*iobj
)
2473 LINK_ELEMENT
*list
= iobj
->link
.next
;
2476 if (IS_INSN(list
) || IS_ADJUST(list
)) {
2484 static LINK_ELEMENT
*
2485 get_prev_insn(INSN
*iobj
)
2487 LINK_ELEMENT
*list
= iobj
->link
.prev
;
2490 if (IS_INSN(list
) || IS_ADJUST(list
)) {
2499 unref_destination(INSN
*iobj
, int pos
)
2501 LABEL
*lobj
= (LABEL
*)OPERAND_AT(iobj
, pos
);
2503 if (!lobj
->refcnt
) ELEM_REMOVE(&lobj
->link
);
2507 replace_destination(INSN
*dobj
, INSN
*nobj
)
2509 VALUE n
= OPERAND_AT(nobj
, 0);
2510 LABEL
*dl
= (LABEL
*)OPERAND_AT(dobj
, 0);
2511 LABEL
*nl
= (LABEL
*)n
;
2514 OPERAND_AT(dobj
, 0) = n
;
2515 if (!dl
->refcnt
) ELEM_REMOVE(&dl
->link
);
2519 find_destination(INSN
*i
)
2521 int pos
, len
= insn_len(i
->insn_id
);
2522 for (pos
= 0; pos
< len
; ++pos
) {
2523 if (insn_op_types(i
->insn_id
)[pos
] == TS_OFFSET
) {
2524 return (LABEL
*)OPERAND_AT(i
, pos
);
2531 remove_unreachable_chunk(rb_iseq_t
*iseq
, LINK_ELEMENT
*i
)
2533 LINK_ELEMENT
*first
= i
, *end
;
2534 int *unref_counts
= 0, nlabels
= ISEQ_COMPILE_DATA(iseq
)->label_no
;
2537 unref_counts
= ALLOCA_N(int, nlabels
);
2538 MEMZERO(unref_counts
, int, nlabels
);
2543 if (IS_INSN_ID(i
, leave
)) {
2547 else if ((lab
= find_destination((INSN
*)i
)) != 0) {
2548 if (lab
->unremovable
) break;
2549 unref_counts
[lab
->label_no
]++;
2552 else if (IS_LABEL(i
)) {
2554 if (lab
->unremovable
) return 0;
2555 if (lab
->refcnt
> unref_counts
[lab
->label_no
]) {
2556 if (i
== first
) return 0;
2561 else if (IS_TRACE(i
)) {
2564 else if (IS_ADJUST(i
)) {
2565 LABEL
*dest
= ((ADJUST
*)i
)->label
;
2566 if (dest
&& dest
->unremovable
) return 0;
2569 } while ((i
= i
->next
) != 0);
2573 struct rb_iseq_constant_body
*body
= iseq
->body
;
2574 VALUE insn
= INSN_OF(i
);
2575 int pos
, len
= insn_len(insn
);
2576 for (pos
= 0; pos
< len
; ++pos
) {
2577 switch (insn_op_types(insn
)[pos
]) {
2579 unref_destination((INSN
*)i
, pos
);
2588 } while ((i
!= end
) && (i
= i
->next
) != 0);
2593 iseq_pop_newarray(rb_iseq_t
*iseq
, INSN
*iobj
)
2595 switch (OPERAND_AT(iobj
, 0)) {
2596 case INT2FIX(0): /* empty array */
2597 ELEM_REMOVE(&iobj
->link
);
2599 case INT2FIX(1): /* single element array */
2600 ELEM_REMOVE(&iobj
->link
);
2603 iobj
->insn_id
= BIN(adjuststack
);
2609 same_debug_pos_p(LINK_ELEMENT
*iobj1
, LINK_ELEMENT
*iobj2
)
2611 VALUE debug1
= OPERAND_AT(iobj1
, 0);
2612 VALUE debug2
= OPERAND_AT(iobj2
, 0);
2613 if (debug1
== debug2
) return TRUE
;
2614 if (!RB_TYPE_P(debug1
, T_ARRAY
)) return FALSE
;
2615 if (!RB_TYPE_P(debug2
, T_ARRAY
)) return FALSE
;
2616 if (RARRAY_LEN(debug1
) != 2) return FALSE
;
2617 if (RARRAY_LEN(debug2
) != 2) return FALSE
;
2618 if (RARRAY_AREF(debug1
, 0) != RARRAY_AREF(debug2
, 0)) return FALSE
;
2619 if (RARRAY_AREF(debug1
, 1) != RARRAY_AREF(debug2
, 1)) return FALSE
;
2624 is_frozen_putstring(INSN
*insn
, VALUE
*op
)
2626 if (IS_INSN_ID(insn
, putstring
)) {
2627 *op
= OPERAND_AT(insn
, 0);
2630 else if (IS_INSN_ID(insn
, putobject
)) { /* frozen_string_literal */
2631 *op
= OPERAND_AT(insn
, 0);
2632 return RB_TYPE_P(*op
, T_STRING
);
2638 optimize_checktype(rb_iseq_t
*iseq
, INSN
*iobj
)
2651 * putobject obj (T_XXX)
2655 * => obj is not a T_XXX
2657 * putobject obj (T_XXX)
2662 INSN
*niobj
, *ciobj
, *dup
= 0;
2666 switch (INSN_OF(iobj
)) {
2667 case BIN(putstring
):
2668 type
= INT2FIX(T_STRING
);
2671 type
= INT2FIX(T_NIL
);
2673 case BIN(putobject
):
2674 type
= INT2FIX(TYPE(OPERAND_AT(iobj
, 0)));
2676 default: return FALSE
;
2679 ciobj
= (INSN
*)get_next_insn(iobj
);
2680 if (IS_INSN_ID(ciobj
, jump
)) {
2681 ciobj
= (INSN
*)get_next_insn((INSN
*)OPERAND_AT(ciobj
, 0));
2683 if (IS_INSN_ID(ciobj
, dup
)) {
2684 ciobj
= (INSN
*)get_next_insn(dup
= ciobj
);
2686 if (!ciobj
|| !IS_INSN_ID(ciobj
, checktype
)) return FALSE
;
2687 niobj
= (INSN
*)get_next_insn(ciobj
);
2690 /* TODO: putobject true/false */
2693 switch (INSN_OF(niobj
)) {
2695 if (OPERAND_AT(ciobj
, 0) == type
) {
2696 dest
= (LABEL
*)OPERAND_AT(niobj
, 0);
2699 case BIN(branchunless
):
2700 if (OPERAND_AT(ciobj
, 0) != type
) {
2701 dest
= (LABEL
*)OPERAND_AT(niobj
, 0);
2707 line
= ciobj
->insn_info
.line_no
;
2709 if (niobj
->link
.next
&& IS_LABEL(niobj
->link
.next
)) {
2710 dest
= (LABEL
*)niobj
->link
.next
; /* reuse label */
2713 dest
= NEW_LABEL(line
);
2714 ELEM_INSERT_NEXT(&niobj
->link
, &dest
->link
);
2717 INSERT_AFTER_INSN1(iobj
, line
, jump
, dest
);
2719 if (!dup
) INSERT_AFTER_INSN(iobj
, line
, pop
);
2723 static const struct rb_callinfo
*
2724 ci_flag_set(const rb_iseq_t
*iseq
, const struct rb_callinfo
*ci
, unsigned int add
)
2726 const struct rb_callinfo
*nci
= vm_ci_new(vm_ci_mid(ci
),
2727 vm_ci_flag(ci
) | add
,
2730 RB_OBJ_WRITTEN(iseq
, ci
, nci
);
2734 static const struct rb_callinfo
*
2735 ci_argc_set(const rb_iseq_t
*iseq
, const struct rb_callinfo
*ci
, int argc
)
2737 const struct rb_callinfo
*nci
= vm_ci_new(vm_ci_mid(ci
),
2741 RB_OBJ_WRITTEN(iseq
, ci
, nci
);
2746 iseq_peephole_optimize(rb_iseq_t
*iseq
, LINK_ELEMENT
*list
, const int do_tailcallopt
)
2748 INSN
*const iobj
= (INSN
*)list
;
2751 optimize_checktype(iseq
, iobj
);
2753 if (IS_INSN_ID(iobj
, jump
)) {
2754 INSN
*niobj
, *diobj
, *piobj
;
2755 diobj
= (INSN
*)get_destination_insn(iobj
);
2756 niobj
= (INSN
*)get_next_insn(iobj
);
2758 if (diobj
== niobj
) {
2765 unref_destination(iobj
, 0);
2766 ELEM_REMOVE(&iobj
->link
);
2769 else if (iobj
!= diobj
&& IS_INSN_ID(diobj
, jump
) &&
2770 OPERAND_AT(iobj
, 0) != OPERAND_AT(diobj
, 0)) {
2772 * useless jump elimination:
2778 * => in this case, first jump instruction should jump to
2781 replace_destination(iobj
, diobj
);
2782 remove_unreachable_chunk(iseq
, iobj
->link
.next
);
2785 else if (IS_INSN_ID(diobj
, leave
)) {
2800 unref_destination(iobj
, 0);
2801 iobj
->insn_id
= BIN(leave
);
2802 iobj
->operand_size
= 0;
2803 iobj
->insn_info
= diobj
->insn_info
;
2804 /* adjust stack depth */
2805 pop
= new_insn_body(iseq
, diobj
->insn_info
.line_no
, BIN(pop
), 0);
2806 ELEM_INSERT_NEXT(&iobj
->link
, &pop
->link
);
2809 else if (IS_INSN(iobj
->link
.prev
) &&
2810 (piobj
= (INSN
*)iobj
->link
.prev
) &&
2811 (IS_INSN_ID(piobj
, branchif
) ||
2812 IS_INSN_ID(piobj
, branchunless
))) {
2813 INSN
*pdiobj
= (INSN
*)get_destination_insn(piobj
);
2814 if (niobj
== pdiobj
) {
2815 int refcnt
= IS_LABEL(piobj
->link
.next
) ?
2816 ((LABEL
*)piobj
->link
.next
)->refcnt
: 0;
2818 * useless jump elimination (if/unless destination):
2831 piobj
->insn_id
= (IS_INSN_ID(piobj
, branchif
))
2832 ? BIN(branchunless
) : BIN(branchif
);
2833 replace_destination(piobj
, iobj
);
2835 ELEM_REMOVE(&iobj
->link
);
2838 /* TODO: replace other branch destinations too */
2842 else if (diobj
== pdiobj
) {
2844 * useless jump elimination (if/unless before jump):
2856 INSN
*popiobj
= new_insn_core(iseq
, iobj
->insn_info
.line_no
,
2858 ELEM_REPLACE(&piobj
->link
, &popiobj
->link
);
2861 if (remove_unreachable_chunk(iseq
, iobj
->link
.next
)) {
2873 * putobject "beg".."end"
2875 if (IS_INSN_ID(iobj
, checkmatch
)) {
2876 INSN
*range
= (INSN
*)get_prev_insn(iobj
);
2878 VALUE str_beg
, str_end
;
2880 if (range
&& IS_INSN_ID(range
, newrange
) &&
2881 (end
= (INSN
*)get_prev_insn(range
)) != 0 &&
2882 is_frozen_putstring(end
, &str_end
) &&
2883 (beg
= (INSN
*)get_prev_insn(end
)) != 0 &&
2884 is_frozen_putstring(beg
, &str_beg
)) {
2885 int excl
= FIX2INT(OPERAND_AT(range
, 0));
2886 VALUE lit_range
= rb_range_new(str_beg
, str_end
, excl
);
2888 ELEM_REMOVE(&beg
->link
);
2889 ELEM_REMOVE(&end
->link
);
2890 range
->insn_id
= BIN(putobject
);
2891 OPERAND_AT(range
, 0) = lit_range
;
2892 RB_OBJ_WRITTEN(iseq
, Qundef
, lit_range
);
2896 if (IS_INSN_ID(iobj
, leave
)) {
2897 remove_unreachable_chunk(iseq
, iobj
->link
.next
);
2900 if (IS_INSN_ID(iobj
, branchif
) ||
2901 IS_INSN_ID(iobj
, branchnil
) ||
2902 IS_INSN_ID(iobj
, branchunless
)) {
2911 INSN
*nobj
= (INSN
*)get_destination_insn(iobj
);
2913 /* This is super nasty hack!!!
2915 * This jump-jump optimization may ignore event flags of the jump
2916 * instruction being skipped. Actually, Line 2 TracePoint event
2917 * is never fired in the following code:
2919 * 1: raise if 1 == 2
2924 * This is critical for coverage measurement. [Bug #15980]
2926 * This is a stopgap measure: stop the jump-jump optimization if
2927 * coverage measurement is enabled and if the skipped instruction
2928 * has any event flag.
2930 * Note that, still, TracePoint Line event does not occur on Line 2.
2931 * This should be fixed in future.
2933 int stop_optimization
=
2934 ISEQ_COVERAGE(iseq
) && ISEQ_LINE_COVERAGE(iseq
) &&
2935 nobj
->insn_info
.events
;
2936 if (!stop_optimization
) {
2937 INSN
*pobj
= (INSN
*)iobj
->link
.prev
;
2940 if (!IS_INSN(&pobj
->link
))
2942 else if (IS_INSN_ID(pobj
, dup
))
2947 if (IS_INSN_ID(nobj
, jump
)) {
2948 replace_destination(iobj
, nobj
);
2950 else if (prev_dup
&& IS_INSN_ID(nobj
, dup
) &&
2951 !!(nobj
= (INSN
*)nobj
->link
.next
) &&
2952 /* basic blocks, with no labels in the middle */
2953 nobj
->insn_id
== iobj
->insn_id
) {
2969 replace_destination(iobj
, nobj
);
2997 if (prev_dup
&& IS_INSN(pobj
->link
.prev
)) {
2998 pobj
= (INSN
*)pobj
->link
.prev
;
3000 if (IS_INSN_ID(pobj
, putobject
)) {
3001 cond
= (IS_INSN_ID(iobj
, branchif
) ?
3002 OPERAND_AT(pobj
, 0) != Qfalse
:
3003 IS_INSN_ID(iobj
, branchunless
) ?
3004 OPERAND_AT(pobj
, 0) == Qfalse
:
3007 else if (IS_INSN_ID(pobj
, putstring
) ||
3008 IS_INSN_ID(pobj
, duparray
) ||
3009 IS_INSN_ID(pobj
, newarray
)) {
3010 cond
= IS_INSN_ID(iobj
, branchif
);
3012 else if (IS_INSN_ID(pobj
, putnil
)) {
3013 cond
= !IS_INSN_ID(iobj
, branchif
);
3016 if (prev_dup
|| !IS_INSN_ID(pobj
, newarray
)) {
3017 ELEM_REMOVE(iobj
->link
.prev
);
3019 else if (!iseq_pop_newarray(iseq
, pobj
)) {
3020 pobj
= new_insn_core(iseq
, pobj
->insn_info
.line_no
, BIN(pop
), 0, NULL
);
3021 ELEM_INSERT_PREV(&iobj
->link
, &pobj
->link
);
3025 pobj
= new_insn_core(iseq
, pobj
->insn_info
.line_no
, BIN(putnil
), 0, NULL
);
3026 ELEM_INSERT_NEXT(&iobj
->link
, &pobj
->link
);
3028 iobj
->insn_id
= BIN(jump
);
3032 unref_destination(iobj
, 0);
3033 ELEM_REMOVE(&iobj
->link
);
3038 nobj
= (INSN
*)get_destination_insn(nobj
);
3043 if (IS_INSN_ID(iobj
, pop
)) {
3045 * putself / putnil / putobject obj / putstring "..."
3050 LINK_ELEMENT
*prev
= iobj
->link
.prev
;
3051 if (IS_INSN(prev
)) {
3052 enum ruby_vminsn_type previ
= ((INSN
*)prev
)->insn_id
;
3053 if (previ
== BIN(putobject
) || previ
== BIN(putnil
) ||
3054 previ
== BIN(putself
) || previ
== BIN(putstring
) ||
3055 previ
== BIN(dup
) ||
3056 previ
== BIN(getlocal
) ||
3057 previ
== BIN(getblockparam
) ||
3058 previ
== BIN(getblockparamproxy
) ||
3059 /* getinstancevariable may issue a warning */
3060 previ
== BIN(duparray
)) {
3061 /* just push operand or static value and pop soon, no
3064 ELEM_REMOVE(&iobj
->link
);
3066 else if (previ
== BIN(newarray
) && iseq_pop_newarray(iseq
, (INSN
*)prev
)) {
3067 ELEM_REMOVE(&iobj
->link
);
3069 else if (previ
== BIN(concatarray
)) {
3070 INSN
*piobj
= (INSN
*)prev
;
3071 INSERT_BEFORE_INSN1(piobj
, piobj
->insn_info
.line_no
, splatarray
, Qfalse
);
3072 INSN_OF(piobj
) = BIN(pop
);
3074 else if (previ
== BIN(concatstrings
)) {
3075 if (OPERAND_AT(prev
, 0) == INT2FIX(1)) {
3079 ELEM_REMOVE(&iobj
->link
);
3080 INSN_OF(prev
) = BIN(adjuststack
);
3086 if (IS_INSN_ID(iobj
, newarray
) ||
3087 IS_INSN_ID(iobj
, duparray
) ||
3088 IS_INSN_ID(iobj
, expandarray
) ||
3089 IS_INSN_ID(iobj
, concatarray
) ||
3090 IS_INSN_ID(iobj
, splatarray
) ||
3097 * newarray always puts an array
3099 LINK_ELEMENT
*next
= iobj
->link
.next
;
3100 if (IS_INSN(next
) && IS_INSN_ID(next
, splatarray
)) {
3101 /* remove splatarray following always-array insn */
3106 if (IS_INSN_ID(iobj
, tostring
)) {
3107 LINK_ELEMENT
*next
= iobj
->link
.next
;
3114 if (IS_INSN(next
) && IS_INSN_ID(next
, concatstrings
) &&
3115 OPERAND_AT(next
, 0) == INT2FIX(1)) {
3120 if (IS_INSN_ID(iobj
, putstring
) ||
3121 (IS_INSN_ID(iobj
, putobject
) && RB_TYPE_P(OPERAND_AT(iobj
, 0), T_STRING
))) {
3128 if (IS_NEXT_INSN_ID(&iobj
->link
, concatstrings
) &&
3129 RSTRING_LEN(OPERAND_AT(iobj
, 0)) == 0) {
3130 INSN
*next
= (INSN
*)iobj
->link
.next
;
3131 if ((OPERAND_AT(next
, 0) = FIXNUM_INC(OPERAND_AT(next
, 0), -1)) == INT2FIX(1)) {
3132 ELEM_REMOVE(&next
->link
);
3134 ELEM_REMOVE(&iobj
->link
);
3138 if (IS_INSN_ID(iobj
, concatstrings
)) {
3143 * concatstrings N+M-1
3145 LINK_ELEMENT
*next
= iobj
->link
.next
, *freeze
= 0;
3147 if (IS_INSN(next
) && IS_INSN_ID(next
, freezestring
))
3148 next
= (freeze
= next
)->next
;
3149 if (IS_INSN(next
) && IS_INSN_ID(next
, jump
))
3150 next
= get_destination_insn(jump
= (INSN
*)next
);
3151 if (IS_INSN(next
) && IS_INSN_ID(next
, concatstrings
)) {
3152 int n
= FIX2INT(OPERAND_AT(iobj
, 0)) + FIX2INT(OPERAND_AT(next
, 0)) - 1;
3153 OPERAND_AT(iobj
, 0) = INT2FIX(n
);
3155 LABEL
*label
= ((LABEL
*)OPERAND_AT(jump
, 0));
3156 if (!--label
->refcnt
) {
3157 ELEM_REMOVE(&label
->link
);
3160 label
= NEW_LABEL(0);
3161 OPERAND_AT(jump
, 0) = (VALUE
)label
;
3164 if (freeze
&& IS_NEXT_INSN_ID(next
, freezestring
)) {
3165 if (same_debug_pos_p(freeze
, next
->next
)) {
3166 ELEM_REMOVE(freeze
);
3172 ELEM_INSERT_NEXT(next
, &label
->link
);
3173 CHECK(iseq_peephole_optimize(iseq
, get_next_insn(jump
), do_tailcallopt
));
3176 if (freeze
) ELEM_REMOVE(freeze
);
3182 if (IS_INSN_ID(iobj
, freezestring
) &&
3183 NIL_P(OPERAND_AT(iobj
, 0)) &&
3184 IS_NEXT_INSN_ID(&iobj
->link
, send
)) {
3185 INSN
*niobj
= (INSN
*)iobj
->link
.next
;
3186 const struct rb_callinfo
*ci
= (struct rb_callinfo
*)OPERAND_AT(niobj
, 0);
3189 * freezestring nil # no debug_info
3190 * send <:+@, 0, ARG_SIMPLE> # :-@, too
3192 * send <:+@, 0, ARG_SIMPLE> # :-@, too
3194 if ((vm_ci_mid(ci
) == idUPlus
|| vm_ci_mid(ci
) == idUMinus
) &&
3195 (vm_ci_flag(ci
) & VM_CALL_ARGS_SIMPLE
) &&
3196 vm_ci_argc(ci
) == 0) {
3202 if (do_tailcallopt
&&
3203 (IS_INSN_ID(iobj
, send
) ||
3204 IS_INSN_ID(iobj
, opt_aref_with
) ||
3205 IS_INSN_ID(iobj
, opt_aset_with
) ||
3206 IS_INSN_ID(iobj
, invokesuper
))) {
3211 * send ..., ... | VM_CALL_TAILCALL, ...
3212 * leave # unreachable
3215 if (iobj
->link
.next
) {
3216 LINK_ELEMENT
*next
= iobj
->link
.next
;
3218 if (!IS_INSN(next
)) {
3222 switch (INSN_OF(next
)) {
3231 next
= get_destination_insn((INSN
*)next
);
3244 const struct rb_callinfo
*ci
= (struct rb_callinfo
*)OPERAND_AT(piobj
, 0);
3245 if (IS_INSN_ID(piobj
, send
) ||
3246 IS_INSN_ID(piobj
, invokesuper
)) {
3247 if (OPERAND_AT(piobj
, 1) == 0) { /* no blockiseq */
3248 ci
= ci_flag_set(iseq
, ci
, VM_CALL_TAILCALL
);
3249 OPERAND_AT(piobj
, 0) = (VALUE
)ci
;
3250 RB_OBJ_WRITTEN(iseq
, Qundef
, ci
);
3254 ci
= ci_flag_set(iseq
, ci
, VM_CALL_TAILCALL
);
3255 OPERAND_AT(piobj
, 0) = (VALUE
)ci
;
3256 RB_OBJ_WRITTEN(iseq
, Qundef
, ci
);
3261 if (IS_INSN_ID(iobj
, dup
)) {
3262 if (IS_NEXT_INSN_ID(&iobj
->link
, setlocal
)) {
3263 LINK_ELEMENT
*set1
= iobj
->link
.next
, *set2
= NULL
;
3264 if (IS_NEXT_INSN_ID(set1
, setlocal
)) {
3266 if (OPERAND_AT(set1
, 0) == OPERAND_AT(set2
, 0) &&
3267 OPERAND_AT(set1
, 1) == OPERAND_AT(set2
, 1)) {
3269 ELEM_REMOVE(&iobj
->link
);
3272 else if (IS_NEXT_INSN_ID(set1
, dup
) &&
3273 IS_NEXT_INSN_ID(set1
->next
, setlocal
)) {
3274 set2
= set1
->next
->next
;
3275 if (OPERAND_AT(set1
, 0) == OPERAND_AT(set2
, 0) &&
3276 OPERAND_AT(set1
, 1) == OPERAND_AT(set2
, 1)) {
3277 ELEM_REMOVE(set1
->next
);
3284 if (IS_INSN_ID(iobj
, getlocal
)) {
3285 LINK_ELEMENT
*niobj
= &iobj
->link
;
3286 if (IS_NEXT_INSN_ID(niobj
, dup
)) {
3287 niobj
= niobj
->next
;
3289 if (IS_NEXT_INSN_ID(niobj
, setlocal
)) {
3290 LINK_ELEMENT
*set1
= niobj
->next
;
3291 if (OPERAND_AT(iobj
, 0) == OPERAND_AT(set1
, 0) &&
3292 OPERAND_AT(iobj
, 1) == OPERAND_AT(set1
, 1)) {
3299 if (IS_INSN_ID(iobj
, opt_invokebuiltin_delegate
)) {
3300 if (IS_TRACE(iobj
->link
.next
)) {
3301 if (IS_NEXT_INSN_ID(iobj
->link
.next
, leave
)) {
3302 iobj
->insn_id
= BIN(opt_invokebuiltin_delegate_leave
);
3311 insn_set_specialized_instruction(rb_iseq_t
*iseq
, INSN
*iobj
, int insn_id
)
3313 iobj
->insn_id
= insn_id
;
3314 iobj
->operand_size
= insn_len(insn_id
) - 1;
3316 if (insn_id
== BIN(opt_neq
)) {
3317 VALUE
*old_operands
= iobj
->operands
;
3318 iobj
->operand_size
= 2;
3319 iobj
->operands
= compile_data_calloc2(iseq
, iobj
->operand_size
, sizeof(VALUE
));
3320 iobj
->operands
[0] = (VALUE
)new_callinfo(iseq
, idEq
, 1, 0, NULL
, FALSE
);
3321 iobj
->operands
[1] = old_operands
[0];
3328 iseq_specialized_instruction(rb_iseq_t
*iseq
, INSN
*iobj
)
3330 if (IS_INSN_ID(iobj
, newarray
) && iobj
->link
.next
&&
3331 IS_INSN(iobj
->link
.next
)) {
3333 * [a, b, ...].max/min -> a, b, c, opt_newarray_max/min
3335 INSN
*niobj
= (INSN
*)iobj
->link
.next
;
3336 if (IS_INSN_ID(niobj
, send
)) {
3337 const struct rb_callinfo
*ci
= (struct rb_callinfo
*)OPERAND_AT(niobj
, 0);
3338 if ((vm_ci_flag(ci
) & VM_CALL_ARGS_SIMPLE
) && vm_ci_argc(ci
) == 0) {
3339 switch (vm_ci_mid(ci
)) {
3341 iobj
->insn_id
= BIN(opt_newarray_max
);
3342 ELEM_REMOVE(&niobj
->link
);
3345 iobj
->insn_id
= BIN(opt_newarray_min
);
3346 ELEM_REMOVE(&niobj
->link
);
3353 if (IS_INSN_ID(iobj
, send
)) {
3354 const struct rb_callinfo
*ci
= (struct rb_callinfo
*)OPERAND_AT(iobj
, 0);
3355 const rb_iseq_t
*blockiseq
= (rb_iseq_t
*)OPERAND_AT(iobj
, 1);
3357 #define SP_INSN(opt) insn_set_specialized_instruction(iseq, iobj, BIN(opt_##opt))
3358 if (vm_ci_flag(ci
) & VM_CALL_ARGS_SIMPLE
) {
3359 switch (vm_ci_argc(ci
)) {
3361 switch (vm_ci_mid(ci
)) {
3362 case idLength
: SP_INSN(length
); return COMPILE_OK
;
3363 case idSize
: SP_INSN(size
); return COMPILE_OK
;
3364 case idEmptyP
: SP_INSN(empty_p
);return COMPILE_OK
;
3365 case idNilP
: SP_INSN(nil_p
); return COMPILE_OK
;
3366 case idSucc
: SP_INSN(succ
); return COMPILE_OK
;
3367 case idNot
: SP_INSN(not); return COMPILE_OK
;
3371 switch (vm_ci_mid(ci
)) {
3372 case idPLUS
: SP_INSN(plus
); return COMPILE_OK
;
3373 case idMINUS
: SP_INSN(minus
); return COMPILE_OK
;
3374 case idMULT
: SP_INSN(mult
); return COMPILE_OK
;
3375 case idDIV
: SP_INSN(div
); return COMPILE_OK
;
3376 case idMOD
: SP_INSN(mod
); return COMPILE_OK
;
3377 case idEq
: SP_INSN(eq
); return COMPILE_OK
;
3378 case idNeq
: SP_INSN(neq
); return COMPILE_OK
;
3379 case idEqTilde
:SP_INSN(regexpmatch2
);return COMPILE_OK
;
3380 case idLT
: SP_INSN(lt
); return COMPILE_OK
;
3381 case idLE
: SP_INSN(le
); return COMPILE_OK
;
3382 case idGT
: SP_INSN(gt
); return COMPILE_OK
;
3383 case idGE
: SP_INSN(ge
); return COMPILE_OK
;
3384 case idLTLT
: SP_INSN(ltlt
); return COMPILE_OK
;
3385 case idAREF
: SP_INSN(aref
); return COMPILE_OK
;
3386 case idAnd
: SP_INSN(and); return COMPILE_OK
;
3387 case idOr
: SP_INSN(or); return COMPILE_OK
;
3391 switch (vm_ci_mid(ci
)) {
3392 case idASET
: SP_INSN(aset
); return COMPILE_OK
;
3398 if ((vm_ci_flag(ci
) & VM_CALL_ARGS_BLOCKARG
) == 0 && blockiseq
== NULL
) {
3399 iobj
->insn_id
= BIN(opt_send_without_block
);
3400 iobj
->operand_size
= insn_len(iobj
->insn_id
) - 1;
3409 tailcallable_p(rb_iseq_t
*iseq
)
3411 switch (iseq
->body
->type
) {
3413 case ISEQ_TYPE_EVAL
:
3414 case ISEQ_TYPE_MAIN
:
3415 /* not tail callable because cfp will be over popped */
3416 case ISEQ_TYPE_RESCUE
:
3417 case ISEQ_TYPE_ENSURE
:
3418 /* rescue block can't tail call because of errinfo */
3426 iseq_optimize(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
3429 const int do_peepholeopt
= ISEQ_COMPILE_DATA(iseq
)->option
->peephole_optimization
;
3430 const int do_tailcallopt
= tailcallable_p(iseq
) &&
3431 ISEQ_COMPILE_DATA(iseq
)->option
->tailcall_optimization
;
3432 const int do_si
= ISEQ_COMPILE_DATA(iseq
)->option
->specialized_instruction
;
3433 const int do_ou
= ISEQ_COMPILE_DATA(iseq
)->option
->operands_unification
;
3434 int rescue_level
= 0;
3435 int tailcallopt
= do_tailcallopt
;
3437 list
= FIRST_ELEMENT(anchor
);
3440 if (IS_INSN(list
)) {
3441 if (do_peepholeopt
) {
3442 iseq_peephole_optimize(iseq
, list
, tailcallopt
);
3445 iseq_specialized_instruction(iseq
, (INSN
*)list
);
3448 insn_operands_unification((INSN
*)list
);
3451 if (IS_LABEL(list
)) {
3452 switch (((LABEL
*)list
)->rescued
) {
3453 case LABEL_RESCUE_BEG
:
3455 tailcallopt
= FALSE
;
3457 case LABEL_RESCUE_END
:
3458 if (!--rescue_level
) tailcallopt
= do_tailcallopt
;
3467 #if OPT_INSTRUCTIONS_UNIFICATION
3469 new_unified_insn(rb_iseq_t
*iseq
,
3470 int insn_id
, int size
, LINK_ELEMENT
*seq_list
)
3473 LINK_ELEMENT
*list
= seq_list
;
3475 VALUE
*operands
= 0, *ptr
= 0;
3479 for (i
= 0; i
< size
; i
++) {
3480 iobj
= (INSN
*)list
;
3481 argc
+= iobj
->operand_size
;
3486 ptr
= operands
= compile_data_alloc2(iseq
, sizeof(VALUE
), argc
);
3491 for (i
= 0; i
< size
; i
++) {
3492 iobj
= (INSN
*)list
;
3493 MEMCPY(ptr
, iobj
->operands
, VALUE
, iobj
->operand_size
);
3494 ptr
+= iobj
->operand_size
;
3498 return new_insn_core(iseq
, iobj
->insn_info
.line_no
, insn_id
, argc
, operands
);
3503 * This scheme can get more performance if do this optimize with
3504 * label address resolving.
3505 * It's future work (if compile time was bottle neck).
3508 iseq_insns_unification(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
3510 #if OPT_INSTRUCTIONS_UNIFICATION
3516 list
= FIRST_ELEMENT(anchor
);
3518 if (IS_INSN(list
)) {
3519 iobj
= (INSN
*)list
;
3521 if (unified_insns_data
[id
] != 0) {
3522 const int *const *entry
= unified_insns_data
[id
];
3523 for (j
= 1; j
< (intptr_t)entry
[0]; j
++) {
3524 const int *unified
= entry
[j
];
3525 LINK_ELEMENT
*li
= list
->next
;
3526 for (k
= 2; k
< unified
[1]; k
++) {
3528 ((INSN
*)li
)->insn_id
!= unified
[k
]) {
3535 new_unified_insn(iseq
, unified
[0], unified
[1] - 1,
3538 /* insert to list */
3539 niobj
->link
.prev
= (LINK_ELEMENT
*)iobj
->link
.prev
;
3540 niobj
->link
.next
= li
;
3542 li
->prev
= (LINK_ELEMENT
*)niobj
;
3545 list
->prev
->next
= (LINK_ELEMENT
*)niobj
;
3546 list
= (LINK_ELEMENT
*)niobj
;
3558 #if OPT_STACK_CACHING
3560 #define SC_INSN(insn, stat) sc_insn_info[(insn)][(stat)]
3561 #define SC_NEXT(insn) sc_insn_next[(insn)]
3563 #include "opt_sc.inc"
3566 insn_set_sc_state(rb_iseq_t
*iseq
, const LINK_ELEMENT
*anchor
, INSN
*iobj
, int state
)
3571 insn_id
= iobj
->insn_id
;
3572 iobj
->insn_id
= SC_INSN(insn_id
, state
);
3573 nstate
= SC_NEXT(iobj
->insn_id
);
3575 if (insn_id
== BIN(jump
) ||
3576 insn_id
== BIN(branchif
) || insn_id
== BIN(branchunless
)) {
3577 LABEL
*lobj
= (LABEL
*)OPERAND_AT(iobj
, 0);
3579 if (lobj
->sc_state
!= 0) {
3580 if (lobj
->sc_state
!= nstate
) {
3581 BADINSN_DUMP(anchor
, iobj
, lobj
);
3582 COMPILE_ERROR(iseq
, iobj
->insn_info
.line_no
,
3583 "insn_set_sc_state error: %d at "LABEL_FORMAT
3585 lobj
->sc_state
, lobj
->label_no
, nstate
);
3590 lobj
->sc_state
= nstate
;
3592 if (insn_id
== BIN(jump
)) {
3596 else if (insn_id
== BIN(leave
)) {
3604 label_set_sc_state(LABEL
*lobj
, int state
)
3606 if (lobj
->sc_state
!= 0) {
3607 if (lobj
->sc_state
!= state
) {
3608 state
= lobj
->sc_state
;
3612 lobj
->sc_state
= state
;
3622 iseq_set_sequence_stackcaching(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
3624 #if OPT_STACK_CACHING
3630 list
= FIRST_ELEMENT(anchor
);
3631 /* dump_disasm_list(list); */
3633 /* for each list element */
3636 switch (list
->type
) {
3637 case ISEQ_ELEMENT_INSN
:
3639 INSN
*iobj
= (INSN
*)list
;
3640 insn_id
= iobj
->insn_id
;
3642 /* dump_disasm_list(list); */
3647 /* exception merge point */
3648 if (state
!= SCS_AX
) {
3650 new_insn_body(iseq
, 0, BIN(reput
), 0);
3652 /* replace this insn */
3653 ELEM_REPLACE(list
, (LINK_ELEMENT
*)rpobj
);
3654 list
= (LINK_ELEMENT
*)rpobj
;
3661 if (state
== SCS_AB
|| state
== SCS_BA
) {
3662 state
= (state
== SCS_AB
? SCS_BA
: SCS_AB
);
3686 COMPILE_ERROR(iseq
, iobj
->insn_info
.line_no
,
3690 /* remove useless pop */
3697 } /* end of switch */
3699 state
= insn_set_sc_state(iseq
, anchor
, iobj
, state
);
3702 case ISEQ_ELEMENT_LABEL
:
3705 lobj
= (LABEL
*)list
;
3707 state
= label_set_sc_state(lobj
, state
);
3719 all_string_result_p(const NODE
*node
)
3721 if (!node
) return FALSE
;
3722 switch (nd_type(node
)) {
3723 case NODE_STR
: case NODE_DSTR
:
3725 case NODE_IF
: case NODE_UNLESS
:
3726 if (!node
->nd_body
|| !node
->nd_else
) return FALSE
;
3727 if (all_string_result_p(node
->nd_body
))
3728 return all_string_result_p(node
->nd_else
);
3730 case NODE_AND
: case NODE_OR
:
3732 return all_string_result_p(node
->nd_1st
);
3733 if (!all_string_result_p(node
->nd_1st
))
3735 return all_string_result_p(node
->nd_2nd
);
3742 compile_dstr_fragments(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int *cntp
)
3744 const NODE
*list
= node
->nd_next
;
3745 VALUE lit
= node
->nd_lit
;
3746 LINK_ELEMENT
*first_lit
= 0;
3749 debugp_param("nd_lit", lit
);
3752 if (!RB_TYPE_P(lit
, T_STRING
)) {
3753 COMPILE_ERROR(ERROR_ARGS
"dstr: must be string: %s",
3754 rb_builtin_type_name(TYPE(lit
)));
3757 lit
= rb_fstring(lit
);
3758 ADD_INSN1(ret
, nd_line(node
), putobject
, lit
);
3759 RB_OBJ_WRITTEN(iseq
, Qundef
, lit
);
3760 if (RSTRING_LEN(lit
) == 0) first_lit
= LAST_ELEMENT(ret
);
3764 const NODE
*const head
= list
->nd_head
;
3765 if (nd_type(head
) == NODE_STR
) {
3766 lit
= rb_fstring(head
->nd_lit
);
3767 ADD_INSN1(ret
, nd_line(head
), putobject
, lit
);
3768 RB_OBJ_WRITTEN(iseq
, Qundef
, lit
);
3772 CHECK(COMPILE(ret
, "each string", head
));
3775 list
= list
->nd_next
;
3777 if (NIL_P(lit
) && first_lit
) {
3778 ELEM_REMOVE(first_lit
);
3787 compile_dstr(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
)
3790 CHECK(compile_dstr_fragments(iseq
, ret
, node
, &cnt
));
3791 ADD_INSN1(ret
, nd_line(node
), concatstrings
, INT2FIX(cnt
));
3796 compile_dregx(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
)
3799 CHECK(compile_dstr_fragments(iseq
, ret
, node
, &cnt
));
3800 ADD_INSN2(ret
, nd_line(node
), toregexp
, INT2FIX(node
->nd_cflag
), INT2FIX(cnt
));
3805 compile_flip_flop(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int again
,
3806 LABEL
*then_label
, LABEL
*else_label
)
3808 const int line
= nd_line(node
);
3809 LABEL
*lend
= NEW_LABEL(line
);
3810 rb_num_t cnt
= ISEQ_FLIP_CNT_INCREMENT(iseq
->body
->local_iseq
)
3811 + VM_SVAR_FLIPFLOP_START
;
3812 VALUE key
= INT2FIX(cnt
);
3814 ADD_INSN2(ret
, line
, getspecial
, key
, INT2FIX(0));
3815 ADD_INSNL(ret
, line
, branchif
, lend
);
3818 CHECK(COMPILE(ret
, "flip2 beg", node
->nd_beg
));
3819 ADD_INSNL(ret
, line
, branchunless
, else_label
);
3820 ADD_INSN1(ret
, line
, putobject
, Qtrue
);
3821 ADD_INSN1(ret
, line
, setspecial
, key
);
3823 ADD_INSNL(ret
, line
, jump
, then_label
);
3827 ADD_LABEL(ret
, lend
);
3828 CHECK(COMPILE(ret
, "flip2 end", node
->nd_end
));
3829 ADD_INSNL(ret
, line
, branchunless
, then_label
);
3830 ADD_INSN1(ret
, line
, putobject
, Qfalse
);
3831 ADD_INSN1(ret
, line
, setspecial
, key
);
3832 ADD_INSNL(ret
, line
, jump
, then_label
);
3838 compile_branch_condition(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*cond
,
3839 LABEL
*then_label
, LABEL
*else_label
)
3842 switch (nd_type(cond
)) {
3845 LABEL
*label
= NEW_LABEL(nd_line(cond
));
3846 CHECK(compile_branch_condition(iseq
, ret
, cond
->nd_1st
, label
,
3848 if (!label
->refcnt
) break;
3849 ADD_LABEL(ret
, label
);
3850 cond
= cond
->nd_2nd
;
3855 LABEL
*label
= NEW_LABEL(nd_line(cond
));
3856 CHECK(compile_branch_condition(iseq
, ret
, cond
->nd_1st
, then_label
,
3858 if (!label
->refcnt
) break;
3859 ADD_LABEL(ret
, label
);
3860 cond
= cond
->nd_2nd
;
3863 case NODE_LIT
: /* NODE_LIT is always true */
3868 /* printf("useless condition eliminate (%s)\n", ruby_node_name(nd_type(cond))); */
3869 ADD_INSNL(ret
, nd_line(cond
), jump
, then_label
);
3873 /* printf("useless condition eliminate (%s)\n", ruby_node_name(nd_type(cond))); */
3874 ADD_INSNL(ret
, nd_line(cond
), jump
, else_label
);
3880 CHECK(COMPILE_POPPED(ret
, "branch condition", cond
));
3881 ADD_INSNL(ret
, nd_line(cond
), jump
, then_label
);
3884 CHECK(compile_flip_flop(iseq
, ret
, cond
, TRUE
, then_label
, else_label
));
3887 CHECK(compile_flip_flop(iseq
, ret
, cond
, FALSE
, then_label
, else_label
));
3890 CHECK(compile_defined_expr(iseq
, ret
, cond
, Qfalse
));
3893 CHECK(COMPILE(ret
, "branch condition", cond
));
3895 ADD_INSNL(ret
, nd_line(cond
), branchunless
, else_label
);
3896 ADD_INSNL(ret
, nd_line(cond
), jump
, then_label
);
3902 #define HASH_NO_BRACE 0
3903 #define HASH_BRACE 1
3904 #define METHOD_CALL_KEYWORDS 2
3907 keyword_node_p(const NODE
*const node
)
3909 return nd_type(node
) == NODE_HASH
&& (node
->nd_brace
& HASH_BRACE
) != HASH_BRACE
;
3913 compile_keyword_arg(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
,
3914 const NODE
*const root_node
,
3915 struct rb_callinfo_kwarg
**const kw_arg_ptr
,
3918 if (kw_arg_ptr
== NULL
) return FALSE
;
3920 if (root_node
->nd_head
&& nd_type(root_node
->nd_head
) == NODE_LIST
) {
3921 const NODE
*node
= root_node
->nd_head
;
3925 const NODE
*key_node
= node
->nd_head
;
3928 assert(nd_type(node
) == NODE_LIST
);
3929 if (key_node
&& nd_type(key_node
) == NODE_LIT
&& RB_TYPE_P(key_node
->nd_lit
, T_SYMBOL
)) {
3930 /* can be keywords */
3934 *flag
|= VM_CALL_KW_SPLAT
;
3935 if (seen_nodes
> 1 || node
->nd_next
->nd_next
) {
3936 /* A new hash will be created for the keyword arguments
3937 * in this case, so mark the method as passing mutable
3940 *flag
|= VM_CALL_KW_SPLAT_MUT
;
3945 node
= node
->nd_next
; /* skip value node */
3946 node
= node
->nd_next
;
3949 /* may be keywords */
3950 node
= root_node
->nd_head
;
3952 int len
= (int)node
->nd_alen
/ 2;
3953 struct rb_callinfo_kwarg
*kw_arg
=
3954 rb_xmalloc_mul_add(len
- 1, sizeof(VALUE
), sizeof(struct rb_callinfo_kwarg
));
3955 VALUE
*keywords
= kw_arg
->keywords
;
3957 kw_arg
->keyword_len
= len
;
3959 *kw_arg_ptr
= kw_arg
;
3961 for (i
=0; node
!= NULL
; i
++, node
= node
->nd_next
->nd_next
) {
3962 const NODE
*key_node
= node
->nd_head
;
3963 const NODE
*val_node
= node
->nd_next
->nd_head
;
3964 keywords
[i
] = key_node
->nd_lit
;
3965 NO_CHECK(COMPILE(ret
, "keyword values", val_node
));
3975 compile_args(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*node
,
3976 struct rb_callinfo_kwarg
**keywords_ptr
, unsigned int *flag
)
3980 for (; node
; len
++, node
= node
->nd_next
) {
3982 EXPECT_NODE("compile_args", node
, NODE_LIST
, -1);
3985 if (node
->nd_next
== NULL
&& keyword_node_p(node
->nd_head
)) { /* last node */
3986 if (compile_keyword_arg(iseq
, ret
, node
->nd_head
, keywords_ptr
, flag
)) {
3990 /* Bad Hack: temporarily mark hash node with flag so compile_hash
3991 * can compile call differently.
3993 node
->nd_head
->nd_brace
= METHOD_CALL_KEYWORDS
;
3994 NO_CHECK(COMPILE_(ret
, "array element", node
->nd_head
, FALSE
));
3995 node
->nd_head
->nd_brace
= HASH_NO_BRACE
;
3999 NO_CHECK(COMPILE_(ret
, "array element", node
->nd_head
, FALSE
));
4007 static_literal_node_p(const NODE
*node
, const rb_iseq_t
*iseq
)
4009 node
= node
->nd_head
;
4010 switch (nd_type(node
)) {
4017 return ISEQ_COMPILE_DATA(iseq
)->option
->frozen_string_literal
;
4024 static_literal_value(const NODE
*node
, rb_iseq_t
*iseq
)
4026 node
= node
->nd_head
;
4027 switch (nd_type(node
)) {
4035 if (ISEQ_COMPILE_DATA(iseq
)->option
->debug_frozen_string_literal
|| RTEST(ruby_debug
)) {
4037 VALUE debug_info
= rb_ary_new_from_args(2, rb_iseq_path(iseq
), INT2FIX((int)nd_line(node
)));
4038 lit
= rb_str_dup(node
->nd_lit
);
4039 rb_ivar_set(lit
, id_debug_created_info
, rb_obj_freeze(debug_info
));
4040 return rb_str_freeze(lit
);
4043 return rb_fstring(node
->nd_lit
);
4046 return node
->nd_lit
;
4051 compile_array(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*node
, int popped
)
4053 int line
= (int)nd_line(node
);
4055 if (nd_type(node
) == NODE_ZLIST
) {
4057 ADD_INSN1(ret
, line
, newarray
, INT2FIX(0));
4062 EXPECT_NODE("compile_array", node
, NODE_LIST
, -1);
4065 for (; node
; node
= node
->nd_next
) {
4066 NO_CHECK(COMPILE_(ret
, "array element", node
->nd_head
, popped
));
4071 /* Compilation of an array literal.
4072 * The following code is essentially the same as:
4074 * for (int count = 0; node; count++; node->nd_next) {
4075 * compile(node->nd_head);
4077 * ADD_INSN(newarray, count);
4079 * However, there are three points.
4081 * - The code above causes stack overflow for a big string literal.
4082 * The following limits the stack length up to max_stack_len.
4084 * [x1,x2,...,x10000] =>
4085 * push x1 ; push x2 ; ...; push x256; newarray 256;
4086 * push x257; push x258; ...; push x512; newarray 256; concatarray;
4087 * push x513; push x514; ...; push x768; newarray 256; concatarray;
4090 * - Long subarray can be optimized by pre-allocating a hidden array.
4092 * [1,2,3,...,100] =>
4093 * duparray [1,2,3,...,100]
4095 * [x, 1,2,3,...,100, z] =>
4096 * push x; newarray 1;
4097 * putobject [1,2,3,...,100] (<- hidden array); concatarray;
4098 * push z; newarray 1; concatarray
4100 * - If the last element is a keyword, newarraykwsplat should be emitted
4101 * to check and remove empty keyword arguments hash from array.
4102 * (Note: a keyword is NODE_HASH which is not static_literal_node_p.)
4105 * putobject 1; putobject 2; putobject 3; push kw; newarraykwsplat
4108 const int max_stack_len
= 0x100;
4109 const int min_tmp_ary_len
= 0x40;
4111 int first_chunk
= 1;
4113 /* Convert pushed elements to an array, and concatarray if needed */
4114 #define FLUSH_CHUNK(newarrayinsn) \
4116 ADD_INSN1(ret, line, newarrayinsn, INT2FIX(stack_len)); \
4117 if (!first_chunk) ADD_INSN(ret, line, concatarray); \
4118 first_chunk = stack_len = 0; \
4124 /* pre-allocation check (this branch can be omittable) */
4125 if (static_literal_node_p(node
, iseq
)) {
4126 /* count the elements that are optimizable */
4127 const NODE
*node_tmp
= node
->nd_next
;
4128 for (; node_tmp
&& static_literal_node_p(node_tmp
, iseq
); node_tmp
= node_tmp
->nd_next
)
4131 if ((first_chunk
&& stack_len
== 0 && !node_tmp
) || count
>= min_tmp_ary_len
) {
4132 /* The literal contains only optimizable elements, or the subarray is long enough */
4133 VALUE ary
= rb_ary_tmp_new(count
);
4135 /* Create a hidden array */
4136 for (; count
; count
--, node
= node
->nd_next
)
4137 rb_ary_push(ary
, static_literal_value(node
, iseq
));
4140 /* Emit optimized code */
4141 FLUSH_CHUNK(newarray
);
4143 ADD_INSN1(ret
, line
, duparray
, ary
);
4147 ADD_INSN1(ret
, line
, putobject
, ary
);
4148 ADD_INSN(ret
, line
, concatarray
);
4150 RB_OBJ_WRITTEN(iseq
, Qundef
, ary
);
4154 /* Base case: Compile "count" elements */
4155 for (; count
; count
--, node
= node
->nd_next
) {
4157 EXPECT_NODE("compile_array", node
, NODE_LIST
, -1);
4160 NO_CHECK(COMPILE_(ret
, "array element", node
->nd_head
, 0));
4163 if (!node
->nd_next
&& keyword_node_p(node
->nd_head
)) {
4164 /* Reached the end, and the last element is a keyword */
4165 FLUSH_CHUNK(newarraykwsplat
);
4169 /* If there are many pushed elements, flush them to avoid stack overflow */
4170 if (stack_len
>= max_stack_len
) FLUSH_CHUNK(newarray
);
4174 FLUSH_CHUNK(newarray
);
4180 static_literal_node_pair_p(const NODE
*node
, const rb_iseq_t
*iseq
)
4182 return node
->nd_head
&& static_literal_node_p(node
, iseq
) && static_literal_node_p(node
->nd_next
, iseq
);
4186 compile_hash(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*node
, int popped
)
4188 int line
= (int)nd_line(node
);
4189 int method_call_keywords
= node
->nd_brace
== METHOD_CALL_KEYWORDS
;
4191 node
= node
->nd_head
;
4193 if (!node
|| nd_type(node
) == NODE_ZLIST
) {
4195 ADD_INSN1(ret
, line
, newhash
, INT2FIX(0));
4200 EXPECT_NODE("compile_hash", node
, NODE_LIST
, -1);
4203 for (; node
; node
= node
->nd_next
) {
4204 NO_CHECK(COMPILE_(ret
, "hash element", node
->nd_head
, popped
));
4209 /* Compilation of a hash literal (or keyword arguments).
4210 * This is very similar to compile_array, but there are some differences:
4212 * - It contains key-value pairs. So we need to take every two elements.
4213 * We can assume that the length is always even.
4215 * - Merging is done by a method call (id_core_hash_merge_ptr).
4216 * Sometimes we need to insert the receiver, so "anchor" is needed.
4217 * In addition, a method call is much slower than concatarray.
4218 * So it pays only when the subsequence is really long.
4219 * (min_tmp_hash_len must be much larger than min_tmp_ary_len.)
4221 * - We need to handle keyword splat: **kw.
4222 * For **kw, the key part (node->nd_head) is NULL, and the value part
4223 * (node->nd_next->nd_head) is "kw".
4224 * The code is a bit difficult to avoid hash allocation for **{}.
4227 const int max_stack_len
= 0x100;
4228 const int min_tmp_hash_len
= 0x800;
4230 int first_chunk
= 1;
4231 DECL_ANCHOR(anchor
);
4232 INIT_ANCHOR(anchor
);
4234 /* Convert pushed elements to a hash, and merge if needed */
4235 #define FLUSH_CHUNK() \
4237 if (first_chunk) { \
4238 APPEND_LIST(ret, anchor); \
4239 ADD_INSN1(ret, line, newhash, INT2FIX(stack_len)); \
4242 ADD_INSN1(ret, line, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE)); \
4243 ADD_INSN(ret, line, swap); \
4244 APPEND_LIST(ret, anchor); \
4245 ADD_SEND(ret, line, id_core_hash_merge_ptr, INT2FIX(stack_len + 1)); \
4247 INIT_ANCHOR(anchor); \
4248 first_chunk = stack_len = 0; \
4254 /* pre-allocation check (this branch can be omittable) */
4255 if (static_literal_node_pair_p(node
, iseq
)) {
4256 /* count the elements that are optimizable */
4257 const NODE
*node_tmp
= node
->nd_next
->nd_next
;
4258 for (; node_tmp
&& static_literal_node_pair_p(node_tmp
, iseq
); node_tmp
= node_tmp
->nd_next
->nd_next
)
4261 if ((first_chunk
&& stack_len
== 0 && !node_tmp
) || count
>= min_tmp_hash_len
) {
4262 /* The literal contains only optimizable elements, or the subsequence is long enough */
4263 VALUE ary
= rb_ary_tmp_new(count
);
4265 /* Create a hidden hash */
4266 for (; count
; count
--, node
= node
->nd_next
->nd_next
) {
4268 elem
[0] = static_literal_value(node
, iseq
);
4269 elem
[1] = static_literal_value(node
->nd_next
, iseq
);
4270 rb_ary_cat(ary
, elem
, 2);
4272 VALUE hash
= rb_hash_new_with_size(RARRAY_LEN(ary
) / 2);
4273 rb_hash_bulk_insert(RARRAY_LEN(ary
), RARRAY_CONST_PTR_TRANSIENT(ary
), hash
);
4274 hash
= rb_obj_hide(hash
);
4277 /* Emit optimized code */
4280 ADD_INSN1(ret
, line
, duphash
, hash
);
4284 ADD_INSN1(ret
, line
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
4285 ADD_INSN(ret
, line
, swap
);
4287 ADD_INSN1(ret
, line
, putobject
, hash
);
4289 ADD_SEND(ret
, line
, id_core_hash_merge_kwd
, INT2FIX(2));
4291 RB_OBJ_WRITTEN(iseq
, Qundef
, hash
);
4295 /* Base case: Compile "count" elements */
4296 for (; count
; count
--, node
= node
->nd_next
->nd_next
) {
4299 EXPECT_NODE("compile_hash", node
, NODE_LIST
, -1);
4302 if (node
->nd_head
) {
4303 /* Normal key-value pair */
4304 NO_CHECK(COMPILE_(anchor
, "hash key element", node
->nd_head
, 0));
4305 NO_CHECK(COMPILE_(anchor
, "hash value element", node
->nd_next
->nd_head
, 0));
4308 /* If there are many pushed elements, flush them to avoid stack overflow */
4309 if (stack_len
>= max_stack_len
) FLUSH_CHUNK();
4312 /* kwsplat case: foo(..., **kw, ...) */
4315 const NODE
*kw
= node
->nd_next
->nd_head
;
4316 int empty_kw
= nd_type(kw
) == NODE_LIT
&& RB_TYPE_P(kw
->nd_lit
, T_HASH
); /* foo( ..., **{}, ...) */
4317 int first_kw
= first_chunk
&& stack_len
== 0; /* foo(1,2,3, **kw, ...) */
4318 int last_kw
= !node
->nd_next
->nd_next
; /* foo( ..., **kw) */
4319 int only_kw
= last_kw
&& first_kw
; /* foo(1,2,3, **kw) */
4322 if (only_kw
&& method_call_keywords
) {
4323 /* **{} appears at the only keyword argument in method call,
4324 * so it won't be modified.
4325 * kw is a special NODE_LIT that contains a special empty hash,
4326 * so this emits: putobject {}.
4327 * This is only done for method calls and not for literal hashes,
4328 * because literal hashes should always result in a new hash.
4330 NO_CHECK(COMPILE(ret
, "keyword splat", kw
));
4332 else if (first_kw
) {
4333 /* **{} appears as the first keyword argument, so it may be modified.
4334 * We need to create a fresh hash object.
4336 ADD_INSN1(ret
, line
, newhash
, INT2FIX(0));
4338 /* Any empty keyword splats that are not the first can be ignored.
4339 * since merging an empty hash into the existing hash is the same
4340 * as not merging it. */
4343 if (only_kw
&& method_call_keywords
) {
4344 /* **kw is only keyword argument in method call.
4345 * Use directly. This will be not be flagged as mutable.
4346 * This is only done for method calls and not for literal hashes,
4347 * because literal hashes should always result in a new hash.
4349 NO_CHECK(COMPILE(ret
, "keyword splat", kw
));
4352 /* There is more than one keyword argument, or this is not a method
4353 * call. In that case, we need to add an empty hash (if first keyword),
4354 * or merge the hash to the accumulated hash (if not the first keyword).
4356 ADD_INSN1(ret
, line
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
4357 if (first_kw
) ADD_INSN1(ret
, line
, newhash
, INT2FIX(0));
4358 else ADD_INSN(ret
, line
, swap
);
4360 NO_CHECK(COMPILE(ret
, "keyword splat", kw
));
4362 ADD_SEND(ret
, line
, id_core_hash_merge_kwd
, INT2FIX(2));
4377 rb_node_case_when_optimizable_literal(const NODE
*const node
)
4379 switch (nd_type(node
)) {
4381 VALUE v
= node
->nd_lit
;
4383 if (RB_TYPE_P(v
, T_FLOAT
) &&
4384 modf(RFLOAT_VALUE(v
), &ival
) == 0.0) {
4385 return FIXABLE(ival
) ? LONG2FIX((long)ival
) : rb_dbl2big(ival
);
4387 if (SYMBOL_P(v
) || rb_obj_is_kind_of(v
, rb_cNumeric
)) {
4399 return rb_fstring(node
->nd_lit
);
4405 when_vals(rb_iseq_t
*iseq
, LINK_ANCHOR
*const cond_seq
, const NODE
*vals
,
4406 LABEL
*l1
, int only_special_literals
, VALUE literals
)
4409 const NODE
*val
= vals
->nd_head
;
4410 VALUE lit
= rb_node_case_when_optimizable_literal(val
);
4412 if (lit
== Qundef
) {
4413 only_special_literals
= 0;
4415 else if (NIL_P(rb_hash_lookup(literals
, lit
))) {
4416 rb_hash_aset(literals
, lit
, (VALUE
)(l1
) | 1);
4419 ADD_INSN(cond_seq
, nd_line(val
), dup
); /* dup target */
4421 if (nd_type(val
) == NODE_STR
) {
4422 debugp_param("nd_lit", val
->nd_lit
);
4423 lit
= rb_fstring(val
->nd_lit
);
4424 ADD_INSN1(cond_seq
, nd_line(val
), putobject
, lit
);
4425 RB_OBJ_WRITTEN(iseq
, Qundef
, lit
);
4428 if (!COMPILE(cond_seq
, "when cond", val
)) return -1;
4431 ADD_INSN1(cond_seq
, nd_line(vals
), checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_CASE
));
4432 ADD_INSNL(cond_seq
, nd_line(val
), branchif
, l1
);
4433 vals
= vals
->nd_next
;
4435 return only_special_literals
;
4439 when_splat_vals(rb_iseq_t
*iseq
, LINK_ANCHOR
*const cond_seq
, const NODE
*vals
,
4440 LABEL
*l1
, int only_special_literals
, VALUE literals
)
4442 const int line
= nd_line(vals
);
4444 switch (nd_type(vals
)) {
4446 if (when_vals(iseq
, cond_seq
, vals
, l1
, only_special_literals
, literals
) < 0)
4450 ADD_INSN (cond_seq
, line
, dup
);
4451 CHECK(COMPILE(cond_seq
, "when splat", vals
->nd_head
));
4452 ADD_INSN1(cond_seq
, line
, splatarray
, Qfalse
);
4453 ADD_INSN1(cond_seq
, line
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_CASE
| VM_CHECKMATCH_ARRAY
));
4454 ADD_INSNL(cond_seq
, line
, branchif
, l1
);
4457 CHECK(when_splat_vals(iseq
, cond_seq
, vals
->nd_head
, l1
, only_special_literals
, literals
));
4458 CHECK(when_splat_vals(iseq
, cond_seq
, vals
->nd_body
, l1
, only_special_literals
, literals
));
4461 CHECK(when_splat_vals(iseq
, cond_seq
, vals
->nd_head
, l1
, only_special_literals
, literals
));
4462 ADD_INSN (cond_seq
, line
, dup
);
4463 CHECK(COMPILE(cond_seq
, "when argspush body", vals
->nd_body
));
4464 ADD_INSN1(cond_seq
, line
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_CASE
));
4465 ADD_INSNL(cond_seq
, line
, branchif
, l1
);
4468 ADD_INSN (cond_seq
, line
, dup
);
4469 CHECK(COMPILE(cond_seq
, "when val", vals
));
4470 ADD_INSN1(cond_seq
, line
, splatarray
, Qfalse
);
4471 ADD_INSN1(cond_seq
, line
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_CASE
| VM_CHECKMATCH_ARRAY
));
4472 ADD_INSNL(cond_seq
, line
, branchif
, l1
);
4480 compile_massign_lhs(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
)
4482 switch (nd_type(node
)) {
4483 case NODE_ATTRASGN
: {
4486 int line
= nd_line(node
);
4488 CHECK(COMPILE_POPPED(ret
, "masgn lhs (NODE_ATTRASGN)", node
));
4490 iobj
= (INSN
*)get_prev_insn((INSN
*)LAST_ELEMENT(ret
)); /* send insn */
4491 const struct rb_callinfo
*ci
= (struct rb_callinfo
*)OPERAND_AT(iobj
, 0);
4492 int argc
= vm_ci_argc(ci
) + 1;
4493 ci
= ci_argc_set(iseq
, ci
, argc
);
4494 OPERAND_AT(iobj
, 0) = (VALUE
)ci
;
4495 RB_OBJ_WRITTEN(iseq
, Qundef
, ci
);
4496 dupidx
= INT2FIX(argc
);
4498 INSERT_BEFORE_INSN1(iobj
, line
, topn
, dupidx
);
4499 if (vm_ci_flag(ci
) & VM_CALL_ARGS_SPLAT
) {
4500 int argc
= vm_ci_argc(ci
);
4501 ci
= ci_argc_set(iseq
, ci
, argc
- 1);
4502 OPERAND_AT(iobj
, 0) = (VALUE
)ci
;
4503 RB_OBJ_WRITTEN(iseq
, Qundef
, iobj
);
4504 INSERT_BEFORE_INSN1(iobj
, line
, newarray
, INT2FIX(1));
4505 INSERT_BEFORE_INSN(iobj
, line
, concatarray
);
4507 ADD_INSN(ret
, line
, pop
); /* result */
4511 DECL_ANCHOR(anchor
);
4512 INIT_ANCHOR(anchor
);
4513 CHECK(COMPILE_POPPED(anchor
, "nest masgn lhs", node
));
4514 ELEM_REMOVE(FIRST_ELEMENT(anchor
));
4515 ADD_SEQ(ret
, anchor
);
4519 DECL_ANCHOR(anchor
);
4520 INIT_ANCHOR(anchor
);
4521 CHECK(COMPILE_POPPED(anchor
, "masgn lhs", node
));
4522 ELEM_REMOVE(FIRST_ELEMENT(anchor
));
4523 ADD_SEQ(ret
, anchor
);
/* Emits assignment code for every target in an optimized multiple-assignment
 * LHS list.  Recurses on the tail first, then compiles the head, so the
 * targets are assigned in reverse list order (matching the order the RHS
 * values were pushed on the VM stack).  NOTE(review): the guard/terminating
 * lines of the recursion are elided in this excerpt. */
4531 compile_massign_opt_lhs(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*lhsn
)
/* tail first ... */
4534 CHECK(compile_massign_opt_lhs(iseq
, ret
, lhsn
->nd_next
));
/* ... then head, yielding reverse-order assignment */
4535 CHECK(compile_massign_lhs(iseq
, ret
, lhsn
->nd_head
));
4541 compile_massign_opt(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
,
4542 const NODE
*rhsn
, const NODE
*orig_lhsn
)
4545 const int memsize
= numberof(mem
);
4547 int llen
= 0, rlen
= 0;
4549 const NODE
*lhsn
= orig_lhsn
;
4551 #define MEMORY(v) { \
4553 if (memindex == memsize) return 0; \
4554 for (i=0; i<memindex; i++) { \
4555 if (mem[i] == (v)) return 0; \
4557 mem[memindex++] = (v); \
4560 if (rhsn
== 0 || nd_type(rhsn
) != NODE_LIST
) {
4565 const NODE
*ln
= lhsn
->nd_head
;
4566 switch (nd_type(ln
)) {
4571 case NODE_DASGN_CURR
:
4579 lhsn
= lhsn
->nd_next
;
4585 NO_CHECK(COMPILE_POPPED(ret
, "masgn val (popped)", rhsn
->nd_head
));
4588 NO_CHECK(COMPILE(ret
, "masgn val", rhsn
->nd_head
));
4590 rhsn
= rhsn
->nd_next
;
4595 for (i
=0; i
<llen
-rlen
; i
++) {
4596 ADD_INSN(ret
, nd_line(orig_lhsn
), putnil
);
4600 compile_massign_opt_lhs(iseq
, ret
, orig_lhsn
);
/* Balances the VM stack for multiple assignment: `rlen` values were pushed
 * but `llen` targets will consume values.  Pads with putnil when too few,
 * pops extras when too many.  (The rlen < llen branch header line is elided
 * in this excerpt.) */
4605 adjust_stack(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, int line
, int rlen
, int llen
)
/* rlen < llen: pad the missing values with nil */
4608 do {ADD_INSN(ret
, line
, putnil
);} while (++rlen
< llen
);
4610 else if (rlen
> llen
) {
/* rlen > llen: discard the surplus values */
4611 do {ADD_INSN(ret
, line
, pop
);} while (--rlen
> llen
);
4616 compile_massign(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
4618 const NODE
*rhsn
= node
->nd_value
;
4619 const NODE
*splatn
= node
->nd_args
;
4620 const NODE
*lhsn
= node
->nd_head
;
4621 int lhs_splat
= (splatn
&& NODE_NAMED_REST_P(splatn
)) ? 1 : 0;
4623 if (!popped
|| splatn
|| !compile_massign_opt(iseq
, ret
, rhsn
, lhsn
)) {
4626 DECL_ANCHOR(lhsseq
);
4628 INIT_ANCHOR(lhsseq
);
4631 CHECK(compile_massign_lhs(iseq
, lhsseq
, lhsn
->nd_head
));
4633 lhsn
= lhsn
->nd_next
;
4636 NO_CHECK(COMPILE(ret
, "normal masgn rhs", rhsn
));
4639 ADD_INSN(ret
, nd_line(node
), dup
);
4641 else if (!lhs_splat
) {
4642 INSN
*last
= (INSN
*)ret
->last
;
4643 if (IS_INSN(&last
->link
) &&
4644 IS_INSN_ID(last
, newarray
) &&
4645 last
->operand_size
== 1) {
4646 int rlen
= FIX2INT(OPERAND_AT(last
, 0));
4647 /* special case: assign to aset or attrset */
4650 adjust_stack(iseq
, ret
, nd_line(node
), rlen
, llen
);
4651 ADD_INSN(ret
, nd_line(node
), swap
);
4654 else if (llen
> 2 && llen
!= rlen
) {
4656 adjust_stack(iseq
, ret
, nd_line(node
), rlen
, llen
);
4657 ADD_INSN1(ret
, nd_line(node
), reverse
, INT2FIX(llen
));
4660 else if (llen
> 2) {
4661 last
->insn_id
= BIN(reverse
);
4667 ADD_INSN2(ret
, nd_line(node
), expandarray
,
4668 INT2FIX(llen
), INT2FIX(lhs_splat
));
4670 ADD_SEQ(ret
, lhsseq
);
4673 if (nd_type(splatn
) == NODE_POSTARG
) {
4674 /*a, b, *r, p1, p2 */
4675 const NODE
*postn
= splatn
->nd_2nd
;
4676 const NODE
*restn
= splatn
->nd_1st
;
4677 int num
= (int)postn
->nd_alen
;
4678 int flag
= 0x02 | (NODE_NAMED_REST_P(restn
) ? 0x01 : 0x00);
4680 ADD_INSN2(ret
, nd_line(splatn
), expandarray
,
4681 INT2FIX(num
), INT2FIX(flag
));
4683 if (NODE_NAMED_REST_P(restn
)) {
4684 CHECK(compile_massign_lhs(iseq
, ret
, restn
));
4687 CHECK(compile_massign_lhs(iseq
, ret
, postn
->nd_head
));
4688 postn
= postn
->nd_next
;
4693 CHECK(compile_massign_lhs(iseq
, ret
, splatn
));
/* Emits code that resolves the prefix of a scoped constant reference
 * (Foo::Bar, ::Foo::Bar).  Constant lookups go into `body`; a non-constant
 * prefix expression is compiled into `pref` instead.  The switch's case
 * labels (NODE_CONST / NODE_COLON3 / NODE_COLON2 / default, per the debugi
 * tags) are elided in this excerpt. */
4701 compile_const_prefix(rb_iseq_t
*iseq
, const NODE
*const node
,
4702 LINK_ANCHOR
*const pref
, LINK_ANCHOR
*const body
)
4704 switch (nd_type(node
)) {
/* bare constant: getconstant with allow-nil-cbase flag Qtrue */
4706 debugi("compile_const_prefix - colon", node
->nd_vid
);
4707 ADD_INSN1(body
, nd_line(node
), putobject
, Qtrue
);
4708 ADD_INSN1(body
, nd_line(node
), getconstant
, ID2SYM(node
->nd_vid
));
/* ::Foo — lookup relative to Object, discarding the pushed base */
4711 debugi("compile_const_prefix - colon3", node
->nd_mid
);
4712 ADD_INSN(body
, nd_line(node
), pop
);
4713 ADD_INSN1(body
, nd_line(node
), putobject
, rb_cObject
);
4714 ADD_INSN1(body
, nd_line(node
), putobject
, Qtrue
);
4715 ADD_INSN1(body
, nd_line(node
), getconstant
, ID2SYM(node
->nd_mid
));
/* Foo::Bar — resolve the prefix recursively, then look Bar up in it */
4718 CHECK(compile_const_prefix(iseq
, node
->nd_head
, pref
, body
));
4719 debugi("compile_const_prefix - colon2", node
->nd_mid
);
4720 ADD_INSN1(body
, nd_line(node
), putobject
, Qfalse
);
4721 ADD_INSN1(body
, nd_line(node
), getconstant
, ID2SYM(node
->nd_mid
));
/* arbitrary expression prefix: compile it into `pref` */
4724 CHECK(COMPILE(pref
, "const colon2 prefix", node
));
/* Emits the receiver (cbase) for a class/module definition path and returns
 * the VM_DEFINECLASS_* flag bits describing how it was produced.  (The
 * implicit-cbase return value line is elided in this excerpt.) */
4731 compile_cpath(LINK_ANCHOR
*const ret
, rb_iseq_t
*iseq
, const NODE
*cpath
)
4733 if (nd_type(cpath
) == NODE_COLON3
) {
4734 /* toplevel class ::Foo */
4735 ADD_INSN1(ret
, nd_line(cpath
), putobject
, rb_cObject
);
4736 return VM_DEFINECLASS_FLAG_SCOPED
;
4738 else if (cpath
->nd_head
) {
/* explicitly scoped: class Bar::Foo — compile the scope expression */
4740 NO_CHECK(COMPILE(ret
, "nd_else->nd_head", cpath
->nd_head
));
4741 return VM_DEFINECLASS_FLAG_SCOPED
;
4744 /* class at cbase Foo */
4745 ADD_INSN1(ret
, nd_line(cpath
), putspecialobject
,
4746 INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE
));
/* True when the call's receiver is an explicit `self` that still counts as a
 * private-call receiver.  NOTE(review): the meaning of nd_state != 0 on
 * NODE_SELF (presumably "self was written literally by the user") is defined
 * outside this excerpt — confirm; the fallthrough `return false` for
 * non-self receivers is elided here. */
4752 private_recv_p(const NODE
*node
)
4754 if (nd_type(node
->nd_recv
) == NODE_SELF
) {
4755 NODE
*self
= node
->nd_recv
;
4756 return self
->nd_state
!= 0;
4762 defined_expr(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
,
4763 const NODE
*const node
, LABEL
**lfinish
, VALUE needstr
);
4766 defined_expr0(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
,
4767 const NODE
*const node
, LABEL
**lfinish
, VALUE needstr
)
4769 enum defined_type expr_type
= DEFINED_NOT_DEFINED
;
4770 enum node_type type
;
4771 const int line
= nd_line(node
);
4773 switch (type
= nd_type(node
)) {
4777 expr_type
= DEFINED_NIL
;
4780 expr_type
= DEFINED_SELF
;
4783 expr_type
= DEFINED_TRUE
;
4786 expr_type
= DEFINED_FALSE
;
4790 const NODE
*vals
= node
;
4793 defined_expr0(iseq
, ret
, vals
->nd_head
, lfinish
, Qfalse
);
4796 lfinish
[1] = NEW_LABEL(line
);
4798 ADD_INSNL(ret
, line
, branchunless
, lfinish
[1]);
4799 } while ((vals
= vals
->nd_next
) != NULL
);
4808 expr_type
= DEFINED_EXPR
;
4814 expr_type
= DEFINED_LVAR
;
4818 ADD_INSN(ret
, line
, putnil
);
4819 ADD_INSN3(ret
, line
, defined
, INT2FIX(DEFINED_IVAR
),
4820 ID2SYM(node
->nd_vid
), needstr
);
4824 ADD_INSN(ret
, line
, putnil
);
4825 ADD_INSN3(ret
, line
, defined
, INT2FIX(DEFINED_GVAR
),
4826 ID2SYM(node
->nd_entry
->id
), needstr
);
4830 ADD_INSN(ret
, line
, putnil
);
4831 ADD_INSN3(ret
, line
, defined
, INT2FIX(DEFINED_CVAR
),
4832 ID2SYM(node
->nd_vid
), needstr
);
4836 ADD_INSN(ret
, line
, putnil
);
4837 ADD_INSN3(ret
, line
, defined
, INT2FIX(DEFINED_CONST
),
4838 ID2SYM(node
->nd_vid
), needstr
);
4842 lfinish
[1] = NEW_LABEL(line
);
4844 defined_expr0(iseq
, ret
, node
->nd_head
, lfinish
, Qfalse
);
4845 ADD_INSNL(ret
, line
, branchunless
, lfinish
[1]);
4846 NO_CHECK(COMPILE(ret
, "defined/colon2#nd_head", node
->nd_head
));
4848 ADD_INSN3(ret
, line
, defined
,
4849 (rb_is_const_id(node
->nd_mid
) ?
4850 INT2FIX(DEFINED_CONST_FROM
) : INT2FIX(DEFINED_METHOD
)),
4851 ID2SYM(node
->nd_mid
), needstr
);
4854 ADD_INSN1(ret
, line
, putobject
, rb_cObject
);
4855 ADD_INSN3(ret
, line
, defined
,
4856 INT2FIX(DEFINED_CONST_FROM
), ID2SYM(node
->nd_mid
), needstr
);
4859 /* method dispatch */
4864 case NODE_ATTRASGN
:{
4865 const int explicit_receiver
=
4866 (type
== NODE_CALL
|| type
== NODE_OPCALL
||
4867 (type
== NODE_ATTRASGN
&& !private_recv_p(node
)));
4869 if (!lfinish
[1] && (node
->nd_args
|| explicit_receiver
)) {
4870 lfinish
[1] = NEW_LABEL(line
);
4872 if (node
->nd_args
) {
4873 defined_expr0(iseq
, ret
, node
->nd_args
, lfinish
, Qfalse
);
4874 ADD_INSNL(ret
, line
, branchunless
, lfinish
[1]);
4876 if (explicit_receiver
) {
4877 defined_expr0(iseq
, ret
, node
->nd_recv
, lfinish
, Qfalse
);
4878 ADD_INSNL(ret
, line
, branchunless
, lfinish
[1]);
4879 NO_CHECK(COMPILE(ret
, "defined/recv", node
->nd_recv
));
4880 ADD_INSN3(ret
, line
, defined
, INT2FIX(DEFINED_METHOD
),
4881 ID2SYM(node
->nd_mid
), needstr
);
4884 ADD_INSN(ret
, line
, putself
);
4885 ADD_INSN3(ret
, line
, defined
, INT2FIX(DEFINED_FUNC
),
4886 ID2SYM(node
->nd_mid
), needstr
);
4892 ADD_INSN(ret
, line
, putnil
);
4893 ADD_INSN3(ret
, line
, defined
, INT2FIX(DEFINED_YIELD
), 0,
4899 ADD_INSN(ret
, line
, putnil
);
4900 ADD_INSN3(ret
, line
, defined
, INT2FIX(DEFINED_REF
),
4901 INT2FIX((node
->nd_nth
<< 1) | (type
== NODE_BACK_REF
)),
4907 ADD_INSN(ret
, line
, putnil
);
4908 ADD_INSN3(ret
, line
, defined
, INT2FIX(DEFINED_ZSUPER
), 0,
4914 case NODE_OP_ASGN_OR
:
4915 case NODE_OP_ASGN_AND
:
4919 case NODE_DASGN_CURR
:
4924 expr_type
= DEFINED_ASGN
;
4928 assert(expr_type
!= DEFINED_NOT_DEFINED
);
4930 if (needstr
!= Qfalse
) {
4931 VALUE str
= rb_iseq_defined_string(expr_type
);
4932 ADD_INSN1(ret
, line
, putobject
, str
);
4935 ADD_INSN1(ret
, line
, putobject
, Qtrue
);
/* Callback that builds the body of the rescue iseq guarding a defined?()
 * expression: if evaluation raises, the rescue simply produces nil (so
 * defined? answers "not defined").  Also installs the shared
 * exception-local table ($!). */
4940 build_defined_rescue_iseq(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const void *unused
)
4942 ADD_INSN(ret
, 0, putnil
);
4943 iseq_set_exception_local_table(iseq
);
/* Compiles a defined?() sub-expression via defined_expr0, then (in an elided
 * conditional — presumably only when a bail-out label lfinish[1] was
 * allocated) wraps the emitted region in a rescue catch-table entry whose
 * handler is built by build_defined_rescue_iseq, so exceptions raised while
 * testing definedness yield nil instead of propagating. */
4947 defined_expr(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
,
4948 const NODE
*const node
, LABEL
**lfinish
, VALUE needstr
)
/* remember where this expression's code begins, for APPEND_LABEL below */
4950 LINK_ELEMENT
*lcur
= ret
->last
;
4951 defined_expr0(iseq
, ret
, node
, lfinish
, needstr
);
4953 int line
= nd_line(node
);
4954 LABEL
*lstart
= NEW_LABEL(line
);
4955 LABEL
*lend
= NEW_LABEL(line
);
4956 const rb_iseq_t
*rescue
;
/* build the "defined guard" rescue iseq from the callback above */
4957 struct rb_iseq_new_with_callback_callback_func
*ifunc
=
4958 rb_iseq_new_with_callback_new_callback(build_defined_rescue_iseq
, NULL
);
4959 rescue
= new_child_iseq_with_callback(iseq
, ifunc
,
4960 rb_str_concat(rb_str_new2("defined guard in "),
4961 iseq
->body
->location
.label
),
4962 iseq
, ISEQ_TYPE_RESCUE
, 0);
4963 lstart
->rescued
= LABEL_RESCUE_BEG
;
4964 lend
->rescued
= LABEL_RESCUE_END
;
/* bracket the already-emitted code: lstart goes just after lcur */
4965 APPEND_LABEL(ret
, lcur
, lstart
);
4966 ADD_LABEL(ret
, lend
);
/* on exception inside [lstart, lend), run `rescue` and resume at lfinish[1] */
4967 ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE
, lstart
, lend
, rescue
, lfinish
[1]);
/* Entry point for compiling a defined?(expr) expression.
 * defined?() with no argument (nd_head == NULL) is a constant: push the
 * "expression" result string directly.  Otherwise emit the definedness test
 * and, when a bail-out label was used (elided conditional), patch in the
 * putnil/swap/pop epilogue that normalizes the failure path's stack. */
4972 compile_defined_expr(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, VALUE needstr
)
4974 const int line
= nd_line(node
);
4975 if (!node
->nd_head
) {
/* defined?() — statically "expression" */
4976 VALUE str
= rb_iseq_defined_string(DEFINED_NIL
);
4977 ADD_INSN1(ret
, line
, putobject
, str
);
/* general case: lfinish[0] = done label; lfinish[1] = bail-out label,
 * allocated lazily by defined_expr0 (declaration lines elided here) */
4981 LINK_ELEMENT
*last
= ret
->last
;
4982 lfinish
[0] = NEW_LABEL(line
);
4984 defined_expr(iseq
, ret
, node
->nd_head
, lfinish
, needstr
);
/* bail-out path was used: pre-push nil under the partial result, then
 * swap/pop so both paths leave exactly one value on the stack */
4986 ELEM_INSERT_NEXT(last
, &new_insn_body(iseq
, line
, BIN(putnil
), 0)->link
);
4987 ADD_INSN(ret
, line
, swap
);
4988 ADD_INSN(ret
, line
, pop
);
4989 ADD_LABEL(ret
, lfinish
[1]);
4991 ADD_LABEL(ret
, lfinish
[0]);
/* Builds the human-readable label for a block iseq: "block in <method>" for
 * a directly nested block, or "block (N levels) in <method>" when nested
 * deeper.  Walks parent_iseq links up to the enclosing local (method) iseq,
 * counting ISEQ_TYPE_BLOCK frames; the `level` counter's declaration and
 * increment lines are elided in this excerpt. */
4997 make_name_for_block(const rb_iseq_t
*orig_iseq
)
5000 const rb_iseq_t
*iseq
= orig_iseq
;
5002 if (orig_iseq
->body
->parent_iseq
!= 0) {
/* climb until we reach the method (local) iseq, counting block frames */
5003 while (orig_iseq
->body
->local_iseq
!= iseq
) {
5004 if (iseq
->body
->type
== ISEQ_TYPE_BLOCK
) {
5007 iseq
= iseq
->body
->parent_iseq
;
/* single level of nesting */
5012 return rb_sprintf("block in %"PRIsVALUE
, iseq
->body
->location
.label
);
/* nested `level` blocks deep */
5015 return rb_sprintf("block (%d levels) in %"PRIsVALUE
, level
, iseq
->body
->location
.label
);
/* Pushes a new entry onto the per-iseq compile-time ensure-node stack, which
 * tracks the `ensure` clauses that must be re-emitted before non-local exits
 * (break/next/return).  NOTE(review): the line storing `er` into enl->erange
 * is elided in this excerpt. */
5020 push_ensure_entry(rb_iseq_t
*iseq
,
5021 struct iseq_compile_data_ensure_node_stack
*enl
,
5022 struct ensure_range
*er
, const NODE
*const node
)
5024 enl
->ensure_node
= node
;
5025 enl
->prev
= ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
; /* prev */
/* make the new entry the top of the stack */
5027 ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
= enl
;
/* Splits the last range of an ensure_range chain around an inline copy of
 * the ensure body: the current tail range is truncated at `lstart`, and a
 * newly allocated range (ne) carries the remainder from `lend` onward.
 * NOTE(review): the lines initializing ne->begin/ne->next and linking ne
 * into the chain are elided in this excerpt — confirm against the full
 * definition. */
5031 add_ensure_range(rb_iseq_t
*iseq
, struct ensure_range
*erange
,
5032 LABEL
*lstart
, LABEL
*lend
)
5034 struct ensure_range
*ne
=
5035 compile_data_alloc(iseq
, sizeof(struct ensure_range
));
/* advance to the last range in the chain */
5037 while (erange
->next
!= 0) {
5038 erange
= erange
->next
;
/* new range inherits the old end; old range now stops at lstart */
5042 ne
->end
= erange
->end
;
5043 erange
->end
= lstart
;
/* Re-emits every pending `ensure` body (inline) before a non-local exit such
 * as break/next/return.  Walks the compile-time ensure-node stack, compiling
 * each ensure clause into a fresh anchor while temporarily popping the stack
 * so the clause does not re-trigger itself; finally restores the stack and
 * appends the accumulated code to `ret`.  NOTE(review): the loop header over
 * `enlp`, the is_return early-exit, and the enlp advance are elided in this
 * excerpt. */
5049 add_ensure_iseq(LINK_ANCHOR
*const ret
, rb_iseq_t
*iseq
, int is_return
)
5051 struct iseq_compile_data_ensure_node_stack
*enlp
=
5052 ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
;
5053 struct iseq_compile_data_ensure_node_stack
*prev_enlp
= enlp
;
5054 DECL_ANCHOR(ensure
);
5056 INIT_ANCHOR(ensure
);
5058 if (enlp
->erange
!= NULL
) {
5059 DECL_ANCHOR(ensure_part
);
5060 LABEL
*lstart
= NEW_LABEL(0);
5061 LABEL
*lend
= NEW_LABEL(0);
5062 INIT_ANCHOR(ensure_part
);
/* carve [lstart, lend) out of the protected range for this inline copy */
5064 add_ensure_range(iseq
, enlp
->erange
, lstart
, lend
);
/* pop the stack while compiling so the ensure body can't recurse into
 * its own ensure handling */
5066 ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
= enlp
->prev
;
5067 ADD_LABEL(ensure_part
, lstart
);
5068 NO_CHECK(COMPILE_POPPED(ensure_part
, "ensure part", enlp
->ensure_node
));
5069 ADD_LABEL(ensure_part
, lend
);
5070 ADD_SEQ(ensure
, ensure_part
);
/* restore the stack and splice the accumulated ensure code in */
5079 ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
= prev_enlp
;
5080 ADD_SEQ(ret
, ensure
);
/* Reports whether the LAST element of an argument list is a keyword-argument
 * node (so the call needs VM_CALL_KW_SPLAT handling).  For a NODE_LIST,
 * walks to the final cell and tests its head. */
5084 check_keyword(const NODE
*node
)
5086 /* This check is essentially a code clone of compile_keyword_arg. */
5088 if (nd_type(node
) == NODE_LIST
) {
/* advance to the last list cell */
5089 while (node
->nd_next
) {
5090 node
= node
->nd_next
;
5092 node
= node
->nd_head
;
5095 return keyword_node_p(node
);
5099 setup_args_core(rb_iseq_t
*iseq
, LINK_ANCHOR
*const args
, const NODE
*argn
,
5100 int dup_rest
, unsigned int *flag
, struct rb_callinfo_kwarg
**keywords
)
5103 switch (nd_type(argn
)) {
5105 NO_CHECK(COMPILE(args
, "args (splat)", argn
->nd_head
));
5106 ADD_INSN1(args
, nd_line(argn
), splatarray
, dup_rest
? Qtrue
: Qfalse
);
5107 if (flag
) *flag
|= VM_CALL_ARGS_SPLAT
;
5111 case NODE_ARGSPUSH
: {
5112 int next_is_list
= (nd_type(argn
->nd_head
) == NODE_LIST
);
5113 VALUE argc
= setup_args_core(iseq
, args
, argn
->nd_head
, 1, NULL
, NULL
);
5114 if (nd_type(argn
->nd_body
) == NODE_LIST
) {
5115 /* This branch is needed to avoid "newarraykwsplat" [Bug #16442] */
5116 int rest_len
= compile_args(iseq
, args
, argn
->nd_body
, NULL
, NULL
);
5117 ADD_INSN1(args
, nd_line(argn
), newarray
, INT2FIX(rest_len
));
5120 NO_CHECK(COMPILE(args
, "args (cat: splat)", argn
->nd_body
));
5123 *flag
|= VM_CALL_ARGS_SPLAT
;
5124 /* This is a dirty hack. It traverses the AST twice.
5125 * In a long term, it should be fixed by a redesign of keyword arguments */
5126 if (check_keyword(argn
->nd_body
))
5127 *flag
|= VM_CALL_KW_SPLAT
;
5129 if (nd_type(argn
) == NODE_ARGSCAT
) {
5131 ADD_INSN1(args
, nd_line(argn
), splatarray
, Qtrue
);
5132 return INT2FIX(FIX2INT(argc
) + 1);
5135 ADD_INSN1(args
, nd_line(argn
), splatarray
, Qfalse
);
5136 ADD_INSN(args
, nd_line(argn
), concatarray
);
5141 ADD_INSN1(args
, nd_line(argn
), newarray
, INT2FIX(1));
5142 ADD_INSN(args
, nd_line(argn
), concatarray
);
5147 int len
= compile_args(iseq
, args
, argn
, keywords
, flag
);
5148 return INT2FIX(len
);
5151 UNKNOWN_NODE("setup_arg", argn
, Qnil
);
/* Compiles a call's argument list into `args`, filling in call-info `flag`
 * bits and collected `keywords`.  Handles the &block-pass form specially:
 * the block expression is compiled into a side anchor, and when it is a
 * single getblockparam instruction it is rewritten to getblockparamproxy
 * (the block is merely forwarded, so no Proc needs materializing).
 * NOTE(review): the VALUE ret declaration and final `return ret;` lines are
 * elided in this excerpt. */
5159 setup_args(rb_iseq_t
*iseq
, LINK_ANCHOR
*const args
, const NODE
*argn
,
5160 unsigned int *flag
, struct rb_callinfo_kwarg
**keywords
)
5163 if (argn
&& nd_type(argn
) == NODE_BLOCK_PASS
) {
5164 DECL_ANCHOR(arg_block
);
5165 INIT_ANCHOR(arg_block
);
5166 NO_CHECK(COMPILE(arg_block
, "block", argn
->nd_body
));
5168 *flag
|= VM_CALL_ARGS_BLOCKARG
;
/* positional/keyword args come from the head of the block-pass node */
5169 ret
= setup_args_core(iseq
, args
, argn
->nd_head
, 0, flag
, keywords
);
/* single-instruction block arg that just reads the block parameter:
 * forward the proxy instead of instantiating a Proc */
5171 if (LIST_INSN_SIZE_ONE(arg_block
)) {
5172 LINK_ELEMENT
*elem
= FIRST_ELEMENT(arg_block
);
5173 if (elem
->type
== ISEQ_ELEMENT_INSN
) {
5174 INSN
*iobj
= (INSN
*)elem
;
5175 if (iobj
->insn_id
== BIN(getblockparam
)) {
5176 iobj
->insn_id
= BIN(getblockparamproxy
);
5180 ADD_SEQ(args
, arg_block
);
/* no block-pass: compile the argument list directly */
5183 ret
= setup_args_core(iseq
, args
, argn
, 0, flag
, keywords
);
/* Callback that builds the iseq registering an END { } block: wraps the body
 * in a child block iseq and emits a call to the VM-core method
 * core#set_postexe with that block attached. */
5189 build_postexe_iseq(rb_iseq_t
*iseq
, LINK_ANCHOR
*ret
, const void *ptr
)
5191 const NODE
*body
= ptr
;
5192 int line
= nd_line(body
);
5193 VALUE argc
= INT2FIX(0);
/* name the block after the enclosing (parent) iseq */
5194 const rb_iseq_t
*block
= NEW_CHILD_ISEQ(body
, make_name_for_block(iseq
->body
->parent_iseq
), ISEQ_TYPE_BLOCK
, line
);
5196 ADD_INSN1(ret
, line
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
5197 ADD_CALL_WITH_BLOCK(ret
, line
, id_core_set_postexe
, argc
, block
);
/* write barrier: iseq now references the child block iseq */
5198 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)block
);
5199 iseq_set_local_table(iseq
, 0);
5203 compile_named_capture_assign(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
)
5207 int line
= nd_line(node
);
5208 LABEL
*fail_label
= NEW_LABEL(line
), *end_label
= NEW_LABEL(line
);
5210 #if !(defined(NAMED_CAPTURE_BY_SVAR) && NAMED_CAPTURE_BY_SVAR-0)
5211 ADD_INSN1(ret
, line
, getglobal
, ((VALUE
)rb_global_entry(idBACKREF
) | 1));
5213 ADD_INSN2(ret
, line
, getspecial
, INT2FIX(1) /* '~' */, INT2FIX(0));
5215 ADD_INSN(ret
, line
, dup
);
5216 ADD_INSNL(ret
, line
, branchunless
, fail_label
);
5218 for (vars
= node
; vars
; vars
= vars
->nd_next
) {
5220 if (vars
->nd_next
) {
5221 ADD_INSN(ret
, line
, dup
);
5224 NO_CHECK(COMPILE_POPPED(ret
, "capture", vars
->nd_head
));
5225 last
= last
->next
; /* putobject :var */
5226 cap
= new_insn_send(iseq
, line
, idAREF
, INT2FIX(1),
5227 NULL
, INT2FIX(0), NULL
);
5228 ELEM_INSERT_PREV(last
->next
, (LINK_ELEMENT
*)cap
);
5229 #if !defined(NAMED_CAPTURE_SINGLE_OPT) || NAMED_CAPTURE_SINGLE_OPT-0
5230 if (!vars
->nd_next
&& vars
== node
) {
5235 ADD_INSNL(nom
, line
, jump
, end_label
);
5236 ADD_LABEL(nom
, fail_label
);
5237 # if 0 /* $~ must be MatchData or nil */
5238 ADD_INSN(nom
, line
, pop
);
5239 ADD_INSN(nom
, line
, putnil
);
5241 ADD_LABEL(nom
, end_label
);
5242 (nom
->last
->next
= cap
->link
.next
)->prev
= nom
->last
;
5243 (cap
->link
.next
= nom
->anchor
.next
)->prev
= &cap
->link
;
5248 ADD_INSNL(ret
, line
, jump
, end_label
);
5249 ADD_LABEL(ret
, fail_label
);
5250 ADD_INSN(ret
, line
, pop
);
5251 for (vars
= node
; vars
; vars
= vars
->nd_next
) {
5253 NO_CHECK(COMPILE_POPPED(ret
, "capture", vars
->nd_head
));
5254 last
= last
->next
; /* putobject :var */
5255 ((INSN
*)last
)->insn_id
= BIN(putnil
);
5256 ((INSN
*)last
)->operand_size
= 0;
5258 ADD_LABEL(ret
, end_label
);
/* True when a Range-literal endpoint node can be folded at compile time.
 * Visible case accepts Integer literals; the remaining switch cases
 * (presumably nil and the default FALSE) are elided in this excerpt. */
5262 optimizable_range_item_p(const NODE
*n
)
5264 if (!n
) return FALSE
;
5265 switch (nd_type(n
)) {
/* literal node: only Integers qualify */
5267 return RB_INTEGER_TYPE_P(n
->nd_lit
);
5276 compile_if(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
, const enum node_type type
)
5278 struct rb_iseq_constant_body
*const body
= iseq
->body
;
5279 const NODE
*const node_body
= type
== NODE_IF
? node
->nd_body
: node
->nd_else
;
5280 const NODE
*const node_else
= type
== NODE_IF
? node
->nd_else
: node
->nd_body
;
5282 const int line
= nd_line(node
);
5283 const int lineno
= nd_first_lineno(node
);
5284 const int column
= nd_first_column(node
);
5285 const int last_lineno
= nd_last_lineno(node
);
5286 const int last_column
= nd_last_column(node
);
5287 DECL_ANCHOR(cond_seq
);
5288 DECL_ANCHOR(then_seq
);
5289 DECL_ANCHOR(else_seq
);
5290 LABEL
*then_label
, *else_label
, *end_label
;
5291 VALUE branches
= Qfalse
;
5293 VALUE catch_table
= ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
;
5294 long catch_table_size
= NIL_P(catch_table
) ? 0 : RARRAY_LEN(catch_table
);
5296 INIT_ANCHOR(cond_seq
);
5297 INIT_ANCHOR(then_seq
);
5298 INIT_ANCHOR(else_seq
);
5299 then_label
= NEW_LABEL(line
);
5300 else_label
= NEW_LABEL(line
);
5303 compile_branch_condition(iseq
, cond_seq
, node
->nd_cond
,
5304 then_label
, else_label
);
5306 ci_size
= body
->ci_size
;
5307 CHECK(COMPILE_(then_seq
, "then", node_body
, popped
));
5308 catch_table
= ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
;
5309 if (!then_label
->refcnt
) {
5310 body
->ci_size
= ci_size
;
5311 if (!NIL_P(catch_table
)) rb_ary_set_len(catch_table
, catch_table_size
);
5314 if (!NIL_P(catch_table
)) catch_table_size
= RARRAY_LEN(catch_table
);
5317 ci_size
= body
->ci_size
;
5318 CHECK(COMPILE_(else_seq
, "else", node_else
, popped
));
5319 catch_table
= ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
;
5320 if (!else_label
->refcnt
) {
5321 body
->ci_size
= ci_size
;
5322 if (!NIL_P(catch_table
)) rb_ary_set_len(catch_table
, catch_table_size
);
5325 if (!NIL_P(catch_table
)) catch_table_size
= RARRAY_LEN(catch_table
);
5328 ADD_SEQ(ret
, cond_seq
);
5330 if (then_label
->refcnt
&& else_label
->refcnt
) {
5331 DECL_BRANCH_BASE(branches
, lineno
, column
, last_lineno
, last_column
, type
== NODE_IF
? "if" : "unless");
5334 if (then_label
->refcnt
) {
5335 ADD_LABEL(ret
, then_label
);
5336 if (else_label
->refcnt
) {
5337 ADD_TRACE_BRANCH_COVERAGE(
5339 node_body
? nd_first_lineno(node_body
) : lineno
,
5340 node_body
? nd_first_column(node_body
) : column
,
5341 node_body
? nd_last_lineno(node_body
) : last_lineno
,
5342 node_body
? nd_last_column(node_body
) : last_column
,
5343 type
== NODE_IF
? "then" : "else",
5345 end_label
= NEW_LABEL(line
);
5346 ADD_INSNL(then_seq
, line
, jump
, end_label
);
5348 ADD_SEQ(ret
, then_seq
);
5351 if (else_label
->refcnt
) {
5352 ADD_LABEL(ret
, else_label
);
5353 if (then_label
->refcnt
) {
5354 ADD_TRACE_BRANCH_COVERAGE(
5356 node_else
? nd_first_lineno(node_else
) : lineno
,
5357 node_else
? nd_first_column(node_else
) : column
,
5358 node_else
? nd_last_lineno(node_else
) : last_lineno
,
5359 node_else
? nd_last_column(node_else
) : last_column
,
5360 type
== NODE_IF
? "else" : "then",
5363 ADD_SEQ(ret
, else_seq
);
5367 ADD_LABEL(ret
, end_label
);
5374 compile_case(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const orig_node
, int popped
)
5377 const NODE
*node
= orig_node
;
5378 LABEL
*endlabel
, *elselabel
;
5380 DECL_ANCHOR(body_seq
);
5381 DECL_ANCHOR(cond_seq
);
5382 int only_special_literals
= 1;
5383 VALUE literals
= rb_hash_new();
5384 int line
, lineno
, column
, last_lineno
, last_column
;
5385 enum node_type type
;
5386 VALUE branches
= Qfalse
;
5389 INIT_ANCHOR(body_seq
);
5390 INIT_ANCHOR(cond_seq
);
5392 RHASH_TBL_RAW(literals
)->type
= &cdhash_type
;
5394 CHECK(COMPILE(head
, "case base", node
->nd_head
));
5396 DECL_BRANCH_BASE(branches
, nd_first_lineno(node
), nd_first_column(node
), nd_last_lineno(node
), nd_last_column(node
), "case");
5398 node
= node
->nd_body
;
5399 EXPECT_NODE("NODE_CASE", node
, NODE_WHEN
, COMPILE_NG
);
5400 type
= nd_type(node
);
5401 line
= nd_line(node
);
5402 lineno
= nd_first_lineno(node
);
5403 column
= nd_first_column(node
);
5404 last_lineno
= nd_last_lineno(node
);
5405 last_column
= nd_last_column(node
);
5407 endlabel
= NEW_LABEL(line
);
5408 elselabel
= NEW_LABEL(line
);
5410 ADD_SEQ(ret
, head
); /* case VAL */
5412 while (type
== NODE_WHEN
) {
5415 l1
= NEW_LABEL(line
);
5416 ADD_LABEL(body_seq
, l1
);
5417 ADD_INSN(body_seq
, line
, pop
);
5418 ADD_TRACE_BRANCH_COVERAGE(
5420 node
->nd_body
? nd_first_lineno(node
->nd_body
) : lineno
,
5421 node
->nd_body
? nd_first_column(node
->nd_body
) : column
,
5422 node
->nd_body
? nd_last_lineno(node
->nd_body
) : last_lineno
,
5423 node
->nd_body
? nd_last_column(node
->nd_body
) : last_column
,
5426 CHECK(COMPILE_(body_seq
, "when body", node
->nd_body
, popped
));
5427 ADD_INSNL(body_seq
, line
, jump
, endlabel
);
5429 vals
= node
->nd_head
;
5431 switch (nd_type(vals
)) {
5433 only_special_literals
= when_vals(iseq
, cond_seq
, vals
, l1
, only_special_literals
, literals
);
5434 if (only_special_literals
< 0) return COMPILE_NG
;
5439 only_special_literals
= 0;
5440 CHECK(when_splat_vals(iseq
, cond_seq
, vals
, l1
, only_special_literals
, literals
));
5443 UNKNOWN_NODE("NODE_CASE", vals
, COMPILE_NG
);
5447 EXPECT_NODE_NONULL("NODE_CASE", node
, NODE_LIST
, COMPILE_NG
);
5450 node
= node
->nd_next
;
5454 type
= nd_type(node
);
5455 line
= nd_line(node
);
5456 lineno
= nd_first_lineno(node
);
5457 column
= nd_first_column(node
);
5458 last_lineno
= nd_last_lineno(node
);
5459 last_column
= nd_last_column(node
);
5463 ADD_LABEL(cond_seq
, elselabel
);
5464 ADD_INSN(cond_seq
, line
, pop
);
5465 ADD_TRACE_BRANCH_COVERAGE(cond_seq
, nd_first_lineno(node
), nd_first_column(node
), nd_last_lineno(node
), nd_last_column(node
), "else", branches
);
5466 CHECK(COMPILE_(cond_seq
, "else", node
, popped
));
5467 ADD_INSNL(cond_seq
, line
, jump
, endlabel
);
5470 debugs("== else (implicit)\n");
5471 ADD_LABEL(cond_seq
, elselabel
);
5472 ADD_INSN(cond_seq
, nd_line(orig_node
), pop
);
5473 ADD_TRACE_BRANCH_COVERAGE(cond_seq
, nd_first_lineno(orig_node
), nd_first_column(orig_node
), nd_last_lineno(orig_node
), nd_last_column(orig_node
), "else", branches
);
5475 ADD_INSN(cond_seq
, nd_line(orig_node
), putnil
);
5477 ADD_INSNL(cond_seq
, nd_line(orig_node
), jump
, endlabel
);
5480 if (only_special_literals
&& ISEQ_COMPILE_DATA(iseq
)->option
->specialized_instruction
) {
5481 ADD_INSN(ret
, nd_line(orig_node
), dup
);
5482 ADD_INSN2(ret
, nd_line(orig_node
), opt_case_dispatch
, literals
, elselabel
);
5483 RB_OBJ_WRITTEN(iseq
, Qundef
, literals
);
5484 LABEL_REF(elselabel
);
5487 ADD_SEQ(ret
, cond_seq
);
5488 ADD_SEQ(ret
, body_seq
);
5489 ADD_LABEL(ret
, endlabel
);
5494 compile_case2(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const orig_node
, int popped
)
5498 const NODE
*node
= orig_node
->nd_body
;
5500 DECL_ANCHOR(body_seq
);
5501 VALUE branches
= Qfalse
;
5503 DECL_BRANCH_BASE(branches
, nd_first_lineno(orig_node
), nd_first_column(orig_node
), nd_last_lineno(orig_node
), nd_last_column(orig_node
), "case");
5505 INIT_ANCHOR(body_seq
);
5506 endlabel
= NEW_LABEL(nd_line(node
));
5508 while (node
&& nd_type(node
) == NODE_WHEN
) {
5509 const int line
= nd_line(node
);
5510 const int lineno
= nd_first_lineno(node
);
5511 const int column
= nd_first_column(node
);
5512 const int last_lineno
= nd_last_lineno(node
);
5513 const int last_column
= nd_last_column(node
);
5514 LABEL
*l1
= NEW_LABEL(line
);
5515 ADD_LABEL(body_seq
, l1
);
5516 ADD_TRACE_BRANCH_COVERAGE(
5518 node
->nd_body
? nd_first_lineno(node
->nd_body
) : lineno
,
5519 node
->nd_body
? nd_first_column(node
->nd_body
) : column
,
5520 node
->nd_body
? nd_last_lineno(node
->nd_body
) : last_lineno
,
5521 node
->nd_body
? nd_last_column(node
->nd_body
) : last_column
,
5524 CHECK(COMPILE_(body_seq
, "when", node
->nd_body
, popped
));
5525 ADD_INSNL(body_seq
, line
, jump
, endlabel
);
5527 vals
= node
->nd_head
;
5529 EXPECT_NODE_NONULL("NODE_WHEN", node
, NODE_LIST
, COMPILE_NG
);
5531 switch (nd_type(vals
)) {
5535 val
= vals
->nd_head
;
5536 lnext
= NEW_LABEL(nd_line(val
));
5537 debug_compile("== when2\n", (void)0);
5538 CHECK(compile_branch_condition(iseq
, ret
, val
, l1
, lnext
));
5539 ADD_LABEL(ret
, lnext
);
5540 vals
= vals
->nd_next
;
5546 ADD_INSN(ret
, nd_line(vals
), putnil
);
5547 CHECK(COMPILE(ret
, "when2/cond splat", vals
));
5548 ADD_INSN1(ret
, nd_line(vals
), checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_WHEN
| VM_CHECKMATCH_ARRAY
));
5549 ADD_INSNL(ret
, nd_line(vals
), branchif
, l1
);
5552 UNKNOWN_NODE("NODE_WHEN", vals
, COMPILE_NG
);
5554 node
= node
->nd_next
;
5557 ADD_TRACE_BRANCH_COVERAGE(
5559 node
? nd_first_lineno(node
) : nd_first_lineno(orig_node
),
5560 node
? nd_first_column(node
) : nd_first_column(orig_node
),
5561 node
? nd_last_lineno(node
) : nd_last_lineno(orig_node
),
5562 node
? nd_last_column(node
) : nd_last_column(orig_node
),
5565 CHECK(COMPILE_(ret
, "else", node
, popped
));
5566 ADD_INSNL(ret
, nd_line(orig_node
), jump
, endlabel
);
5568 ADD_SEQ(ret
, body_seq
);
5569 ADD_LABEL(ret
, endlabel
);
5573 static int iseq_compile_pattern_match(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, LABEL
*unmatched
, int in_alt_pattern
);
5576 iseq_compile_pattern_each(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, LABEL
*matched
, LABEL
*unmatched
, int in_alt_pattern
)
5578 const int line
= nd_line(node
);
5580 switch (nd_type(node
)) {
5583 * if pattern.use_rest_num?
5586 * if pattern.has_constant_node?
5587 * unless pattern.constant === obj
5591 * unless obj.respond_to?(:deconstruct)
5594 * d = obj.deconstruct
5595 * unless Array === d
5598 * min_argc = pattern.pre_args_num + pattern.post_args_num
5599 * if pattern.has_rest_arg?
5600 * unless d.length >= min_argc
5604 * unless d.length == min_argc
5608 * pattern.pre_args_num.each do |i|
5609 * unless pattern.pre_args[i].match?(d[i])
5613 * if pattern.use_rest_num?
5614 * rest_num = d.length - min_argc
5615 * if pattern.has_rest_arg? && pattern.has_rest_arg_id # not `*`, but `*rest`
5616 * unless pattern.rest_arg.match?(d[pattern.pre_args_num, rest_num])
5621 * pattern.post_args_num.each do |i|
5622 * j = pattern.pre_args_num + i
5624 * unless pattern.post_args[i].match?(d[j])
5631 * FrozenCore.raise TypeError
5635 struct rb_ary_pattern_info
*apinfo
= node
->nd_apinfo
;
5636 const NODE
*args
= apinfo
->pre_args
;
5637 const int pre_args_num
= apinfo
->pre_args
? rb_long2int(apinfo
->pre_args
->nd_alen
) : 0;
5638 const int post_args_num
= apinfo
->post_args
? rb_long2int(apinfo
->post_args
->nd_alen
) : 0;
5640 const int min_argc
= pre_args_num
+ post_args_num
;
5641 const int use_rest_num
= apinfo
->rest_arg
&& (NODE_NAMED_REST_P(apinfo
->rest_arg
) ||
5642 (!NODE_NAMED_REST_P(apinfo
->rest_arg
) && post_args_num
> 0));
5644 LABEL
*match_failed
, *type_error
;
5646 match_failed
= NEW_LABEL(line
);
5647 type_error
= NEW_LABEL(line
);
5650 ADD_INSN1(ret
, line
, putobject
, INT2FIX(0)); /* allocate stack for rest_num */
5651 ADD_INSN(ret
, line
, swap
);
5654 if (node
->nd_pconst
) {
5655 ADD_INSN(ret
, line
, dup
);
5656 CHECK(COMPILE(ret
, "constant", node
->nd_pconst
));
5657 ADD_INSN1(ret
, line
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_CASE
));
5658 ADD_INSNL(ret
, line
, branchunless
, match_failed
);
5661 ADD_INSN(ret
, line
, dup
);
5662 ADD_INSN1(ret
, line
, putobject
, ID2SYM(rb_intern("deconstruct")));
5663 ADD_SEND(ret
, line
, idRespond_to
, INT2FIX(1));
5664 ADD_INSNL(ret
, line
, branchunless
, match_failed
);
5666 ADD_SEND(ret
, line
, rb_intern("deconstruct"), INT2FIX(0));
5668 ADD_INSN(ret
, line
, dup
);
5669 ADD_INSN1(ret
, line
, checktype
, INT2FIX(T_ARRAY
));
5670 ADD_INSNL(ret
, line
, branchunless
, type_error
);
5672 ADD_INSN(ret
, line
, dup
);
5673 ADD_SEND(ret
, line
, idLength
, INT2FIX(0));
5674 ADD_INSN1(ret
, line
, putobject
, INT2FIX(min_argc
));
5675 ADD_SEND(ret
, line
, apinfo
->rest_arg
? idGE
: idEq
, INT2FIX(1));
5676 ADD_INSNL(ret
, line
, branchunless
, match_failed
);
5678 for (i
= 0; i
< pre_args_num
; i
++) {
5679 ADD_INSN(ret
, line
, dup
);
5680 ADD_INSN1(ret
, line
, putobject
, INT2FIX(i
));
5681 ADD_SEND(ret
, line
, idAREF
, INT2FIX(1));
5682 CHECK(iseq_compile_pattern_match(iseq
, ret
, args
->nd_head
, match_failed
, in_alt_pattern
));
5683 args
= args
->nd_next
;
5686 if (apinfo
->rest_arg
) {
5687 if (NODE_NAMED_REST_P(apinfo
->rest_arg
)) {
5688 ADD_INSN(ret
, line
, dup
);
5689 ADD_INSN1(ret
, line
, putobject
, INT2FIX(pre_args_num
));
5690 ADD_INSN1(ret
, line
, topn
, INT2FIX(1));
5691 ADD_SEND(ret
, line
, idLength
, INT2FIX(0));
5692 ADD_INSN1(ret
, line
, putobject
, INT2FIX(min_argc
));
5693 ADD_SEND(ret
, line
, idMINUS
, INT2FIX(1));
5694 ADD_INSN1(ret
, line
, setn
, INT2FIX(4));
5695 ADD_SEND(ret
, line
, idAREF
, INT2FIX(2));
5697 CHECK(iseq_compile_pattern_match(iseq
, ret
, apinfo
->rest_arg
, match_failed
, in_alt_pattern
));
5700 if (post_args_num
> 0) {
5701 ADD_INSN(ret
, line
, dup
);
5702 ADD_SEND(ret
, line
, idLength
, INT2FIX(0));
5703 ADD_INSN1(ret
, line
, putobject
, INT2FIX(min_argc
));
5704 ADD_SEND(ret
, line
, idMINUS
, INT2FIX(1));
5705 ADD_INSN1(ret
, line
, setn
, INT2FIX(2));
5706 ADD_INSN(ret
, line
, pop
);
5711 args
= apinfo
->post_args
;
5712 for (i
= 0; i
< post_args_num
; i
++) {
5713 ADD_INSN(ret
, line
, dup
);
5715 ADD_INSN1(ret
, line
, putobject
, INT2FIX(pre_args_num
+ i
));
5716 ADD_INSN1(ret
, line
, topn
, INT2FIX(3));
5717 ADD_SEND(ret
, line
, idPLUS
, INT2FIX(1));
5719 ADD_SEND(ret
, line
, idAREF
, INT2FIX(1));
5720 CHECK(iseq_compile_pattern_match(iseq
, ret
, args
->nd_head
, match_failed
, in_alt_pattern
));
5721 args
= args
->nd_next
;
5724 ADD_INSN(ret
, line
, pop
);
5726 ADD_INSN(ret
, line
, pop
);
5728 ADD_INSNL(ret
, line
, jump
, matched
);
5730 ADD_LABEL(ret
, type_error
);
5731 ADD_INSN1(ret
, line
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
5732 ADD_INSN1(ret
, line
, putobject
, rb_eTypeError
);
5733 ADD_INSN1(ret
, line
, putobject
, rb_fstring_lit("deconstruct must return Array"));
5734 ADD_SEND(ret
, line
, id_core_raise
, INT2FIX(2));
5736 ADD_LABEL(ret
, match_failed
);
5737 ADD_INSN(ret
, line
, pop
);
5739 ADD_INSN(ret
, line
, pop
);
5741 ADD_INSNL(ret
, line
, jump
, unmatched
);
5748 * if pattern.has_kw_args_node? && !pattern.has_kw_rest_arg_node?
5749 * keys = pattern.kw_args_node.keys
5751 * if pattern.has_constant_node?
5752 * unless pattern.constant === obj
5756 * unless obj.respond_to?(:deconstruct_keys)
5759 * d = obj.deconstruct_keys(keys)
5763 * if pattern.has_kw_rest_arg_node?
5766 * if pattern.has_kw_args_node?
5767 * pattern.kw_args_node.each |k,|
5772 * pattern.kw_args_node.each |k, pat|
5773 * if pattern.has_kw_rest_arg_node?
5774 * unless pat.match?(d.delete(k))
5778 * unless pat.match?(d[k])
5788 * if pattern.has_kw_rest_arg_node?
5789 * if pattern.no_rest_keyword?
5794 * unless pattern.kw_rest_arg_node.match?(d)
5802 * FrozenCore.raise TypeError
5806 LABEL
*match_failed
, *type_error
;
5809 match_failed
= NEW_LABEL(line
);
5810 type_error
= NEW_LABEL(line
);
5812 if (node
->nd_pkwargs
&& !node
->nd_pkwrestarg
) {
5813 const NODE
*kw_args
= node
->nd_pkwargs
->nd_head
;
5814 keys
= rb_ary_new_capa(kw_args
? kw_args
->nd_alen
/2 : 0);
5816 rb_ary_push(keys
, kw_args
->nd_head
->nd_lit
);
5817 kw_args
= kw_args
->nd_next
->nd_next
;
5821 if (node
->nd_pconst
) {
5822 ADD_INSN(ret
, line
, dup
);
5823 CHECK(COMPILE(ret
, "constant", node
->nd_pconst
));
5824 ADD_INSN1(ret
, line
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_CASE
));
5825 ADD_INSNL(ret
, line
, branchunless
, match_failed
);
5828 ADD_INSN(ret
, line
, dup
);
5829 ADD_INSN1(ret
, line
, putobject
, ID2SYM(rb_intern("deconstruct_keys")));
5830 ADD_SEND(ret
, line
, idRespond_to
, INT2FIX(1));
5831 ADD_INSNL(ret
, line
, branchunless
, match_failed
);
5834 ADD_INSN(ret
, line
, putnil
);
5837 ADD_INSN1(ret
, line
, duparray
, keys
);
5838 RB_OBJ_WRITTEN(iseq
, Qundef
, rb_obj_hide(keys
));
5840 ADD_SEND(ret
, line
, rb_intern("deconstruct_keys"), INT2FIX(1));
5842 ADD_INSN(ret
, line
, dup
);
5843 ADD_INSN1(ret
, line
, checktype
, INT2FIX(T_HASH
));
5844 ADD_INSNL(ret
, line
, branchunless
, type_error
);
5846 if (node
->nd_pkwrestarg
) {
5847 ADD_SEND(ret
, line
, rb_intern("dup"), INT2FIX(0));
5850 if (node
->nd_pkwargs
) {
5854 args
= node
->nd_pkwargs
->nd_head
;
5856 DECL_ANCHOR(match_values
);
5857 INIT_ANCHOR(match_values
);
5858 keys_num
= rb_long2int(args
->nd_alen
) / 2;
5859 for (i
= 0; i
< keys_num
; i
++) {
5860 NODE
*key_node
= args
->nd_head
;
5861 NODE
*value_node
= args
->nd_next
->nd_head
;
5864 if (nd_type(key_node
) != NODE_LIT
) {
5865 UNKNOWN_NODE("NODE_IN", key_node
, COMPILE_NG
);
5867 key
= key_node
->nd_lit
;
5869 ADD_INSN(ret
, line
, dup
);
5870 ADD_INSN1(ret
, line
, putobject
, key
);
5871 ADD_SEND(ret
, line
, rb_intern("key?"), INT2FIX(1));
5872 ADD_INSNL(ret
, line
, branchunless
, match_failed
);
5874 ADD_INSN(match_values
, line
, dup
);
5875 ADD_INSN1(match_values
, line
, putobject
, key
);
5876 ADD_SEND(match_values
, line
, node
->nd_pkwrestarg
? rb_intern("delete") : idAREF
, INT2FIX(1));
5877 CHECK(iseq_compile_pattern_match(iseq
, match_values
, value_node
, match_failed
, in_alt_pattern
));
5878 args
= args
->nd_next
->nd_next
;
5880 ADD_SEQ(ret
, match_values
);
5884 ADD_INSN(ret
, line
, dup
);
5885 ADD_SEND(ret
, line
, idEmptyP
, INT2FIX(0));
5886 ADD_INSNL(ret
, line
, branchunless
, match_failed
);
5889 if (node
->nd_pkwrestarg
) {
5890 if (node
->nd_pkwrestarg
== NODE_SPECIAL_NO_REST_KEYWORD
) {
5891 ADD_INSN(ret
, line
, dup
);
5892 ADD_SEND(ret
, line
, idEmptyP
, INT2FIX(0));
5893 ADD_INSNL(ret
, line
, branchunless
, match_failed
);
5896 ADD_INSN(ret
, line
, dup
);
5897 CHECK(iseq_compile_pattern_match(iseq
, ret
, node
->nd_pkwrestarg
, match_failed
, in_alt_pattern
));
5901 ADD_INSN(ret
, line
, pop
);
5902 ADD_INSNL(ret
, line
, jump
, matched
);
5904 ADD_LABEL(ret
, type_error
);
5905 ADD_INSN1(ret
, line
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
5906 ADD_INSN1(ret
, line
, putobject
, rb_eTypeError
);
5907 ADD_INSN1(ret
, line
, putobject
, rb_fstring_lit("deconstruct_keys must return Hash"));
5908 ADD_SEND(ret
, line
, id_core_raise
, INT2FIX(2));
5910 ADD_LABEL(ret
, match_failed
);
5911 ADD_INSN(ret
, line
, pop
);
5912 ADD_INSNL(ret
, line
, jump
, unmatched
);
5935 CHECK(COMPILE(ret
, "case in literal", node
));
5936 ADD_INSN1(ret
, line
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_CASE
));
5937 ADD_INSNL(ret
, line
, branchif
, matched
);
5938 ADD_INSNL(ret
, line
, jump
, unmatched
);
5941 struct rb_iseq_constant_body
*const body
= iseq
->body
;
5942 ID id
= node
->nd_vid
;
5943 int idx
= body
->local_iseq
->body
->local_table_size
- get_local_var_idx(iseq
, id
);
5945 if (in_alt_pattern
) {
5946 const char *name
= rb_id2name(id
);
5947 if (name
&& strlen(name
) > 0 && name
[0] != '_') {
5948 COMPILE_ERROR(ERROR_ARGS
"illegal variable in alternative pattern (%"PRIsVALUE
")",
5954 ADD_SETLOCAL(ret
, line
, idx
, get_lvar_level(iseq
));
5955 ADD_INSNL(ret
, line
, jump
, matched
);
5959 case NODE_DASGN_CURR
: {
5961 ID id
= node
->nd_vid
;
5963 idx
= get_dyna_var_idx(iseq
, id
, &lv
, &ls
);
5965 if (in_alt_pattern
) {
5966 const char *name
= rb_id2name(id
);
5967 if (name
&& strlen(name
) > 0 && name
[0] != '_') {
5968 COMPILE_ERROR(ERROR_ARGS
"illegal variable in alternative pattern (%"PRIsVALUE
")",
5975 COMPILE_ERROR(ERROR_ARGS
"NODE_DASGN(_CURR): unknown id (%"PRIsVALUE
")",
5979 ADD_SETLOCAL(ret
, line
, ls
- idx
, lv
);
5980 ADD_INSNL(ret
, line
, jump
, matched
);
5985 LABEL
*match_failed
;
5986 match_failed
= unmatched
;
5987 CHECK(iseq_compile_pattern_match(iseq
, ret
, node
->nd_body
, unmatched
, in_alt_pattern
));
5988 CHECK(COMPILE(ret
, "case in if", node
->nd_cond
));
5989 if (nd_type(node
) == NODE_IF
) {
5990 ADD_INSNL(ret
, line
, branchunless
, match_failed
);
5993 ADD_INSNL(ret
, line
, branchif
, match_failed
);
5995 ADD_INSNL(ret
, line
, jump
, matched
);
6000 LABEL
*match_failed
;
6001 match_failed
= NEW_LABEL(line
);
6004 if (! (nd_type(n
) == NODE_LIST
&& n
->nd_alen
== 2)) {
6005 COMPILE_ERROR(ERROR_ARGS
"unexpected node");
6009 ADD_INSN(ret
, line
, dup
);
6010 CHECK(iseq_compile_pattern_match(iseq
, ret
, n
->nd_head
, match_failed
, in_alt_pattern
));
6011 CHECK(iseq_compile_pattern_each(iseq
, ret
, n
->nd_next
->nd_head
, matched
, match_failed
, in_alt_pattern
));
6013 ADD_LABEL(ret
, match_failed
);
6014 ADD_INSN(ret
, line
, pop
);
6015 ADD_INSNL(ret
, line
, jump
, unmatched
);
6019 LABEL
*match_succeeded
, *fin
;
6020 match_succeeded
= NEW_LABEL(line
);
6021 fin
= NEW_LABEL(line
);
6023 ADD_INSN(ret
, line
, dup
);
6024 CHECK(iseq_compile_pattern_each(iseq
, ret
, node
->nd_1st
, match_succeeded
, fin
, TRUE
));
6025 ADD_LABEL(ret
, match_succeeded
);
6026 ADD_INSN(ret
, line
, pop
);
6027 ADD_INSNL(ret
, line
, jump
, matched
);
6028 ADD_LABEL(ret
, fin
);
6029 CHECK(iseq_compile_pattern_each(iseq
, ret
, node
->nd_2nd
, matched
, unmatched
, TRUE
));
6033 UNKNOWN_NODE("NODE_IN", node
, COMPILE_NG
);
6039 iseq_compile_pattern_match(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, LABEL
*unmatched
, int in_alt_pattern
)
6041 LABEL
*fin
= NEW_LABEL(nd_line(node
));
6042 CHECK(iseq_compile_pattern_each(iseq
, ret
, node
, fin
, unmatched
, in_alt_pattern
));
6043 ADD_LABEL(ret
, fin
);
6048 compile_case3(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const orig_node
, int popped
)
6050 const NODE
*pattern
;
6051 const NODE
*node
= orig_node
;
6052 LABEL
*endlabel
, *elselabel
;
6054 DECL_ANCHOR(body_seq
);
6055 DECL_ANCHOR(cond_seq
);
6056 int line
, lineno
, column
, last_lineno
, last_column
;
6057 enum node_type type
;
6061 INIT_ANCHOR(body_seq
);
6062 INIT_ANCHOR(cond_seq
);
6064 CHECK(COMPILE(head
, "case base", node
->nd_head
));
6066 DECL_BRANCH_BASE(branches
, nd_first_lineno(node
), nd_first_column(node
), nd_last_lineno(node
), nd_last_column(node
), "case");
6068 node
= node
->nd_body
;
6069 EXPECT_NODE("NODE_CASE3", node
, NODE_IN
, COMPILE_NG
);
6070 type
= nd_type(node
);
6071 line
= nd_line(node
);
6072 lineno
= nd_first_lineno(node
);
6073 column
= nd_first_column(node
);
6074 last_lineno
= nd_last_lineno(node
);
6075 last_column
= nd_last_column(node
);
6077 endlabel
= NEW_LABEL(line
);
6078 elselabel
= NEW_LABEL(line
);
6080 ADD_SEQ(ret
, head
); /* case VAL */
6082 while (type
== NODE_IN
) {
6085 l1
= NEW_LABEL(line
);
6086 ADD_LABEL(body_seq
, l1
);
6087 ADD_INSN(body_seq
, line
, pop
);
6088 ADD_TRACE_BRANCH_COVERAGE(
6090 node
->nd_body
? nd_first_lineno(node
->nd_body
) : lineno
,
6091 node
->nd_body
? nd_first_column(node
->nd_body
) : column
,
6092 node
->nd_body
? nd_last_lineno(node
->nd_body
) : last_lineno
,
6093 node
->nd_body
? nd_last_column(node
->nd_body
) : last_column
,
6096 CHECK(COMPILE_(body_seq
, "in body", node
->nd_body
, popped
));
6097 ADD_INSNL(body_seq
, line
, jump
, endlabel
);
6099 pattern
= node
->nd_head
;
6101 int pat_line
= nd_line(pattern
);
6102 LABEL
*next_pat
= NEW_LABEL(pat_line
);
6103 ADD_INSN (cond_seq
, pat_line
, dup
);
6104 CHECK(iseq_compile_pattern_each(iseq
, cond_seq
, pattern
, l1
, next_pat
, FALSE
));
6105 ADD_LABEL(cond_seq
, next_pat
);
6108 COMPILE_ERROR(ERROR_ARGS
"unexpected node");
6112 node
= node
->nd_next
;
6116 type
= nd_type(node
);
6117 line
= nd_line(node
);
6118 lineno
= nd_first_lineno(node
);
6119 column
= nd_first_column(node
);
6120 last_lineno
= nd_last_lineno(node
);
6121 last_column
= nd_last_column(node
);
6125 ADD_LABEL(cond_seq
, elselabel
);
6126 ADD_INSN(cond_seq
, line
, pop
);
6127 ADD_TRACE_BRANCH_COVERAGE(cond_seq
, nd_first_lineno(node
), nd_first_column(node
), nd_last_lineno(node
), nd_last_column(node
), "else", branches
);
6128 CHECK(COMPILE_(cond_seq
, "else", node
, popped
));
6129 ADD_INSNL(cond_seq
, line
, jump
, endlabel
);
6132 debugs("== else (implicit)\n");
6133 ADD_LABEL(cond_seq
, elselabel
);
6134 ADD_TRACE_BRANCH_COVERAGE(cond_seq
, nd_first_lineno(orig_node
), nd_first_column(orig_node
), nd_last_lineno(orig_node
), nd_last_column(orig_node
), "else", branches
);
6135 ADD_INSN1(cond_seq
, nd_line(orig_node
), putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
6136 ADD_INSN1(cond_seq
, nd_line(orig_node
), putobject
, rb_eNoMatchingPatternError
);
6137 ADD_INSN1(cond_seq
, nd_line(orig_node
), topn
, INT2FIX(2));
6138 ADD_SEND(cond_seq
, nd_line(orig_node
), id_core_raise
, INT2FIX(2));
6139 ADD_INSN(cond_seq
, nd_line(orig_node
), pop
);
6140 ADD_INSN(cond_seq
, nd_line(orig_node
), pop
);
6142 ADD_INSN(cond_seq
, nd_line(orig_node
), putnil
);
6144 ADD_INSNL(cond_seq
, nd_line(orig_node
), jump
, endlabel
);
6147 ADD_SEQ(ret
, cond_seq
);
6148 ADD_SEQ(ret
, body_seq
);
6149 ADD_LABEL(ret
, endlabel
);
6154 compile_loop(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
, const enum node_type type
)
6156 const int line
= (int)nd_line(node
);
6157 const int lineno
= nd_first_lineno(node
);
6158 const int column
= nd_first_column(node
);
6159 const int last_lineno
= nd_last_lineno(node
);
6160 const int last_column
= nd_last_column(node
);
6161 LABEL
*prev_start_label
= ISEQ_COMPILE_DATA(iseq
)->start_label
;
6162 LABEL
*prev_end_label
= ISEQ_COMPILE_DATA(iseq
)->end_label
;
6163 LABEL
*prev_redo_label
= ISEQ_COMPILE_DATA(iseq
)->redo_label
;
6164 int prev_loopval_popped
= ISEQ_COMPILE_DATA(iseq
)->loopval_popped
;
6165 VALUE branches
= Qfalse
;
6167 struct iseq_compile_data_ensure_node_stack enl
;
6169 LABEL
*next_label
= ISEQ_COMPILE_DATA(iseq
)->start_label
= NEW_LABEL(line
); /* next */
6170 LABEL
*redo_label
= ISEQ_COMPILE_DATA(iseq
)->redo_label
= NEW_LABEL(line
); /* redo */
6171 LABEL
*break_label
= ISEQ_COMPILE_DATA(iseq
)->end_label
= NEW_LABEL(line
); /* break */
6172 LABEL
*end_label
= NEW_LABEL(line
);
6173 LABEL
*adjust_label
= NEW_LABEL(line
);
6175 LABEL
*next_catch_label
= NEW_LABEL(line
);
6176 LABEL
*tmp_label
= NULL
;
6178 ISEQ_COMPILE_DATA(iseq
)->loopval_popped
= 0;
6179 push_ensure_entry(iseq
, &enl
, NULL
, NULL
);
6181 if (node
->nd_state
== 1) {
6182 ADD_INSNL(ret
, line
, jump
, next_label
);
6185 tmp_label
= NEW_LABEL(line
);
6186 ADD_INSNL(ret
, line
, jump
, tmp_label
);
6188 ADD_LABEL(ret
, adjust_label
);
6189 ADD_INSN(ret
, line
, putnil
);
6190 ADD_LABEL(ret
, next_catch_label
);
6191 ADD_INSN(ret
, line
, pop
);
6192 ADD_INSNL(ret
, line
, jump
, next_label
);
6193 if (tmp_label
) ADD_LABEL(ret
, tmp_label
);
6195 ADD_LABEL(ret
, redo_label
);
6196 DECL_BRANCH_BASE(branches
, lineno
, column
, last_lineno
, last_column
, type
== NODE_WHILE
? "while" : "until");
6197 ADD_TRACE_BRANCH_COVERAGE(
6199 node
->nd_body
? nd_first_lineno(node
->nd_body
) : lineno
,
6200 node
->nd_body
? nd_first_column(node
->nd_body
) : column
,
6201 node
->nd_body
? nd_last_lineno(node
->nd_body
) : last_lineno
,
6202 node
->nd_body
? nd_last_column(node
->nd_body
) : last_column
,
6205 CHECK(COMPILE_POPPED(ret
, "while body", node
->nd_body
));
6206 ADD_LABEL(ret
, next_label
); /* next */
6208 if (type
== NODE_WHILE
) {
6209 compile_branch_condition(iseq
, ret
, node
->nd_cond
,
6210 redo_label
, end_label
);
6214 compile_branch_condition(iseq
, ret
, node
->nd_cond
,
6215 end_label
, redo_label
);
6218 ADD_LABEL(ret
, end_label
);
6219 ADD_ADJUST_RESTORE(ret
, adjust_label
);
6221 if (node
->nd_state
== Qundef
) {
6222 /* ADD_INSN(ret, line, putundef); */
6223 COMPILE_ERROR(ERROR_ARGS
"unsupported: putundef");
6227 ADD_INSN(ret
, line
, putnil
);
6230 ADD_LABEL(ret
, break_label
); /* break */
6233 ADD_INSN(ret
, line
, pop
);
6236 ADD_CATCH_ENTRY(CATCH_TYPE_BREAK
, redo_label
, break_label
, NULL
,
6238 ADD_CATCH_ENTRY(CATCH_TYPE_NEXT
, redo_label
, break_label
, NULL
,
6240 ADD_CATCH_ENTRY(CATCH_TYPE_REDO
, redo_label
, break_label
, NULL
,
6241 ISEQ_COMPILE_DATA(iseq
)->redo_label
);
6243 ISEQ_COMPILE_DATA(iseq
)->start_label
= prev_start_label
;
6244 ISEQ_COMPILE_DATA(iseq
)->end_label
= prev_end_label
;
6245 ISEQ_COMPILE_DATA(iseq
)->redo_label
= prev_redo_label
;
6246 ISEQ_COMPILE_DATA(iseq
)->loopval_popped
= prev_loopval_popped
;
6247 ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
= ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
->prev
;
6252 compile_iter(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
6254 const int line
= nd_line(node
);
6255 const rb_iseq_t
*prevblock
= ISEQ_COMPILE_DATA(iseq
)->current_block
;
6256 LABEL
*retry_label
= NEW_LABEL(line
);
6257 LABEL
*retry_end_l
= NEW_LABEL(line
);
6258 const rb_iseq_t
*child_iseq
;
6260 ADD_LABEL(ret
, retry_label
);
6261 if (nd_type(node
) == NODE_FOR
) {
6262 CHECK(COMPILE(ret
, "iter caller (for)", node
->nd_iter
));
6264 ISEQ_COMPILE_DATA(iseq
)->current_block
= child_iseq
=
6265 NEW_CHILD_ISEQ(node
->nd_body
, make_name_for_block(iseq
),
6266 ISEQ_TYPE_BLOCK
, line
);
6267 ADD_SEND_WITH_BLOCK(ret
, line
, idEach
, INT2FIX(0), child_iseq
);
6270 ISEQ_COMPILE_DATA(iseq
)->current_block
= child_iseq
=
6271 NEW_CHILD_ISEQ(node
->nd_body
, make_name_for_block(iseq
),
6272 ISEQ_TYPE_BLOCK
, line
);
6273 CHECK(COMPILE(ret
, "iter caller", node
->nd_iter
));
6275 ADD_LABEL(ret
, retry_end_l
);
6278 ADD_INSN(ret
, line
, pop
);
6281 ISEQ_COMPILE_DATA(iseq
)->current_block
= prevblock
;
6283 ADD_CATCH_ENTRY(CATCH_TYPE_BREAK
, retry_label
, retry_end_l
, child_iseq
, retry_end_l
);
6288 compile_for_masgn(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
6290 /* massign to var in "for"
6291 * (args.length == 1 && Array.try_convert(args[0])) || args
6293 const int line
= nd_line(node
);
6294 const NODE
*var
= node
->nd_var
;
6295 LABEL
*not_single
= NEW_LABEL(nd_line(var
));
6296 LABEL
*not_ary
= NEW_LABEL(nd_line(var
));
6297 CHECK(COMPILE(ret
, "for var", var
));
6298 ADD_INSN(ret
, line
, dup
);
6299 ADD_CALL(ret
, line
, idLength
, INT2FIX(0));
6300 ADD_INSN1(ret
, line
, putobject
, INT2FIX(1));
6301 ADD_CALL(ret
, line
, idEq
, INT2FIX(1));
6302 ADD_INSNL(ret
, line
, branchunless
, not_single
);
6303 ADD_INSN(ret
, line
, dup
);
6304 ADD_INSN1(ret
, line
, putobject
, INT2FIX(0));
6305 ADD_CALL(ret
, line
, idAREF
, INT2FIX(1));
6306 ADD_INSN1(ret
, line
, putobject
, rb_cArray
);
6307 ADD_INSN(ret
, line
, swap
);
6308 ADD_CALL(ret
, line
, rb_intern("try_convert"), INT2FIX(1));
6309 ADD_INSN(ret
, line
, dup
);
6310 ADD_INSNL(ret
, line
, branchunless
, not_ary
);
6311 ADD_INSN(ret
, line
, swap
);
6312 ADD_LABEL(ret
, not_ary
);
6313 ADD_INSN(ret
, line
, pop
);
6314 ADD_LABEL(ret
, not_single
);
6319 compile_break(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
6321 const int line
= nd_line(node
);
6322 unsigned long throw_flag
= 0;
6324 if (ISEQ_COMPILE_DATA(iseq
)->redo_label
!= 0) {
6326 LABEL
*splabel
= NEW_LABEL(0);
6327 ADD_LABEL(ret
, splabel
);
6328 ADD_ADJUST(ret
, line
, ISEQ_COMPILE_DATA(iseq
)->redo_label
);
6329 CHECK(COMPILE_(ret
, "break val (while/until)", node
->nd_stts
,
6330 ISEQ_COMPILE_DATA(iseq
)->loopval_popped
));
6331 add_ensure_iseq(ret
, iseq
, 0);
6332 ADD_INSNL(ret
, line
, jump
, ISEQ_COMPILE_DATA(iseq
)->end_label
);
6333 ADD_ADJUST_RESTORE(ret
, splabel
);
6336 ADD_INSN(ret
, line
, putnil
);
6339 else if (iseq
->body
->type
== ISEQ_TYPE_BLOCK
) {
6341 /* escape from block */
6342 CHECK(COMPILE(ret
, "break val (block)", node
->nd_stts
));
6343 ADD_INSN1(ret
, line
, throw, INT2FIX(throw_flag
| TAG_BREAK
));
6345 ADD_INSN(ret
, line
, pop
);
6348 else if (iseq
->body
->type
== ISEQ_TYPE_EVAL
) {
6350 COMPILE_ERROR(ERROR_ARGS
"Can't escape from eval with break");
6354 const rb_iseq_t
*ip
= iseq
->body
->parent_iseq
;
6357 if (!ISEQ_COMPILE_DATA(ip
)) {
6362 if (ISEQ_COMPILE_DATA(ip
)->redo_label
!= 0) {
6363 throw_flag
= VM_THROW_NO_ESCAPE_FLAG
;
6366 else if (ip
->body
->type
== ISEQ_TYPE_BLOCK
) {
6369 else if (ip
->body
->type
== ISEQ_TYPE_EVAL
) {
6373 ip
= ip
->body
->parent_iseq
;
6375 COMPILE_ERROR(ERROR_ARGS
"Invalid break");
6382 compile_next(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
6384 const int line
= nd_line(node
);
6385 unsigned long throw_flag
= 0;
6387 if (ISEQ_COMPILE_DATA(iseq
)->redo_label
!= 0) {
6388 LABEL
*splabel
= NEW_LABEL(0);
6389 debugs("next in while loop\n");
6390 ADD_LABEL(ret
, splabel
);
6391 CHECK(COMPILE(ret
, "next val/valid syntax?", node
->nd_stts
));
6392 add_ensure_iseq(ret
, iseq
, 0);
6393 ADD_ADJUST(ret
, line
, ISEQ_COMPILE_DATA(iseq
)->redo_label
);
6394 ADD_INSNL(ret
, line
, jump
, ISEQ_COMPILE_DATA(iseq
)->start_label
);
6395 ADD_ADJUST_RESTORE(ret
, splabel
);
6397 ADD_INSN(ret
, line
, putnil
);
6400 else if (ISEQ_COMPILE_DATA(iseq
)->end_label
) {
6401 LABEL
*splabel
= NEW_LABEL(0);
6402 debugs("next in block\n");
6403 ADD_LABEL(ret
, splabel
);
6404 ADD_ADJUST(ret
, line
, ISEQ_COMPILE_DATA(iseq
)->start_label
);
6405 CHECK(COMPILE(ret
, "next val", node
->nd_stts
));
6406 add_ensure_iseq(ret
, iseq
, 0);
6407 ADD_INSNL(ret
, line
, jump
, ISEQ_COMPILE_DATA(iseq
)->end_label
);
6408 ADD_ADJUST_RESTORE(ret
, splabel
);
6409 splabel
->unremovable
= FALSE
;
6412 ADD_INSN(ret
, line
, putnil
);
6415 else if (iseq
->body
->type
== ISEQ_TYPE_EVAL
) {
6417 COMPILE_ERROR(ERROR_ARGS
"Can't escape from eval with next");
6421 const rb_iseq_t
*ip
= iseq
;
6424 if (!ISEQ_COMPILE_DATA(ip
)) {
6429 throw_flag
= VM_THROW_NO_ESCAPE_FLAG
;
6430 if (ISEQ_COMPILE_DATA(ip
)->redo_label
!= 0) {
6434 else if (ip
->body
->type
== ISEQ_TYPE_BLOCK
) {
6437 else if (ip
->body
->type
== ISEQ_TYPE_EVAL
) {
6441 ip
= ip
->body
->parent_iseq
;
6444 CHECK(COMPILE(ret
, "next val", node
->nd_stts
));
6445 ADD_INSN1(ret
, line
, throw, INT2FIX(throw_flag
| TAG_NEXT
));
6448 ADD_INSN(ret
, line
, pop
);
6452 COMPILE_ERROR(ERROR_ARGS
"Invalid next");
6460 compile_redo(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
6462 const int line
= nd_line(node
);
6464 if (ISEQ_COMPILE_DATA(iseq
)->redo_label
) {
6465 LABEL
*splabel
= NEW_LABEL(0);
6466 debugs("redo in while");
6467 ADD_LABEL(ret
, splabel
);
6468 ADD_ADJUST(ret
, line
, ISEQ_COMPILE_DATA(iseq
)->redo_label
);
6469 add_ensure_iseq(ret
, iseq
, 0);
6470 ADD_INSNL(ret
, line
, jump
, ISEQ_COMPILE_DATA(iseq
)->redo_label
);
6471 ADD_ADJUST_RESTORE(ret
, splabel
);
6473 ADD_INSN(ret
, line
, putnil
);
6476 else if (iseq
->body
->type
== ISEQ_TYPE_EVAL
) {
6478 COMPILE_ERROR(ERROR_ARGS
"Can't escape from eval with redo");
6481 else if (ISEQ_COMPILE_DATA(iseq
)->start_label
) {
6482 LABEL
*splabel
= NEW_LABEL(0);
6484 debugs("redo in block");
6485 ADD_LABEL(ret
, splabel
);
6486 add_ensure_iseq(ret
, iseq
, 0);
6487 ADD_ADJUST(ret
, line
, ISEQ_COMPILE_DATA(iseq
)->start_label
);
6488 ADD_INSNL(ret
, line
, jump
, ISEQ_COMPILE_DATA(iseq
)->start_label
);
6489 ADD_ADJUST_RESTORE(ret
, splabel
);
6492 ADD_INSN(ret
, line
, putnil
);
6496 const rb_iseq_t
*ip
= iseq
;
6499 if (!ISEQ_COMPILE_DATA(ip
)) {
6504 if (ISEQ_COMPILE_DATA(ip
)->redo_label
!= 0) {
6507 else if (ip
->body
->type
== ISEQ_TYPE_BLOCK
) {
6510 else if (ip
->body
->type
== ISEQ_TYPE_EVAL
) {
6514 ip
= ip
->body
->parent_iseq
;
6517 ADD_INSN(ret
, line
, putnil
);
6518 ADD_INSN1(ret
, line
, throw, INT2FIX(VM_THROW_NO_ESCAPE_FLAG
| TAG_REDO
));
6521 ADD_INSN(ret
, line
, pop
);
6525 COMPILE_ERROR(ERROR_ARGS
"Invalid redo");
6533 compile_retry(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
6535 const int line
= nd_line(node
);
6537 if (iseq
->body
->type
== ISEQ_TYPE_RESCUE
) {
6538 ADD_INSN(ret
, line
, putnil
);
6539 ADD_INSN1(ret
, line
, throw, INT2FIX(TAG_RETRY
));
6542 ADD_INSN(ret
, line
, pop
);
6546 COMPILE_ERROR(ERROR_ARGS
"Invalid retry");
6553 compile_rescue(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
6555 const int line
= nd_line(node
);
6556 LABEL
*lstart
= NEW_LABEL(line
);
6557 LABEL
*lend
= NEW_LABEL(line
);
6558 LABEL
*lcont
= NEW_LABEL(line
);
6559 const rb_iseq_t
*rescue
= NEW_CHILD_ISEQ(node
->nd_resq
,
6560 rb_str_concat(rb_str_new2("rescue in "), iseq
->body
->location
.label
),
6561 ISEQ_TYPE_RESCUE
, line
);
6563 lstart
->rescued
= LABEL_RESCUE_BEG
;
6564 lend
->rescued
= LABEL_RESCUE_END
;
6565 ADD_LABEL(ret
, lstart
);
6566 CHECK(COMPILE(ret
, "rescue head", node
->nd_head
));
6567 ADD_LABEL(ret
, lend
);
6568 if (node
->nd_else
) {
6569 ADD_INSN(ret
, line
, pop
);
6570 CHECK(COMPILE(ret
, "rescue else", node
->nd_else
));
6572 ADD_INSN(ret
, line
, nop
);
6573 ADD_LABEL(ret
, lcont
);
6576 ADD_INSN(ret
, line
, pop
);
6579 /* register catch entry */
6580 ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE
, lstart
, lend
, rescue
, lcont
);
6581 ADD_CATCH_ENTRY(CATCH_TYPE_RETRY
, lend
, lcont
, NULL
, lstart
);
6586 compile_resbody(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
6588 const int line
= nd_line(node
);
6589 const NODE
*resq
= node
;
6591 LABEL
*label_miss
, *label_hit
;
6594 label_miss
= NEW_LABEL(line
);
6595 label_hit
= NEW_LABEL(line
);
6597 narg
= resq
->nd_args
;
6599 switch (nd_type(narg
)) {
6602 ADD_GETLOCAL(ret
, line
, LVAR_ERRINFO
, 0);
6603 CHECK(COMPILE(ret
, "rescue arg", narg
->nd_head
));
6604 ADD_INSN1(ret
, line
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE
));
6605 ADD_INSNL(ret
, line
, branchif
, label_hit
);
6606 narg
= narg
->nd_next
;
6612 ADD_GETLOCAL(ret
, line
, LVAR_ERRINFO
, 0);
6613 CHECK(COMPILE(ret
, "rescue/cond splat", narg
));
6614 ADD_INSN1(ret
, line
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE
| VM_CHECKMATCH_ARRAY
));
6615 ADD_INSNL(ret
, line
, branchif
, label_hit
);
6618 UNKNOWN_NODE("NODE_RESBODY", narg
, COMPILE_NG
);
6622 ADD_GETLOCAL(ret
, line
, LVAR_ERRINFO
, 0);
6623 ADD_INSN1(ret
, line
, putobject
, rb_eStandardError
);
6624 ADD_INSN1(ret
, line
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE
));
6625 ADD_INSNL(ret
, line
, branchif
, label_hit
);
6627 ADD_INSNL(ret
, line
, jump
, label_miss
);
6628 ADD_LABEL(ret
, label_hit
);
6629 CHECK(COMPILE(ret
, "resbody body", resq
->nd_body
));
6630 if (ISEQ_COMPILE_DATA(iseq
)->option
->tailcall_optimization
) {
6631 ADD_INSN(ret
, line
, nop
);
6633 ADD_INSN(ret
, line
, leave
);
6634 ADD_LABEL(ret
, label_miss
);
6635 resq
= resq
->nd_head
;
6641 compile_ensure(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
6643 const int line
= nd_line(node
);
6645 const rb_iseq_t
*ensure
= NEW_CHILD_ISEQ(node
->nd_ensr
,
6646 rb_str_concat(rb_str_new2 ("ensure in "), iseq
->body
->location
.label
),
6647 ISEQ_TYPE_ENSURE
, line
);
6648 LABEL
*lstart
= NEW_LABEL(line
);
6649 LABEL
*lend
= NEW_LABEL(line
);
6650 LABEL
*lcont
= NEW_LABEL(line
);
6653 struct ensure_range er
;
6654 struct iseq_compile_data_ensure_node_stack enl
;
6655 struct ensure_range
*erange
;
6658 CHECK(COMPILE_POPPED(ensr
, "ensure ensr", node
->nd_ensr
));
6660 last_leave
= last
&& IS_INSN(last
) && IS_INSN_ID(last
, leave
);
6665 push_ensure_entry(iseq
, &enl
, &er
, node
->nd_ensr
);
6667 ADD_LABEL(ret
, lstart
);
6668 CHECK(COMPILE_(ret
, "ensure head", node
->nd_head
, (popped
| last_leave
)));
6669 ADD_LABEL(ret
, lend
);
6671 if (!popped
&& last_leave
) ADD_INSN(ret
, line
, putnil
);
6672 ADD_LABEL(ret
, lcont
);
6673 if (last_leave
) ADD_INSN(ret
, line
, pop
);
6675 erange
= ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
->erange
;
6676 if (lstart
->link
.next
!= &lend
->link
) {
6678 ADD_CATCH_ENTRY(CATCH_TYPE_ENSURE
, erange
->begin
, erange
->end
,
6680 erange
= erange
->next
;
6684 ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
= enl
.prev
;
6689 compile_return(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
6691 const int line
= nd_line(node
);
6694 enum iseq_type type
= iseq
->body
->type
;
6695 const rb_iseq_t
*is
= iseq
;
6696 enum iseq_type t
= type
;
6697 const NODE
*retval
= node
->nd_stts
;
6700 while (t
== ISEQ_TYPE_RESCUE
|| t
== ISEQ_TYPE_ENSURE
) {
6701 if (!(is
= is
->body
->parent_iseq
)) break;
6706 case ISEQ_TYPE_MAIN
:
6708 rb_warn("argument of top-level return is ignored");
6711 /* plain top-level, leave directly */
6712 type
= ISEQ_TYPE_METHOD
;
6719 if (type
== ISEQ_TYPE_METHOD
) {
6720 splabel
= NEW_LABEL(0);
6721 ADD_LABEL(ret
, splabel
);
6722 ADD_ADJUST(ret
, line
, 0);
6725 CHECK(COMPILE(ret
, "return nd_stts (return val)", retval
));
6727 if (type
== ISEQ_TYPE_METHOD
) {
6728 add_ensure_iseq(ret
, iseq
, 1);
6729 ADD_TRACE(ret
, RUBY_EVENT_RETURN
);
6730 ADD_INSN(ret
, line
, leave
);
6731 ADD_ADJUST_RESTORE(ret
, splabel
);
6734 ADD_INSN(ret
, line
, putnil
);
6738 ADD_INSN1(ret
, line
, throw, INT2FIX(TAG_RETURN
));
6740 ADD_INSN(ret
, line
, pop
);
6748 compile_evstr(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
6750 CHECK(COMPILE_(ret
, "nd_body", node
, popped
));
6752 if (!popped
&& !all_string_result_p(node
)) {
6753 const int line
= nd_line(node
);
6754 const unsigned int flag
= VM_CALL_FCALL
;
6755 LABEL
*isstr
= NEW_LABEL(line
);
6756 ADD_INSN(ret
, line
, dup
);
6757 ADD_INSN1(ret
, line
, checktype
, INT2FIX(T_STRING
));
6758 ADD_INSNL(ret
, line
, branchif
, isstr
);
6759 ADD_INSN(ret
, line
, dup
);
6760 ADD_SEND_R(ret
, line
, idTo_s
, INT2FIX(0), NULL
, INT2FIX(flag
), NULL
);
6761 ADD_INSN(ret
, line
, tostring
);
6762 ADD_LABEL(ret
, isstr
);
6768 qcall_branch_start(rb_iseq_t
*iseq
, LINK_ANCHOR
*const recv
, VALUE
*branches
, const NODE
*node
, int line
)
6770 LABEL
*else_label
= NEW_LABEL(line
);
6771 const int first_lineno
= nd_first_lineno(node
), first_column
= nd_first_column(node
);
6772 const int last_lineno
= nd_last_lineno(node
), last_column
= nd_last_column(node
);
6775 DECL_BRANCH_BASE(br
, first_lineno
, first_column
, last_lineno
, last_column
, "&.");
6777 ADD_INSN(recv
, line
, dup
);
6778 ADD_INSNL(recv
, line
, branchnil
, else_label
);
6779 ADD_TRACE_BRANCH_COVERAGE(recv
, first_lineno
, first_column
, last_lineno
, last_column
, "then", br
);
6784 qcall_branch_end(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, LABEL
*else_label
, VALUE branches
, const NODE
*node
, int line
)
6787 if (!else_label
) return;
6788 end_label
= NEW_LABEL(line
);
6789 ADD_INSNL(ret
, line
, jump
, end_label
);
6790 ADD_LABEL(ret
, else_label
);
6791 ADD_TRACE_BRANCH_COVERAGE(ret
, nd_first_lineno(node
), nd_first_column(node
), nd_last_lineno(node
), nd_last_column(node
),
6793 ADD_LABEL(ret
, end_label
);
6797 compile_call_precheck_freeze(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int line
, int popped
)
6799 /* optimization shortcut
6800 * "literal".freeze -> opt_str_freeze("literal")
6802 if (node
->nd_recv
&& nd_type(node
->nd_recv
) == NODE_STR
&&
6803 (node
->nd_mid
== idFreeze
|| node
->nd_mid
== idUMinus
) &&
6804 node
->nd_args
== NULL
&&
6805 ISEQ_COMPILE_DATA(iseq
)->current_block
== NULL
&&
6806 ISEQ_COMPILE_DATA(iseq
)->option
->specialized_instruction
) {
6807 VALUE str
= rb_fstring(node
->nd_recv
->nd_lit
);
6808 if (node
->nd_mid
== idUMinus
) {
6809 ADD_INSN2(ret
, line
, opt_str_uminus
, str
,
6810 new_callinfo(iseq
, idUMinus
, 0, 0, NULL
, FALSE
));
6813 ADD_INSN2(ret
, line
, opt_str_freeze
, str
,
6814 new_callinfo(iseq
, idFreeze
, 0, 0, NULL
, FALSE
));
6816 RB_OBJ_WRITTEN(iseq
, Qundef
, str
);
6818 ADD_INSN(ret
, line
, pop
);
6822 /* optimization shortcut
6823 * obj["literal"] -> opt_aref_with(obj, "literal")
6825 if (node
->nd_mid
== idAREF
&& !private_recv_p(node
) && node
->nd_args
&&
6826 nd_type(node
->nd_args
) == NODE_LIST
&& node
->nd_args
->nd_alen
== 1 &&
6827 nd_type(node
->nd_args
->nd_head
) == NODE_STR
&&
6828 ISEQ_COMPILE_DATA(iseq
)->current_block
== NULL
&&
6829 !ISEQ_COMPILE_DATA(iseq
)->option
->frozen_string_literal
&&
6830 ISEQ_COMPILE_DATA(iseq
)->option
->specialized_instruction
) {
6831 VALUE str
= rb_fstring(node
->nd_args
->nd_head
->nd_lit
);
6832 CHECK(COMPILE(ret
, "recv", node
->nd_recv
));
6833 ADD_INSN2(ret
, line
, opt_aref_with
, str
,
6834 new_callinfo(iseq
, idAREF
, 1, 0, NULL
, FALSE
));
6835 RB_OBJ_WRITTEN(iseq
, Qundef
, str
);
6837 ADD_INSN(ret
, line
, pop
);
6845 iseq_has_builtin_function_table(const rb_iseq_t
*iseq
)
6847 return ISEQ_COMPILE_DATA(iseq
)->builtin_function_table
!= NULL
;
6850 static const struct rb_builtin_function
*
6851 iseq_builtin_function_lookup(const rb_iseq_t
*iseq
, const char *name
)
6854 const struct rb_builtin_function
*table
= ISEQ_COMPILE_DATA(iseq
)->builtin_function_table
;
6855 for (i
=0; table
[i
].index
!= -1; i
++) {
6856 if (strcmp(table
[i
].name
, name
) == 0) {
6864 iseq_builtin_function_name(ID mid
)
6866 const char *name
= rb_id2name(mid
);
6867 static const char prefix
[] = "__builtin_";
6868 const size_t prefix_len
= sizeof(prefix
) - 1;
6870 if (UNLIKELY(strncmp(prefix
, name
, prefix_len
) == 0)) {
6871 return &name
[prefix_len
];
6879 delegate_call_p(const rb_iseq_t
*iseq
, unsigned int argc
, const LINK_ANCHOR
*args
, unsigned int *pstart_index
)
6886 else if (argc
<= iseq
->body
->local_table_size
) {
6887 unsigned int start
=0;
6889 // local_table: [p1, p2, p3, l1, l2, l3]
6890 // arguments: [p3, l1, l2] -> 2
6892 argc
+ start
<= iseq
->body
->local_table_size
;
6894 const LINK_ELEMENT
*elem
= FIRST_ELEMENT(args
);
6896 for (unsigned int i
=start
; i
-start
<argc
; i
++) {
6897 if (elem
->type
== ISEQ_ELEMENT_INSN
&&
6898 INSN_OF(elem
) == BIN(getlocal
)) {
6899 int local_index
= FIX2INT(OPERAND_AT(elem
, 0));
6900 int local_level
= FIX2INT(OPERAND_AT(elem
, 1));
6902 if (local_level
== 0) {
6903 unsigned int index
= iseq
->body
->local_table_size
- (local_index
- VM_ENV_DATA_SIZE
+ 1);
6904 if (0) { // for debug
6905 fprintf(stderr
, "lvar:%s (%d), id:%s (%d) local_index:%d, local_size:%d\n",
6906 rb_id2name(iseq
->body
->local_table
[i
]), i
,
6907 rb_id2name(iseq
->body
->local_table
[index
]), index
,
6908 local_index
, (int)iseq
->body
->local_table_size
);
6919 goto fail
; // level != 0 is unsupported
6923 goto fail
; // insn is not a getlocal
6932 *pstart_index
= start
;
6941 compile_call(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int type
, int line
, int popped
)
6943 /* call: obj.method(...)
6949 ID mid
= node
->nd_mid
;
6951 unsigned int flag
= 0;
6952 struct rb_callinfo_kwarg
*keywords
= NULL
;
6953 const rb_iseq_t
*parent_block
= ISEQ_COMPILE_DATA(iseq
)->current_block
;
6954 LABEL
*else_label
= NULL
;
6955 VALUE branches
= Qfalse
;
6957 ISEQ_COMPILE_DATA(iseq
)->current_block
= NULL
;
6961 #if OPT_SUPPORT_JOKE
6962 if (nd_type(node
) == NODE_VCALL
) {
6966 CONST_ID(id_bitblt
, "bitblt");
6967 CONST_ID(id_answer
, "the_answer_to_life_the_universe_and_everything");
6969 if (mid
== id_bitblt
) {
6970 ADD_INSN(ret
, line
, bitblt
);
6973 else if (mid
== id_answer
) {
6974 ADD_INSN(ret
, line
, answer
);
6983 CONST_ID(goto_id
, "__goto__");
6984 CONST_ID(label_id
, "__label__");
6986 if (nd_type(node
) == NODE_FCALL
&&
6987 (mid
== goto_id
|| mid
== label_id
)) {
6990 st_table
*labels_table
= ISEQ_COMPILE_DATA(iseq
)->labels_table
;
6993 if (!labels_table
) {
6994 labels_table
= st_init_numtable();
6995 ISEQ_COMPILE_DATA(iseq
)->labels_table
= labels_table
;
6997 if (nd_type(node
->nd_args
->nd_head
) == NODE_LIT
&&
6998 SYMBOL_P(node
->nd_args
->nd_head
->nd_lit
)) {
7000 label_name
= node
->nd_args
->nd_head
->nd_lit
;
7001 if (!st_lookup(labels_table
, (st_data_t
)label_name
, &data
)) {
7002 label
= NEW_LABEL(line
);
7003 label
->position
= line
;
7004 st_insert(labels_table
, (st_data_t
)label_name
, (st_data_t
)label
);
7007 label
= (LABEL
*)data
;
7011 COMPILE_ERROR(ERROR_ARGS
"invalid goto/label format");
7015 if (mid
== goto_id
) {
7016 ADD_INSNL(ret
, line
, jump
, label
);
7019 ADD_LABEL(ret
, label
);
7025 const char *builtin_func
;
7026 NODE
*args_node
= node
->nd_args
;
7028 if (UNLIKELY(iseq_has_builtin_function_table(iseq
)) &&
7029 (builtin_func
= iseq_builtin_function_name(mid
)) != NULL
) {
7031 if (parent_block
!= NULL
) {
7032 COMPILE_ERROR(iseq
, line
, "should not call builtins here.");
7036 char inline_func
[0x20];
7037 bool cconst
= false;
7039 const struct rb_builtin_function
*bf
= iseq_builtin_function_lookup(iseq
, builtin_func
);
7042 if (strcmp("cstmt!", builtin_func
) == 0 ||
7043 strcmp("cexpr!", builtin_func
) == 0) {
7045 int inline_index
= GET_VM()->builtin_inline_index
++;
7046 snprintf(inline_func
, 0x20, "_bi%d", inline_index
);
7047 builtin_func
= inline_func
;
7051 else if (strcmp("cconst!", builtin_func
) == 0) {
7055 else if (strcmp("cinit!", builtin_func
) == 0) {
7057 GET_VM()->builtin_inline_index
++;
7062 rb_bug("can't find builtin function:%s", builtin_func
);
7065 COMPILE_ERROR(ERROR_ARGS
"can't find builtin function:%s", builtin_func
);
7071 typedef VALUE(*builtin_func0
)(void *, VALUE
);
7072 VALUE const_val
= (*(builtin_func0
)bf
->func_ptr
)(NULL
, Qnil
);
7073 ADD_INSN1(ret
, line
, putobject
, const_val
);
7077 // fprintf(stderr, "func_name:%s -> %p\n", builtin_func, bf->func_ptr);
7079 argc
= setup_args(iseq
, args
, args_node
, &flag
, &keywords
);
7081 if (FIX2INT(argc
) != bf
->argc
) {
7082 COMPILE_ERROR(ERROR_ARGS
"argc is not match for builtin function:%s (expect %d but %d)",
7083 builtin_func
, bf
->argc
, FIX2INT(argc
));
7087 unsigned int start_index
;
7088 if (delegate_call_p(iseq
, FIX2INT(argc
), args
, &start_index
)) {
7089 ADD_INSN2(ret
, line
, opt_invokebuiltin_delegate
, bf
, INT2FIX(start_index
));
7093 ADD_INSN1(ret
,line
, invokebuiltin
, bf
);
7096 if (popped
) ADD_INSN(ret
, line
, pop
);
7103 if (type
== NODE_CALL
|| type
== NODE_OPCALL
|| type
== NODE_QCALL
) {
7106 if (mid
== idCall
&&
7107 nd_type(node
->nd_recv
) == NODE_LVAR
&&
7108 iseq_block_param_id_p(iseq
, node
->nd_recv
->nd_vid
, &idx
, &level
)) {
7109 ADD_INSN2(recv
, nd_line(node
->nd_recv
), getblockparamproxy
, INT2FIX(idx
+ VM_ENV_DATA_SIZE
- 1), INT2FIX(level
));
7111 else if (private_recv_p(node
)) {
7112 ADD_INSN(recv
, nd_line(node
), putself
);
7113 flag
|= VM_CALL_FCALL
;
7116 CHECK(COMPILE(recv
, "recv", node
->nd_recv
));
7119 if (type
== NODE_QCALL
) {
7120 else_label
= qcall_branch_start(iseq
, recv
, &branches
, node
, line
);
7123 else if (type
== NODE_FCALL
|| type
== NODE_VCALL
) {
7124 ADD_CALL_RECEIVER(recv
, line
);
7128 if (type
!= NODE_VCALL
) {
7129 argc
= setup_args(iseq
, args
, node
->nd_args
, &flag
, &keywords
);
7130 CHECK(!NIL_P(argc
));
7139 debugp_param("call args argc", argc
);
7140 debugp_param("call method", ID2SYM(mid
));
7142 switch ((int)type
) {
7144 flag
|= VM_CALL_VCALL
;
7145 /* VCALL is funcall, so fall through */
7147 flag
|= VM_CALL_FCALL
;
7150 ADD_SEND_R(ret
, line
, mid
, argc
, parent_block
, INT2FIX(flag
), keywords
);
7152 qcall_branch_end(iseq
, ret
, else_label
, branches
, node
, line
);
7154 ADD_INSN(ret
, line
, pop
);
7160 static int iseq_compile_each0(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*node
, int popped
);
7164 self: InstructionSequence
7165 node: Ruby compiled node
7166 popped: This node will be popped
7169 iseq_compile_each(rb_iseq_t
*iseq
, LINK_ANCHOR
*ret
, const NODE
*node
, int popped
)
7173 int lineno
= ISEQ_COMPILE_DATA(iseq
)->last_line
;
7174 if (lineno
== 0) lineno
= FIX2INT(rb_iseq_first_lineno(iseq
));
7175 debugs("node: NODE_NIL(implicit)\n");
7176 ADD_INSN(ret
, lineno
, putnil
);
7180 return iseq_compile_each0(iseq
, ret
, node
, popped
);
7184 check_yield_place(const rb_iseq_t
*iseq
)
7186 switch (iseq
->body
->local_iseq
->body
->type
) {
7188 case ISEQ_TYPE_MAIN
:
7189 case ISEQ_TYPE_CLASS
:
7197 iseq_compile_each0(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*node
, int popped
)
7199 const int line
= (int)nd_line(node
);
7200 const enum node_type type
= nd_type(node
);
7201 struct rb_iseq_constant_body
*const body
= iseq
->body
;
7203 if (ISEQ_COMPILE_DATA(iseq
)->last_line
== line
) {
7207 if (node
->flags
& NODE_FL_NEWLINE
) {
7208 int event
= RUBY_EVENT_LINE
;
7209 ISEQ_COMPILE_DATA(iseq
)->last_line
= line
;
7210 if (ISEQ_COVERAGE(iseq
) && ISEQ_LINE_COVERAGE(iseq
)) {
7211 event
|= RUBY_EVENT_COVERAGE_LINE
;
7213 ADD_TRACE(ret
, event
);
7217 debug_node_start(node
);
7218 #undef BEFORE_RETURN
7219 #define BEFORE_RETURN debug_node_end()
7223 while (node
&& nd_type(node
) == NODE_BLOCK
) {
7224 CHECK(COMPILE_(ret
, "BLOCK body", node
->nd_head
,
7225 (node
->nd_next
? 1 : popped
)));
7226 node
= node
->nd_next
;
7229 CHECK(COMPILE_(ret
, "BLOCK next", node
->nd_next
, popped
));
7235 CHECK(compile_if(iseq
, ret
, node
, popped
, type
));
7238 CHECK(compile_case(iseq
, ret
, node
, popped
));
7241 CHECK(compile_case2(iseq
, ret
, node
, popped
));
7244 CHECK(compile_case3(iseq
, ret
, node
, popped
));
7248 CHECK(compile_loop(iseq
, ret
, node
, popped
, type
));
7252 CHECK(compile_iter(iseq
, ret
, node
, popped
));
7254 case NODE_FOR_MASGN
:
7255 CHECK(compile_for_masgn(iseq
, ret
, node
, popped
));
7258 CHECK(compile_break(iseq
, ret
, node
, popped
));
7261 CHECK(compile_next(iseq
, ret
, node
, popped
));
7264 CHECK(compile_redo(iseq
, ret
, node
, popped
));
7267 CHECK(compile_retry(iseq
, ret
, node
, popped
));
7270 CHECK(COMPILE_(ret
, "NODE_BEGIN", node
->nd_body
, popped
));
7274 CHECK(compile_rescue(iseq
, ret
, node
, popped
));
7277 CHECK(compile_resbody(iseq
, ret
, node
, popped
));
7280 CHECK(compile_ensure(iseq
, ret
, node
, popped
));
7285 LABEL
*end_label
= NEW_LABEL(line
);
7286 CHECK(COMPILE(ret
, "nd_1st", node
->nd_1st
));
7288 ADD_INSN(ret
, line
, dup
);
7290 if (type
== NODE_AND
) {
7291 ADD_INSNL(ret
, line
, branchunless
, end_label
);
7294 ADD_INSNL(ret
, line
, branchif
, end_label
);
7297 ADD_INSN(ret
, line
, pop
);
7299 CHECK(COMPILE_(ret
, "nd_2nd", node
->nd_2nd
, popped
));
7300 ADD_LABEL(ret
, end_label
);
7305 compile_massign(iseq
, ret
, node
, popped
);
7310 ID id
= node
->nd_vid
;
7311 int idx
= body
->local_iseq
->body
->local_table_size
- get_local_var_idx(iseq
, id
);
7313 debugs("lvar: %s idx: %d\n", rb_id2name(id
), idx
);
7314 CHECK(COMPILE(ret
, "rvalue", node
->nd_value
));
7317 ADD_INSN(ret
, line
, dup
);
7319 ADD_SETLOCAL(ret
, line
, idx
, get_lvar_level(iseq
));
7323 case NODE_DASGN_CURR
:{
7325 ID id
= node
->nd_vid
;
7326 CHECK(COMPILE(ret
, "dvalue", node
->nd_value
));
7327 debugi("dassn id", rb_id2str(id
) ? id
: '*');
7330 ADD_INSN(ret
, line
, dup
);
7333 idx
= get_dyna_var_idx(iseq
, id
, &lv
, &ls
);
7336 COMPILE_ERROR(ERROR_ARGS
"NODE_DASGN(_CURR): unknown id (%"PRIsVALUE
")",
7340 ADD_SETLOCAL(ret
, line
, ls
- idx
, lv
);
7344 CHECK(COMPILE(ret
, "lvalue", node
->nd_value
));
7347 ADD_INSN(ret
, line
, dup
);
7349 ADD_INSN1(ret
, line
, setglobal
,
7350 ((VALUE
)node
->nd_entry
| 1));
7354 CHECK(COMPILE(ret
, "lvalue", node
->nd_value
));
7356 ADD_INSN(ret
, line
, dup
);
7358 ADD_INSN2(ret
, line
, setinstancevariable
,
7359 ID2SYM(node
->nd_vid
),
7360 get_ivar_ic_value(iseq
,node
->nd_vid
));
7364 CHECK(COMPILE(ret
, "lvalue", node
->nd_value
));
7367 ADD_INSN(ret
, line
, dup
);
7371 ADD_INSN1(ret
, line
, putspecialobject
,
7372 INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE
));
7373 ADD_INSN1(ret
, line
, setconstant
, ID2SYM(node
->nd_vid
));
7376 compile_cpath(ret
, iseq
, node
->nd_else
);
7377 ADD_INSN1(ret
, line
, setconstant
, ID2SYM(node
->nd_else
->nd_mid
));
7382 CHECK(COMPILE(ret
, "cvasgn val", node
->nd_value
));
7384 ADD_INSN(ret
, line
, dup
);
7386 ADD_INSN1(ret
, line
, setclassvariable
,
7387 ID2SYM(node
->nd_vid
));
7390 case NODE_OP_ASGN1
: {
7392 unsigned int flag
= 0;
7394 ID id
= node
->nd_mid
;
7403 * dupn 2 # nil a x a x
7404 * send :[] # nil a x a[x]
7405 * eval y # nil a x a[x] y
7406 * send op # nil a x ret
7407 * setn 3 # ret a x ret
7413 * nd_recv[nd_args->nd_body] (nd_mid)= nd_args->nd_head;
7414 * NODE_OP_ASGN nd_recv
7421 ADD_INSN(ret
, line
, putnil
);
7423 asgnflag
= COMPILE_RECV(ret
, "NODE_OP_ASGN1 recv", node
);
7424 CHECK(asgnflag
!= -1);
7425 switch (nd_type(node
->nd_args
->nd_head
)) {
7429 case NODE_BLOCK_PASS
:
7433 argc
= setup_args(iseq
, ret
, node
->nd_args
->nd_head
, &flag
, NULL
);
7434 CHECK(!NIL_P(argc
));
7436 ADD_INSN1(ret
, line
, dupn
, FIXNUM_INC(argc
, 1 + boff
));
7438 ADD_SEND_WITH_FLAG(ret
, line
, idAREF
, argc
, INT2FIX(flag
));
7440 if (id
== idOROP
|| id
== idANDOP
) {
7441 /* a[x] ||= y or a[x] &&= y
7449 LABEL
*label
= NEW_LABEL(line
);
7450 LABEL
*lfin
= NEW_LABEL(line
);
7452 ADD_INSN(ret
, line
, dup
);
7454 ADD_INSNL(ret
, line
, branchif
, label
);
7456 else { /* idANDOP */
7457 ADD_INSNL(ret
, line
, branchunless
, label
);
7459 ADD_INSN(ret
, line
, pop
);
7461 CHECK(COMPILE(ret
, "NODE_OP_ASGN1 args->body: ", node
->nd_args
->nd_body
));
7463 ADD_INSN1(ret
, line
, setn
, FIXNUM_INC(argc
, 2+boff
));
7465 if (flag
& VM_CALL_ARGS_SPLAT
) {
7466 ADD_INSN1(ret
, line
, newarray
, INT2FIX(1));
7468 ADD_INSN1(ret
, line
, dupn
, INT2FIX(3));
7469 ADD_INSN(ret
, line
, swap
);
7470 ADD_INSN(ret
, line
, pop
);
7472 ADD_INSN(ret
, line
, concatarray
);
7474 ADD_INSN1(ret
, line
, setn
, INT2FIX(3));
7475 ADD_INSN(ret
, line
, pop
);
7476 ADD_INSN(ret
, line
, pop
);
7478 ADD_SEND_WITH_FLAG(ret
, line
, idASET
, argc
, INT2FIX(flag
));
7482 ADD_INSN(ret
, line
, swap
);
7483 ADD_SEND_WITH_FLAG(ret
, line
, idASET
, FIXNUM_INC(argc
, 1), INT2FIX(flag
));
7485 ADD_INSN(ret
, line
, pop
);
7486 ADD_INSNL(ret
, line
, jump
, lfin
);
7487 ADD_LABEL(ret
, label
);
7489 ADD_INSN1(ret
, line
, setn
, FIXNUM_INC(argc
, 2+boff
));
7491 ADD_INSN1(ret
, line
, adjuststack
, FIXNUM_INC(argc
, 2+boff
));
7492 ADD_LABEL(ret
, lfin
);
7495 CHECK(COMPILE(ret
, "NODE_OP_ASGN1 args->body: ", node
->nd_args
->nd_body
));
7496 ADD_SEND(ret
, line
, id
, INT2FIX(1));
7498 ADD_INSN1(ret
, line
, setn
, FIXNUM_INC(argc
, 2+boff
));
7500 if (flag
& VM_CALL_ARGS_SPLAT
) {
7501 ADD_INSN1(ret
, line
, newarray
, INT2FIX(1));
7503 ADD_INSN1(ret
, line
, dupn
, INT2FIX(3));
7504 ADD_INSN(ret
, line
, swap
);
7505 ADD_INSN(ret
, line
, pop
);
7507 ADD_INSN(ret
, line
, concatarray
);
7509 ADD_INSN1(ret
, line
, setn
, INT2FIX(3));
7510 ADD_INSN(ret
, line
, pop
);
7511 ADD_INSN(ret
, line
, pop
);
7513 ADD_SEND_WITH_FLAG(ret
, line
, idASET
, argc
, INT2FIX(flag
));
7517 ADD_INSN(ret
, line
, swap
);
7518 ADD_SEND_WITH_FLAG(ret
, line
, idASET
, FIXNUM_INC(argc
, 1), INT2FIX(flag
));
7520 ADD_INSN(ret
, line
, pop
);
7525 case NODE_OP_ASGN2
:{
7526 ID atype
= node
->nd_next
->nd_mid
;
7527 ID vid
= node
->nd_next
->nd_vid
, aid
= rb_id_attrset(vid
);
7529 LABEL
*lfin
= NEW_LABEL(line
);
7530 LABEL
*lcfin
= NEW_LABEL(line
);
7533 class C; attr_accessor :c; end
7574 asgnflag
= COMPILE_RECV(ret
, "NODE_OP_ASGN2#recv", node
);
7575 CHECK(asgnflag
!= -1);
7576 if (node
->nd_next
->nd_aid
) {
7577 lskip
= NEW_LABEL(line
);
7578 ADD_INSN(ret
, line
, dup
);
7579 ADD_INSNL(ret
, line
, branchnil
, lskip
);
7581 ADD_INSN(ret
, line
, dup
);
7582 ADD_SEND_WITH_FLAG(ret
, line
, vid
, INT2FIX(0), INT2FIX(asgnflag
));
7584 if (atype
== idOROP
|| atype
== idANDOP
) {
7585 ADD_INSN(ret
, line
, dup
);
7586 if (atype
== idOROP
) {
7587 ADD_INSNL(ret
, line
, branchif
, lcfin
);
7589 else { /* idANDOP */
7590 ADD_INSNL(ret
, line
, branchunless
, lcfin
);
7592 ADD_INSN(ret
, line
, pop
);
7593 CHECK(COMPILE(ret
, "NODE_OP_ASGN2 val", node
->nd_value
));
7594 ADD_INSN(ret
, line
, swap
);
7595 ADD_INSN1(ret
, line
, topn
, INT2FIX(1));
7596 ADD_SEND_WITH_FLAG(ret
, line
, aid
, INT2FIX(1), INT2FIX(asgnflag
));
7597 ADD_INSNL(ret
, line
, jump
, lfin
);
7599 ADD_LABEL(ret
, lcfin
);
7600 ADD_INSN(ret
, line
, swap
);
7602 ADD_LABEL(ret
, lfin
);
7603 ADD_INSN(ret
, line
, pop
);
7605 ADD_LABEL(ret
, lskip
);
7608 /* we can apply more optimize */
7609 ADD_INSN(ret
, line
, pop
);
7613 CHECK(COMPILE(ret
, "NODE_OP_ASGN2 val", node
->nd_value
));
7614 ADD_SEND(ret
, line
, atype
, INT2FIX(1));
7616 ADD_INSN(ret
, line
, swap
);
7617 ADD_INSN1(ret
, line
, topn
, INT2FIX(1));
7619 ADD_SEND_WITH_FLAG(ret
, line
, aid
, INT2FIX(1), INT2FIX(asgnflag
));
7620 if (lskip
&& popped
) {
7621 ADD_LABEL(ret
, lskip
);
7623 ADD_INSN(ret
, line
, pop
);
7624 if (lskip
&& !popped
) {
7625 ADD_LABEL(ret
, lskip
);
7630 case NODE_OP_CDECL
: {
7635 switch (nd_type(node
->nd_head
)) {
7637 ADD_INSN1(ret
, line
, putobject
, rb_cObject
);
7640 CHECK(COMPILE(ret
, "NODE_OP_CDECL/colon2#nd_head", node
->nd_head
->nd_head
));
7643 COMPILE_ERROR(ERROR_ARGS
"%s: invalid node in NODE_OP_CDECL",
7644 ruby_node_name(nd_type(node
->nd_head
)));
7647 mid
= node
->nd_head
->nd_mid
;
7649 if (node
->nd_aid
== idOROP
) {
7650 lassign
= NEW_LABEL(line
);
7651 ADD_INSN(ret
, line
, dup
); /* cref cref */
7652 ADD_INSN3(ret
, line
, defined
, INT2FIX(DEFINED_CONST_FROM
),
7653 ID2SYM(mid
), Qfalse
); /* cref bool */
7654 ADD_INSNL(ret
, line
, branchunless
, lassign
); /* cref */
7656 ADD_INSN(ret
, line
, dup
); /* cref cref */
7657 ADD_INSN1(ret
, line
, putobject
, Qtrue
);
7658 ADD_INSN1(ret
, line
, getconstant
, ID2SYM(mid
)); /* cref obj */
7660 if (node
->nd_aid
== idOROP
|| node
->nd_aid
== idANDOP
) {
7661 lfin
= NEW_LABEL(line
);
7662 if (!popped
) ADD_INSN(ret
, line
, dup
); /* cref [obj] obj */
7663 if (node
->nd_aid
== idOROP
)
7664 ADD_INSNL(ret
, line
, branchif
, lfin
);
7666 ADD_INSNL(ret
, line
, branchunless
, lfin
);
7668 if (!popped
) ADD_INSN(ret
, line
, pop
); /* cref */
7669 if (lassign
) ADD_LABEL(ret
, lassign
);
7670 CHECK(COMPILE(ret
, "NODE_OP_CDECL#nd_value", node
->nd_value
));
7673 ADD_INSN1(ret
, line
, topn
, INT2FIX(1)); /* cref value cref */
7675 ADD_INSN1(ret
, line
, dupn
, INT2FIX(2)); /* cref value cref value */
7676 ADD_INSN(ret
, line
, swap
); /* cref value value cref */
7678 ADD_INSN1(ret
, line
, setconstant
, ID2SYM(mid
)); /* cref [value] */
7679 ADD_LABEL(ret
, lfin
); /* cref [value] */
7680 if (!popped
) ADD_INSN(ret
, line
, swap
); /* [value] cref */
7681 ADD_INSN(ret
, line
, pop
); /* [value] */
7684 CHECK(COMPILE(ret
, "NODE_OP_CDECL#nd_value", node
->nd_value
));
7685 /* cref obj value */
7686 ADD_CALL(ret
, line
, node
->nd_aid
, INT2FIX(1));
7688 ADD_INSN(ret
, line
, swap
); /* value cref */
7690 ADD_INSN1(ret
, line
, topn
, INT2FIX(1)); /* value cref value */
7691 ADD_INSN(ret
, line
, swap
); /* value value cref */
7693 ADD_INSN1(ret
, line
, setconstant
, ID2SYM(mid
));
7697 case NODE_OP_ASGN_AND
:
7698 case NODE_OP_ASGN_OR
:{
7699 LABEL
*lfin
= NEW_LABEL(line
);
7702 if (nd_type(node
) == NODE_OP_ASGN_OR
) {
7706 defined_expr(iseq
, ret
, node
->nd_head
, lfinish
, Qfalse
);
7707 lassign
= lfinish
[1];
7709 lassign
= NEW_LABEL(line
);
7711 ADD_INSNL(ret
, line
, branchunless
, lassign
);
7714 lassign
= NEW_LABEL(line
);
7717 CHECK(COMPILE(ret
, "NODE_OP_ASGN_AND/OR#nd_head", node
->nd_head
));
7718 ADD_INSN(ret
, line
, dup
);
7720 if (nd_type(node
) == NODE_OP_ASGN_AND
) {
7721 ADD_INSNL(ret
, line
, branchunless
, lfin
);
7724 ADD_INSNL(ret
, line
, branchif
, lfin
);
7727 ADD_INSN(ret
, line
, pop
);
7728 ADD_LABEL(ret
, lassign
);
7729 CHECK(COMPILE(ret
, "NODE_OP_ASGN_AND/OR#nd_value", node
->nd_value
));
7730 ADD_LABEL(ret
, lfin
);
7733 /* we can apply more optimize */
7734 ADD_INSN(ret
, line
, pop
);
7738 case NODE_CALL
: /* obj.foo */
7739 case NODE_OPCALL
: /* foo[] */
7740 if (compile_call_precheck_freeze(iseq
, ret
, node
, line
, popped
) == TRUE
) {
7743 case NODE_QCALL
: /* obj&.foo */
7744 case NODE_FCALL
: /* foo() */
7745 case NODE_VCALL
: /* foo (variable or call) */
7746 if (compile_call(iseq
, ret
, node
, type
, line
, popped
) == COMPILE_NG
) {
7754 unsigned int flag
= 0;
7755 struct rb_callinfo_kwarg
*keywords
= NULL
;
7756 const rb_iseq_t
*parent_block
= ISEQ_COMPILE_DATA(iseq
)->current_block
;
7759 ISEQ_COMPILE_DATA(iseq
)->current_block
= NULL
;
7760 if (type
== NODE_SUPER
) {
7761 VALUE vargc
= setup_args(iseq
, args
, node
->nd_args
, &flag
, &keywords
);
7762 CHECK(!NIL_P(vargc
));
7763 argc
= FIX2INT(vargc
);
7768 const rb_iseq_t
*liseq
= body
->local_iseq
;
7769 const struct rb_iseq_constant_body
*const local_body
= liseq
->body
;
7770 const struct rb_iseq_param_keyword
*const local_kwd
= local_body
->param
.keyword
;
7771 int lvar_level
= get_lvar_level(iseq
);
7773 argc
= local_body
->param
.lead_num
;
7775 /* normal arguments */
7776 for (i
= 0; i
< local_body
->param
.lead_num
; i
++) {
7777 int idx
= local_body
->local_table_size
- i
;
7778 ADD_GETLOCAL(args
, line
, idx
, lvar_level
);
7781 if (local_body
->param
.flags
.has_opt
) {
7782 /* optional arguments */
7784 for (j
= 0; j
< local_body
->param
.opt_num
; j
++) {
7785 int idx
= local_body
->local_table_size
- (i
+ j
);
7786 ADD_GETLOCAL(args
, line
, idx
, lvar_level
);
7791 if (local_body
->param
.flags
.has_rest
) {
7793 int idx
= local_body
->local_table_size
- local_body
->param
.rest_start
;
7795 ADD_GETLOCAL(args
, line
, idx
, lvar_level
);
7796 ADD_INSN1(args
, line
, splatarray
, Qfalse
);
7798 argc
= local_body
->param
.rest_start
+ 1;
7799 flag
|= VM_CALL_ARGS_SPLAT
;
7801 if (local_body
->param
.flags
.has_post
) {
7802 /* post arguments */
7803 int post_len
= local_body
->param
.post_num
;
7804 int post_start
= local_body
->param
.post_start
;
7806 if (local_body
->param
.flags
.has_rest
) {
7808 for (j
=0; j
<post_len
; j
++) {
7809 int idx
= local_body
->local_table_size
- (post_start
+ j
);
7810 ADD_GETLOCAL(args
, line
, idx
, lvar_level
);
7812 ADD_INSN1(args
, line
, newarray
, INT2FIX(j
));
7813 ADD_INSN (args
, line
, concatarray
);
7814 /* argc is settled at above */
7818 for (j
=0; j
<post_len
; j
++) {
7819 int idx
= local_body
->local_table_size
- (post_start
+ j
);
7820 ADD_GETLOCAL(args
, line
, idx
, lvar_level
);
7822 argc
= post_len
+ post_start
;
7826 if (local_body
->param
.flags
.has_kw
) { /* TODO: support keywords */
7827 int local_size
= local_body
->local_table_size
;
7830 ADD_INSN1(args
, line
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
7832 if (local_body
->param
.flags
.has_kwrest
) {
7833 int idx
= local_body
->local_table_size
- local_kwd
->rest_start
;
7834 ADD_GETLOCAL(args
, line
, idx
, lvar_level
);
7837 ADD_INSN1(args
, line
, newhash
, INT2FIX(0));
7838 flag
|= VM_CALL_KW_SPLAT_MUT
;
7840 for (i
= 0; i
< local_kwd
->num
; ++i
) {
7841 ID id
= local_kwd
->table
[i
];
7842 int idx
= local_size
- get_local_var_idx(liseq
, id
);
7843 ADD_INSN1(args
, line
, putobject
, ID2SYM(id
));
7844 ADD_GETLOCAL(args
, line
, idx
, lvar_level
);
7846 ADD_SEND(args
, line
, id_core_hash_merge_ptr
, INT2FIX(i
* 2 + 1));
7847 if (local_body
->param
.flags
.has_rest
) {
7848 ADD_INSN1(args
, line
, newarray
, INT2FIX(1));
7849 ADD_INSN (args
, line
, concatarray
);
7852 flag
|= VM_CALL_KW_SPLAT
;
7854 else if (local_body
->param
.flags
.has_kwrest
) {
7855 int idx
= local_body
->local_table_size
- local_kwd
->rest_start
;
7856 ADD_GETLOCAL(args
, line
, idx
, lvar_level
);
7858 if (local_body
->param
.flags
.has_rest
) {
7859 ADD_INSN1(args
, line
, newarray
, INT2FIX(1));
7860 ADD_INSN (args
, line
, concatarray
);
7865 flag
|= VM_CALL_KW_SPLAT
;
7869 ADD_INSN(ret
, line
, putself
);
7871 ADD_INSN2(ret
, line
, invokesuper
,
7872 new_callinfo(iseq
, 0, argc
, flag
| VM_CALL_SUPER
| (type
== NODE_ZSUPER
? VM_CALL_ZSUPER
: 0) | VM_CALL_FCALL
, keywords
, parent_block
!= NULL
),
7876 ADD_INSN(ret
, line
, pop
);
7881 CHECK(compile_array(iseq
, ret
, node
, popped
) >= 0);
7886 ADD_INSN1(ret
, line
, newarray
, INT2FIX(0));
7891 const NODE
*n
= node
;
7893 COMPILE_ERROR(ERROR_ARGS
"NODE_VALUES: must not be popped");
7896 CHECK(COMPILE(ret
, "values item", n
->nd_head
));
7899 ADD_INSN1(ret
, line
, newarray
, INT2FIX(node
->nd_alen
));
7903 CHECK(compile_hash(iseq
, ret
, node
, popped
) >= 0);
7906 CHECK(compile_return(iseq
, ret
, node
, popped
));
7911 unsigned int flag
= 0;
7912 struct rb_callinfo_kwarg
*keywords
= NULL
;
7916 if (check_yield_place(iseq
) == FALSE
) {
7917 COMPILE_ERROR(ERROR_ARGS
"Invalid yield");
7921 if (node
->nd_head
) {
7922 argc
= setup_args(iseq
, args
, node
->nd_head
, &flag
, &keywords
);
7923 CHECK(!NIL_P(argc
));
7930 ADD_INSN1(ret
, line
, invokeblock
, new_callinfo(iseq
, 0, FIX2INT(argc
), flag
, keywords
, FALSE
));
7933 ADD_INSN(ret
, line
, pop
);
7939 ID id
= node
->nd_vid
;
7940 int idx
= body
->local_iseq
->body
->local_table_size
- get_local_var_idx(iseq
, id
);
7942 debugs("id: %s idx: %d\n", rb_id2name(id
), idx
);
7943 ADD_GETLOCAL(ret
, line
, idx
, get_lvar_level(iseq
));
7949 debugi("nd_vid", node
->nd_vid
);
7951 idx
= get_dyna_var_idx(iseq
, node
->nd_vid
, &lv
, &ls
);
7953 COMPILE_ERROR(ERROR_ARGS
"unknown dvar (%"PRIsVALUE
")",
7954 rb_id2str(node
->nd_vid
));
7957 ADD_GETLOCAL(ret
, line
, ls
- idx
, lv
);
7962 ADD_INSN1(ret
, line
, getglobal
,
7963 ((VALUE
)node
->nd_entry
| 1));
7965 ADD_INSN(ret
, line
, pop
);
7970 debugi("nd_vid", node
->nd_vid
);
7972 ADD_INSN2(ret
, line
, getinstancevariable
,
7973 ID2SYM(node
->nd_vid
),
7974 get_ivar_ic_value(iseq
,node
->nd_vid
));
7979 debugi("nd_vid", node
->nd_vid
);
7981 if (ISEQ_COMPILE_DATA(iseq
)->option
->inline_const_cache
) {
7982 LABEL
*lend
= NEW_LABEL(line
);
7983 int ic_index
= body
->is_size
++;
7985 ADD_INSN2(ret
, line
, opt_getinlinecache
, lend
, INT2FIX(ic_index
));
7986 ADD_INSN1(ret
, line
, putobject
, Qtrue
);
7987 ADD_INSN1(ret
, line
, getconstant
, ID2SYM(node
->nd_vid
));
7988 ADD_INSN1(ret
, line
, opt_setinlinecache
, INT2FIX(ic_index
));
7989 ADD_LABEL(ret
, lend
);
7992 ADD_INSN(ret
, line
, putnil
);
7993 ADD_INSN1(ret
, line
, putobject
, Qtrue
);
7994 ADD_INSN1(ret
, line
, getconstant
, ID2SYM(node
->nd_vid
));
7998 ADD_INSN(ret
, line
, pop
);
8004 ADD_INSN1(ret
, line
, getclassvariable
,
8005 ID2SYM(node
->nd_vid
));
8011 if (!node
->nd_nth
) {
8012 ADD_INSN(ret
, line
, putnil
);
8015 ADD_INSN2(ret
, line
, getspecial
, INT2FIX(1) /* '~' */,
8016 INT2FIX(node
->nd_nth
<< 1));
8020 case NODE_BACK_REF
:{
8022 ADD_INSN2(ret
, line
, getspecial
, INT2FIX(1) /* '~' */,
8023 INT2FIX(0x01 | (node
->nd_nth
<< 1)));
8035 switch (nd_type(node
)) {
8037 ADD_INSN1(recv
, line
, putobject
, node
->nd_lit
);
8038 ADD_INSN2(val
, line
, getspecial
, INT2FIX(0),
8042 CHECK(COMPILE(recv
, "receiver", node
->nd_recv
));
8043 CHECK(COMPILE(val
, "value", node
->nd_value
));
8046 CHECK(COMPILE(recv
, "receiver", node
->nd_value
));
8047 CHECK(COMPILE(val
, "value", node
->nd_recv
));
8053 ADD_SEND(ret
, line
, idEqTilde
, INT2FIX(1));
8055 if (node
->nd_args
) {
8056 compile_named_capture_assign(iseq
, ret
, node
->nd_args
);
8060 ADD_INSN(ret
, line
, pop
);
8065 debugp_param("lit", node
->nd_lit
);
8067 ADD_INSN1(ret
, line
, putobject
, node
->nd_lit
);
8072 debugp_param("nd_lit", node
->nd_lit
);
8074 VALUE lit
= node
->nd_lit
;
8075 if (!ISEQ_COMPILE_DATA(iseq
)->option
->frozen_string_literal
) {
8076 lit
= rb_fstring(lit
);
8077 ADD_INSN1(ret
, line
, putstring
, lit
);
8078 RB_OBJ_WRITTEN(iseq
, Qundef
, lit
);
8081 if (ISEQ_COMPILE_DATA(iseq
)->option
->debug_frozen_string_literal
|| RTEST(ruby_debug
)) {
8082 VALUE debug_info
= rb_ary_new_from_args(2, rb_iseq_path(iseq
), INT2FIX(line
));
8083 lit
= rb_str_dup(lit
);
8084 rb_ivar_set(lit
, id_debug_created_info
, rb_obj_freeze(debug_info
));
8085 lit
= rb_str_freeze(lit
);
8088 lit
= rb_fstring(lit
);
8090 ADD_INSN1(ret
, line
, putobject
, lit
);
8091 RB_OBJ_WRITTEN(iseq
, Qundef
, lit
);
8097 compile_dstr(iseq
, ret
, node
);
8100 ADD_INSN(ret
, line
, pop
);
8103 if (ISEQ_COMPILE_DATA(iseq
)->option
->frozen_string_literal
) {
8104 VALUE debug_info
= Qnil
;
8105 if (ISEQ_COMPILE_DATA(iseq
)->option
->debug_frozen_string_literal
|| RTEST(ruby_debug
)) {
8106 debug_info
= rb_ary_new_from_args(2, rb_iseq_path(iseq
), INT2FIX(line
));
8108 ADD_INSN1(ret
, line
, freezestring
, debug_info
);
8109 if (!NIL_P(debug_info
)) {
8110 RB_OBJ_WRITTEN(iseq
, Qundef
, rb_obj_freeze(debug_info
));
8117 ADD_CALL_RECEIVER(ret
, line
);
8118 VALUE str
= rb_fstring(node
->nd_lit
);
8119 ADD_INSN1(ret
, line
, putobject
, str
);
8120 RB_OBJ_WRITTEN(iseq
, Qundef
, str
);
8121 ADD_CALL(ret
, line
, idBackquote
, INT2FIX(1));
8124 ADD_INSN(ret
, line
, pop
);
8129 ADD_CALL_RECEIVER(ret
, line
);
8130 compile_dstr(iseq
, ret
, node
);
8131 ADD_CALL(ret
, line
, idBackquote
, INT2FIX(1));
8134 ADD_INSN(ret
, line
, pop
);
8139 CHECK(compile_evstr(iseq
, ret
, node
->nd_body
, popped
));
8142 compile_dregx(iseq
, ret
, node
);
8145 ADD_INSN(ret
, line
, pop
);
8150 int ic_index
= body
->is_size
++;
8151 const rb_iseq_t
*block_iseq
;
8152 block_iseq
= NEW_CHILD_ISEQ(node
->nd_body
, make_name_for_block(iseq
), ISEQ_TYPE_PLAIN
, line
);
8154 ADD_INSN2(ret
, line
, once
, block_iseq
, INT2FIX(ic_index
));
8155 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)block_iseq
);
8158 ADD_INSN(ret
, line
, pop
);
8164 CHECK(COMPILE(ret
, "argscat head", node
->nd_head
));
8165 ADD_INSN1(ret
, line
, splatarray
, Qfalse
);
8166 ADD_INSN(ret
, line
, pop
);
8167 CHECK(COMPILE(ret
, "argscat body", node
->nd_body
));
8168 ADD_INSN1(ret
, line
, splatarray
, Qfalse
);
8169 ADD_INSN(ret
, line
, pop
);
8172 CHECK(COMPILE(ret
, "argscat head", node
->nd_head
));
8173 CHECK(COMPILE(ret
, "argscat body", node
->nd_body
));
8174 ADD_INSN(ret
, line
, concatarray
);
8178 case NODE_ARGSPUSH
:{
8180 CHECK(COMPILE(ret
, "arsgpush head", node
->nd_head
));
8181 ADD_INSN1(ret
, line
, splatarray
, Qfalse
);
8182 ADD_INSN(ret
, line
, pop
);
8183 CHECK(COMPILE_(ret
, "argspush body", node
->nd_body
, popped
));
8186 CHECK(COMPILE(ret
, "arsgpush head", node
->nd_head
));
8187 CHECK(COMPILE_(ret
, "argspush body", node
->nd_body
, popped
));
8188 ADD_INSN1(ret
, line
, newarray
, INT2FIX(1));
8189 ADD_INSN(ret
, line
, concatarray
);
8194 CHECK(COMPILE(ret
, "splat", node
->nd_head
));
8195 ADD_INSN1(ret
, line
, splatarray
, Qtrue
);
8198 ADD_INSN(ret
, line
, pop
);
8203 ID mid
= node
->nd_mid
;
8204 const rb_iseq_t
*method_iseq
= NEW_ISEQ(node
->nd_defn
,
8206 ISEQ_TYPE_METHOD
, line
);
8208 debugp_param("defn/iseq", rb_iseqw_new(method_iseq
));
8209 ADD_INSN2(ret
, line
, definemethod
, ID2SYM(mid
), method_iseq
);
8210 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)method_iseq
);
8213 ADD_INSN1(ret
, line
, putobject
, ID2SYM(mid
));
8219 ID mid
= node
->nd_mid
;
8220 const rb_iseq_t
* singleton_method_iseq
= NEW_ISEQ(node
->nd_defn
,
8222 ISEQ_TYPE_METHOD
, line
);
8224 debugp_param("defs/iseq", rb_iseqw_new(singleton_method_iseq
));
8225 CHECK(COMPILE(ret
, "defs: recv", node
->nd_recv
));
8226 ADD_INSN2(ret
, line
, definesmethod
, ID2SYM(mid
), singleton_method_iseq
);
8227 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)singleton_method_iseq
);
8230 ADD_INSN1(ret
, line
, putobject
, ID2SYM(mid
));
8235 ADD_INSN1(ret
, line
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
8236 ADD_INSN1(ret
, line
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_CBASE
));
8237 CHECK(COMPILE(ret
, "alias arg1", node
->nd_1st
));
8238 CHECK(COMPILE(ret
, "alias arg2", node
->nd_2nd
));
8239 ADD_SEND(ret
, line
, id_core_set_method_alias
, INT2FIX(3));
8242 ADD_INSN(ret
, line
, pop
);
8247 ADD_INSN1(ret
, line
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
8248 ADD_INSN1(ret
, line
, putobject
, ID2SYM(node
->nd_alias
));
8249 ADD_INSN1(ret
, line
, putobject
, ID2SYM(node
->nd_orig
));
8250 ADD_SEND(ret
, line
, id_core_set_variable_alias
, INT2FIX(2));
8253 ADD_INSN(ret
, line
, pop
);
8258 ADD_INSN1(ret
, line
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
8259 ADD_INSN1(ret
, line
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_CBASE
));
8260 CHECK(COMPILE(ret
, "undef arg", node
->nd_undef
));
8261 ADD_SEND(ret
, line
, id_core_undef_method
, INT2FIX(2));
8264 ADD_INSN(ret
, line
, pop
);
8269 const rb_iseq_t
*class_iseq
= NEW_CHILD_ISEQ(node
->nd_body
,
8270 rb_sprintf("<class:%"PRIsVALUE
">", rb_id2str(node
->nd_cpath
->nd_mid
)),
8271 ISEQ_TYPE_CLASS
, line
);
8272 const int flags
= VM_DEFINECLASS_TYPE_CLASS
|
8273 (node
->nd_super
? VM_DEFINECLASS_FLAG_HAS_SUPERCLASS
: 0) |
8274 compile_cpath(ret
, iseq
, node
->nd_cpath
);
8276 CHECK(COMPILE(ret
, "super", node
->nd_super
));
8277 ADD_INSN3(ret
, line
, defineclass
, ID2SYM(node
->nd_cpath
->nd_mid
), class_iseq
, INT2FIX(flags
));
8278 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)class_iseq
);
8281 ADD_INSN(ret
, line
, pop
);
8286 const rb_iseq_t
*module_iseq
= NEW_CHILD_ISEQ(node
->nd_body
,
8287 rb_sprintf("<module:%"PRIsVALUE
">", rb_id2str(node
->nd_cpath
->nd_mid
)),
8288 ISEQ_TYPE_CLASS
, line
);
8289 const int flags
= VM_DEFINECLASS_TYPE_MODULE
|
8290 compile_cpath(ret
, iseq
, node
->nd_cpath
);
8292 ADD_INSN (ret
, line
, putnil
); /* dummy */
8293 ADD_INSN3(ret
, line
, defineclass
, ID2SYM(node
->nd_cpath
->nd_mid
), module_iseq
, INT2FIX(flags
));
8294 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)module_iseq
);
8297 ADD_INSN(ret
, line
, pop
);
8303 const rb_iseq_t
*singleton_class
= NEW_ISEQ(node
->nd_body
, rb_fstring_lit("singleton class"),
8304 ISEQ_TYPE_CLASS
, line
);
8306 CHECK(COMPILE(ret
, "sclass#recv", node
->nd_recv
));
8307 ADD_INSN (ret
, line
, putnil
);
8308 CONST_ID(singletonclass
, "singletonclass");
8309 ADD_INSN3(ret
, line
, defineclass
,
8310 ID2SYM(singletonclass
), singleton_class
,
8311 INT2FIX(VM_DEFINECLASS_TYPE_SINGLETON_CLASS
));
8312 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)singleton_class
);
8315 ADD_INSN(ret
, line
, pop
);
8320 if (rb_is_const_id(node
->nd_mid
)) {
8322 LABEL
*lend
= NEW_LABEL(line
);
8323 int ic_index
= body
->is_size
++;
8330 CHECK(compile_const_prefix(iseq
, node
, pref
, body
));
8331 if (LIST_INSN_SIZE_ZERO(pref
)) {
8332 if (ISEQ_COMPILE_DATA(iseq
)->option
->inline_const_cache
) {
8333 ADD_INSN2(ret
, line
, opt_getinlinecache
, lend
, INT2FIX(ic_index
));
8336 ADD_INSN(ret
, line
, putnil
);
8341 if (ISEQ_COMPILE_DATA(iseq
)->option
->inline_const_cache
) {
8342 ADD_INSN1(ret
, line
, opt_setinlinecache
, INT2FIX(ic_index
));
8343 ADD_LABEL(ret
, lend
);
8353 ADD_CALL_RECEIVER(ret
, line
);
8354 CHECK(COMPILE(ret
, "colon2#nd_head", node
->nd_head
));
8355 ADD_CALL(ret
, line
, node
->nd_mid
, INT2FIX(1));
8358 ADD_INSN(ret
, line
, pop
);
8363 LABEL
*lend
= NEW_LABEL(line
);
8364 int ic_index
= body
->is_size
++;
8366 debugi("colon3#nd_mid", node
->nd_mid
);
8368 /* add cache insn */
8369 if (ISEQ_COMPILE_DATA(iseq
)->option
->inline_const_cache
) {
8370 ADD_INSN2(ret
, line
, opt_getinlinecache
, lend
, INT2FIX(ic_index
));
8371 ADD_INSN(ret
, line
, pop
);
8374 ADD_INSN1(ret
, line
, putobject
, rb_cObject
);
8375 ADD_INSN1(ret
, line
, putobject
, Qtrue
);
8376 ADD_INSN1(ret
, line
, getconstant
, ID2SYM(node
->nd_mid
));
8378 if (ISEQ_COMPILE_DATA(iseq
)->option
->inline_const_cache
) {
8379 ADD_INSN1(ret
, line
, opt_setinlinecache
, INT2FIX(ic_index
));
8380 ADD_LABEL(ret
, lend
);
8384 ADD_INSN(ret
, line
, pop
);
8390 int excl
= type
== NODE_DOT3
;
8391 VALUE flag
= INT2FIX(excl
);
8392 const NODE
*b
= node
->nd_beg
;
8393 const NODE
*e
= node
->nd_end
;
8394 if (optimizable_range_item_p(b
) && optimizable_range_item_p(e
)) {
8396 VALUE bv
= nd_type(b
) == NODE_LIT
? b
->nd_lit
: Qnil
;
8397 VALUE ev
= nd_type(e
) == NODE_LIT
? e
->nd_lit
: Qnil
;
8398 VALUE val
= rb_range_new(bv
, ev
, excl
);
8399 ADD_INSN1(ret
, line
, putobject
, val
);
8400 RB_OBJ_WRITTEN(iseq
, Qundef
, val
);
8404 CHECK(COMPILE_(ret
, "min", b
, popped
));
8405 CHECK(COMPILE_(ret
, "max", e
, popped
));
8407 ADD_INSN1(ret
, line
, newrange
, flag
);
8414 LABEL
*lend
= NEW_LABEL(line
);
8415 LABEL
*ltrue
= NEW_LABEL(line
);
8416 LABEL
*lfalse
= NEW_LABEL(line
);
8417 CHECK(compile_flip_flop(iseq
, ret
, node
, type
== NODE_FLIP2
,
8419 ADD_LABEL(ret
, ltrue
);
8420 ADD_INSN1(ret
, line
, putobject
, Qtrue
);
8421 ADD_INSNL(ret
, line
, jump
, lend
);
8422 ADD_LABEL(ret
, lfalse
);
8423 ADD_INSN1(ret
, line
, putobject
, Qfalse
);
8424 ADD_LABEL(ret
, lend
);
8429 ADD_INSN(ret
, line
, putself
);
8435 ADD_INSN(ret
, line
, putnil
);
8441 ADD_INSN1(ret
, line
, putobject
, Qtrue
);
8447 ADD_INSN1(ret
, line
, putobject
, Qfalse
);
8453 if (body
->type
== ISEQ_TYPE_RESCUE
) {
8454 ADD_GETLOCAL(ret
, line
, LVAR_ERRINFO
, 0);
8457 const rb_iseq_t
*ip
= iseq
;
8460 if (ip
->body
->type
== ISEQ_TYPE_RESCUE
) {
8463 ip
= ip
->body
->parent_iseq
;
8467 ADD_GETLOCAL(ret
, line
, LVAR_ERRINFO
, level
);
8470 ADD_INSN(ret
, line
, putnil
);
8478 CHECK(compile_defined_expr(iseq
, ret
, node
, Qtrue
));
8483 * ONCE{ rb_mRubyVMFrozenCore::core#set_postexe{ ... } }
8485 int is_index
= body
->is_size
++;
8486 struct rb_iseq_new_with_callback_callback_func
*ifunc
=
8487 rb_iseq_new_with_callback_new_callback(build_postexe_iseq
, node
->nd_body
);
8488 const rb_iseq_t
*once_iseq
=
8489 new_child_iseq_with_callback(iseq
, ifunc
,
8490 rb_fstring(make_name_for_block(iseq
)), iseq
, ISEQ_TYPE_BLOCK
, line
);
8492 ADD_INSN2(ret
, line
, once
, once_iseq
, INT2FIX(is_index
));
8493 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)once_iseq
);
8496 ADD_INSN(ret
, line
, pop
);
8502 LABEL
*end_label
= NEW_LABEL(nd_line(node
));
8503 const NODE
*default_value
= node
->nd_body
->nd_value
;
8505 if (default_value
== NODE_SPECIAL_REQUIRED_KEYWORD
) {
8506 /* required argument. do nothing */
8507 COMPILE_ERROR(ERROR_ARGS
"unreachable");
8510 else if (nd_type(default_value
) == NODE_LIT
||
8511 nd_type(default_value
) == NODE_NIL
||
8512 nd_type(default_value
) == NODE_TRUE
||
8513 nd_type(default_value
) == NODE_FALSE
) {
8514 COMPILE_ERROR(ERROR_ARGS
"unreachable");
8518 /* if keywordcheck(_kw_bits, nth_keyword)
8519 * kw = default_value
8522 int kw_bits_idx
= body
->local_table_size
- body
->param
.keyword
->bits_start
;
8523 int keyword_idx
= body
->param
.keyword
->num
;
8525 ADD_INSN2(ret
, line
, checkkeyword
, INT2FIX(kw_bits_idx
+ VM_ENV_DATA_SIZE
- 1), INT2FIX(keyword_idx
));
8526 ADD_INSNL(ret
, line
, branchif
, end_label
);
8527 CHECK(COMPILE_POPPED(ret
, "keyword default argument", node
->nd_body
));
8528 ADD_LABEL(ret
, end_label
);
8534 compile_dstr(iseq
, ret
, node
);
8536 ADD_INSN(ret
, line
, intern
);
8539 ADD_INSN(ret
, line
, pop
);
8543 case NODE_ATTRASGN
:{
8546 unsigned int flag
= 0;
8547 ID mid
= node
->nd_mid
;
8549 LABEL
*else_label
= NULL
;
8550 VALUE branches
= Qfalse
;
8552 /* optimization shortcut
8553 * obj["literal"] = value -> opt_aset_with(obj, "literal", value)
8555 if (mid
== idASET
&& !private_recv_p(node
) && node
->nd_args
&&
8556 nd_type(node
->nd_args
) == NODE_LIST
&& node
->nd_args
->nd_alen
== 2 &&
8557 nd_type(node
->nd_args
->nd_head
) == NODE_STR
&&
8558 ISEQ_COMPILE_DATA(iseq
)->current_block
== NULL
&&
8559 !ISEQ_COMPILE_DATA(iseq
)->option
->frozen_string_literal
&&
8560 ISEQ_COMPILE_DATA(iseq
)->option
->specialized_instruction
)
8562 VALUE str
= rb_fstring(node
->nd_args
->nd_head
->nd_lit
);
8563 CHECK(COMPILE(ret
, "recv", node
->nd_recv
));
8564 CHECK(COMPILE(ret
, "value", node
->nd_args
->nd_next
->nd_head
));
8566 ADD_INSN(ret
, line
, swap
);
8567 ADD_INSN1(ret
, line
, topn
, INT2FIX(1));
8569 ADD_INSN2(ret
, line
, opt_aset_with
, str
,
8570 new_callinfo(iseq
, idASET
, 2, 0, NULL
, FALSE
));
8571 RB_OBJ_WRITTEN(iseq
, Qundef
, str
);
8572 ADD_INSN(ret
, line
, pop
);
8578 argc
= setup_args(iseq
, args
, node
->nd_args
, &flag
, NULL
);
8579 CHECK(!NIL_P(argc
));
8581 int asgnflag
= COMPILE_RECV(recv
, "recv", node
);
8582 CHECK(asgnflag
!= -1);
8583 flag
|= (unsigned int)asgnflag
;
8585 debugp_param("argc", argc
);
8586 debugp_param("nd_mid", ID2SYM(mid
));
8588 if (!rb_is_attrset_id(mid
)) {
8590 mid
= rb_id_attrset(mid
);
8591 else_label
= qcall_branch_start(iseq
, recv
, &branches
, node
, line
);
8594 ADD_INSN(ret
, line
, putnil
);
8598 if (flag
& VM_CALL_ARGS_BLOCKARG
) {
8599 ADD_INSN1(ret
, line
, topn
, INT2FIX(1));
8600 if (flag
& VM_CALL_ARGS_SPLAT
) {
8601 ADD_INSN1(ret
, line
, putobject
, INT2FIX(-1));
8602 ADD_SEND_WITH_FLAG(ret
, line
, idAREF
, INT2FIX(1), INT2FIX(asgnflag
));
8604 ADD_INSN1(ret
, line
, setn
, FIXNUM_INC(argc
, 3));
8605 ADD_INSN (ret
, line
, pop
);
8607 else if (flag
& VM_CALL_ARGS_SPLAT
) {
8608 ADD_INSN(ret
, line
, dup
);
8609 ADD_INSN1(ret
, line
, putobject
, INT2FIX(-1));
8610 ADD_SEND_WITH_FLAG(ret
, line
, idAREF
, INT2FIX(1), INT2FIX(asgnflag
));
8611 ADD_INSN1(ret
, line
, setn
, FIXNUM_INC(argc
, 2));
8612 ADD_INSN (ret
, line
, pop
);
8615 ADD_INSN1(ret
, line
, setn
, FIXNUM_INC(argc
, 1));
8622 ADD_SEND_WITH_FLAG(ret
, line
, mid
, argc
, INT2FIX(flag
));
8623 qcall_branch_end(iseq
, ret
, else_label
, branches
, node
, line
);
8624 ADD_INSN(ret
, line
, pop
);
8629 /* compile same as lambda{...} */
8630 const rb_iseq_t
*block
= NEW_CHILD_ISEQ(node
->nd_body
, make_name_for_block(iseq
), ISEQ_TYPE_BLOCK
, line
);
8631 VALUE argc
= INT2FIX(0);
8633 ADD_INSN1(ret
, line
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
8634 ADD_CALL_WITH_BLOCK(ret
, line
, idLambda
, argc
, block
);
8635 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)block
);
8638 ADD_INSN(ret
, line
, pop
);
8643 UNKNOWN_NODE("iseq_compile_each", node
, COMPILE_NG
);
8653 /***************************/
8654 /* instruction information */
8655 /***************************/
/* Return the length, in code words, of the instruction held by IOBJ,
 * as given by the generated instruction table (insns_info.inc). */
static int
insn_data_length(INSN *iobj)
{
    return insn_len(iobj->insn_id);
}
/* Compute the stack depth after executing INSN, given the depth DEPTH
 * before it.  Delegates to comptime_insn_stack_increase(), which
 * inspects the instruction id and its operands. */
static int
calc_sp_depth(int depth, INSN *insn)
{
    return comptime_insn_stack_increase(depth, insn->insn_id, insn->operands);
}
/* Return an inspect string for an instruction operand for disassembly.
 * Operands may be "hidden" objects (non-immediate with klass == 0)
 * which rb_inspect() cannot handle directly, so such strings/arrays are
 * first copied into ordinary, visible objects.
 * NOTE(review): the T_STRING/T_ARRAY case labels were not visible in
 * the extracted text and were reconstructed — confirm against upstream. */
static VALUE
opobj_inspect(VALUE obj)
{
    if (!SPECIAL_CONST_P(obj) && !RBASIC_CLASS(obj)) {
        switch (BUILTIN_TYPE(obj)) {
          case T_STRING:
            obj = rb_str_new_cstr(RSTRING_PTR(obj));
            break;
          case T_ARRAY:
            obj = rb_ary_dup(obj);
            break;
        }
    }
    return rb_inspect(obj);
}
8688 insn_data_to_s_detail(INSN
*iobj
)
8690 VALUE str
= rb_sprintf("%-20s ", insn_name(iobj
->insn_id
));
8692 if (iobj
->operands
) {
8693 const char *types
= insn_op_types(iobj
->insn_id
);
8696 for (j
= 0; types
[j
]; j
++) {
8697 char type
= types
[j
];
8700 case TS_OFFSET
: /* label(destination position) */
8702 LABEL
*lobj
= (LABEL
*)OPERAND_AT(iobj
, j
);
8703 rb_str_catf(str
, LABEL_FORMAT
, lobj
->label_no
);
8707 case TS_ISEQ
: /* iseq */
8709 rb_iseq_t
*iseq
= (rb_iseq_t
*)OPERAND_AT(iobj
, j
);
8711 if (0 && iseq
) { /* TODO: invalidate now */
8714 rb_str_concat(str
, opobj_inspect(val
));
8718 case TS_NUM
: /* ulong */
8719 case TS_VALUE
: /* VALUE */
8721 VALUE v
= OPERAND_AT(iobj
, j
);
8722 rb_str_concat(str
, opobj_inspect(v
));
8725 case TS_ID
: /* ID */
8726 rb_str_concat(str
, opobj_inspect(OPERAND_AT(iobj
, j
)));
8730 struct rb_global_entry
*entry
= (struct rb_global_entry
*)
8731 (OPERAND_AT(iobj
, j
) & (~1));
8732 rb_str_append(str
, rb_id2str(entry
->id
));
8735 case TS_IC
: /* inline cache */
8736 case TS_IVC
: /* inline ivar cache */
8737 case TS_ISE
: /* inline storage entry */
8738 rb_str_catf(str
, "<ic:%d>", FIX2INT(OPERAND_AT(iobj
, j
)));
8740 case TS_CALLDATA
: /* we store these as call infos at compile time */
8742 const struct rb_callinfo
*ci
= (struct rb_callinfo
*)OPERAND_AT(iobj
, j
);
8743 rb_str_cat2(str
, "<calldata:");
8744 if (vm_ci_mid(ci
)) rb_str_catf(str
, "%"PRIsVALUE
, rb_id2str(vm_ci_mid(ci
)));
8745 rb_str_catf(str
, ", %d>", vm_ci_argc(ci
));
8748 case TS_CDHASH
: /* case/when condition cache */
8749 rb_str_cat2(str
, "<ch>");
8753 void *func
= (void *)OPERAND_AT(iobj
, j
);
8756 if (dladdr(func
, &info
) && info
.dli_sname
) {
8757 rb_str_cat2(str
, info
.dli_sname
);
8761 rb_str_catf(str
, "<%p>", func
);
8765 rb_bug("unsupported: TS_BUILTIN");
8768 rb_raise(rb_eSyntaxError
, "unknown operand type: %c", type
);
8772 rb_str_cat2(str
, ", ");
/* Dump a human-readable disassembly of the element list starting at
 * LINK to stdout, with no cursor or destination label highlighted. */
static void
dump_disasm_list(const LINK_ELEMENT *link)
{
    dump_disasm_list_with_cursor(link, NULL, NULL);
}
8786 dump_disasm_list_with_cursor(const LINK_ELEMENT
*link
, const LINK_ELEMENT
*curr
, const LABEL
*dest
)
8793 printf("-- raw disasm--------\n");
8796 if (curr
) printf(curr
== link
? "*" : " ");
8797 switch (link
->type
) {
8798 case ISEQ_ELEMENT_INSN
:
8800 iobj
= (INSN
*)link
;
8801 str
= insn_data_to_s_detail(iobj
);
8802 printf("%04d %-65s(%4u)\n", pos
, StringValueCStr(str
), iobj
->insn_info
.line_no
);
8803 pos
+= insn_data_length(iobj
);
8806 case ISEQ_ELEMENT_LABEL
:
8808 lobj
= (LABEL
*)link
;
8809 printf(LABEL_FORMAT
" [sp: %d]%s\n", lobj
->label_no
, lobj
->sp
,
8810 dest
== lobj
? " <---" : "");
8813 case ISEQ_ELEMENT_TRACE
:
8815 TRACE
*trace
= (TRACE
*)link
;
8816 printf("trace: %0x\n", trace
->event
);
8819 case ISEQ_ELEMENT_ADJUST
:
8821 ADJUST
*adjust
= (ADJUST
*)link
;
8822 printf("adjust: [label: %d]\n", adjust
->label
? adjust
->label
->label_no
: -1);
8827 rb_raise(rb_eSyntaxError
, "dump_disasm_list error: %ld\n", FIX2LONG(link
->type
));
8831 printf("---------------------\n");
/* Return the printable name of the VM instruction numbered I. */
const char *
rb_insns_name(int i)
{
    return insn_name(i);
}
8842 rb_insns_name_array(void)
8844 VALUE ary
= rb_ary_new_capa(VM_INSTRUCTION_SIZE
);
8846 for (i
= 0; i
< VM_INSTRUCTION_SIZE
; i
++) {
8847 rb_ary_push(ary
, rb_fstring_cstr(insn_name(i
)));
8849 return rb_obj_freeze(ary
);
/* Look up — or lazily create — the LABEL associated with the symbol
 * OBJ in LABELS_TABLE.  Used when building an iseq from its array
 * representation, where labels may be referenced before they are
 * defined.  OBJ is coerced to a Symbol first.  Returns the label.
 * NOTE(review): the trailing statements were not visible in the
 * extracted text and were reconstructed from upstream compile.c. */
static LABEL *
register_label(rb_iseq_t *iseq, struct st_table *labels_table, VALUE obj)
{
    LABEL *label = 0;
    st_data_t tmp;
    obj = rb_to_symbol_type(obj);

    if (st_lookup(labels_table, obj, &tmp) == 0) {
        /* first reference: create a fresh label and remember it */
        label = NEW_LABEL(0);
        st_insert(labels_table, obj, (st_data_t)label);
    }
    else {
        label = (LABEL *)tmp;
    }
    LABEL_REF(label);
    return label;
}
/* Map an exception-entry Symbol (:rescue, :ensure, :retry, :break,
 * :redo, :next) to the corresponding CATCH_TYPE_* value; raises a
 * SyntaxError for any other symbol.  The Symbol VALUEs are interned
 * once and cached in function-local statics. */
static enum catch_type
get_exception_sym2type(VALUE sym)
{
#define rb_intern(str) rb_intern_const(str)
    static VALUE symRescue, symEnsure, symRetry;
    static VALUE symBreak, symRedo, symNext;

    if (symRescue == 0) {
        /* one-time lazy initialization of the cached symbols */
        symRescue = ID2SYM(rb_intern("rescue"));
        symEnsure = ID2SYM(rb_intern("ensure"));
        symRetry = ID2SYM(rb_intern("retry"));
        symBreak = ID2SYM(rb_intern("break"));
        symRedo = ID2SYM(rb_intern("redo"));
        symNext = ID2SYM(rb_intern("next"));
    }

    if (sym == symRescue) return CATCH_TYPE_RESCUE;
    if (sym == symEnsure) return CATCH_TYPE_ENSURE;
    if (sym == symRetry) return CATCH_TYPE_RETRY;
    if (sym == symBreak) return CATCH_TYPE_BREAK;
    if (sym == symRedo) return CATCH_TYPE_REDO;
    if (sym == symNext) return CATCH_TYPE_NEXT;
    rb_raise(rb_eSyntaxError, "invalid exception symbol: %+"PRIsVALUE, sym);
    return 0;
}
8898 iseq_build_from_ary_exception(rb_iseq_t
*iseq
, struct st_table
*labels_table
,
8903 for (i
=0; i
<RARRAY_LEN(exception
); i
++) {
8904 const rb_iseq_t
*eiseq
;
8906 LABEL
*lstart
, *lend
, *lcont
;
8909 v
= rb_to_array_type(RARRAY_AREF(exception
, i
));
8910 if (RARRAY_LEN(v
) != 6) {
8911 rb_raise(rb_eSyntaxError
, "wrong exception entry");
8913 type
= get_exception_sym2type(RARRAY_AREF(v
, 0));
8914 if (RARRAY_AREF(v
, 1) == Qnil
) {
8918 eiseq
= rb_iseqw_to_iseq(rb_iseq_load(RARRAY_AREF(v
, 1), (VALUE
)iseq
, Qnil
));
8921 lstart
= register_label(iseq
, labels_table
, RARRAY_AREF(v
, 2));
8922 lend
= register_label(iseq
, labels_table
, RARRAY_AREF(v
, 3));
8923 lcont
= register_label(iseq
, labels_table
, RARRAY_AREF(v
, 4));
8924 sp
= NUM2UINT(RARRAY_AREF(v
, 5));
8926 /* TODO: Dirty Hack! Fix me */
8927 if (type
== CATCH_TYPE_RESCUE
||
8928 type
== CATCH_TYPE_BREAK
||
8929 type
== CATCH_TYPE_NEXT
) {
8935 ADD_CATCH_ENTRY(type
, lstart
, lend
, eiseq
, lcont
);
8942 static struct st_table
*
8943 insn_make_insn_table(void)
8945 struct st_table
*table
;
8947 table
= st_init_numtable_with_size(VM_INSTRUCTION_SIZE
);
8949 for (i
=0; i
<VM_INSTRUCTION_SIZE
; i
++) {
8950 st_insert(table
, ID2SYM(rb_intern(insn_name(i
))), i
);
/* Resolve an iseq operand OP used while building an iseq from its
 * array representation.  OP is either an array in the rb_iseq_load
 * format (loaded with ISEQ as parent) or an already-wrapped
 * RubyVM::InstructionSequence; anything else raises a SyntaxError.
 * Returns the unwrapped rb_iseq_t. */
static const rb_iseq_t *
iseq_build_load_iseq(const rb_iseq_t *iseq, VALUE op)
{
    VALUE iseqw;
    const rb_iseq_t *loaded_iseq;

    if (RB_TYPE_P(op, T_ARRAY)) {
        iseqw = rb_iseq_load(op, (VALUE)iseq, Qnil);
    }
    else if (CLASS_OF(op) == rb_cISeq) {
        /* already a wrapped iseq; use it as-is */
        iseqw = op;
    }
    else {
        rb_raise(rb_eSyntaxError, "ISEQ is required");
    }
    loaded_iseq = rb_iseqw_to_iseq(iseqw);
    return loaded_iseq;
}
/* Build a call-info operand from the Hash operand OP with optional keys
 * :mid, :flag, :orig_argc and :kw_arg (OP itself may be nil, yielding
 * an empty call info).  Keyword-argument symbols in :kw_arg are pinned
 * via SYM2ID so they are never collected.  Returns the new rb_callinfo,
 * registered as written to ISEQ for GC write-barrier purposes.
 * NOTE(review): leading locals and the !NIL_P(op) guard were not
 * visible in the extracted text and were reconstructed from upstream. */
static VALUE
iseq_build_callinfo_from_hash(rb_iseq_t *iseq, VALUE op)
{
    ID mid = 0;
    int orig_argc = 0;
    unsigned int flag = 0;
    struct rb_callinfo_kwarg *kw_arg = 0;

    if (!NIL_P(op)) {
        VALUE vmid = rb_hash_aref(op, ID2SYM(rb_intern("mid")));
        VALUE vflag = rb_hash_aref(op, ID2SYM(rb_intern("flag")));
        VALUE vorig_argc = rb_hash_aref(op, ID2SYM(rb_intern("orig_argc")));
        VALUE vkw_arg = rb_hash_aref(op, ID2SYM(rb_intern("kw_arg")));

        if (!NIL_P(vmid)) mid = SYM2ID(vmid);
        if (!NIL_P(vflag)) flag = NUM2UINT(vflag);
        if (!NIL_P(vorig_argc)) orig_argc = FIX2INT(vorig_argc);

        if (!NIL_P(vkw_arg)) {
            int i;
            int len = RARRAY_LENINT(vkw_arg);
            size_t n = rb_callinfo_kwarg_bytes(len);

            kw_arg = xmalloc(n);
            kw_arg->keyword_len = len;
            for (i = 0; i < len; i++) {
                VALUE kw = RARRAY_AREF(vkw_arg, i);
                SYM2ID(kw); /* make immortal */
                kw_arg->keywords[i] = kw;
            }
        }
    }

    const struct rb_callinfo *ci = new_callinfo(iseq, mid, orig_argc, flag, kw_arg, (flag & VM_CALL_ARGS_SIMPLE) == 0);
    RB_OBJ_WRITTEN(iseq, Qundef, ci);
    return (VALUE)ci;
}
/* Translate a trace-event Symbol (e.g. the symbol :RUBY_EVENT_LINE
 * spelled via the stringized macro name) into the corresponding
 * RUBY_EVENT_* flag.  Returns RUBY_EVENT_NONE when SYM names no
 * recognized event. */
static rb_event_flag_t
event_name_to_flag(VALUE sym)
{
#define CHECK_EVENT(ev) if (sym == ID2SYM(rb_intern(#ev))) return ev;
    CHECK_EVENT(RUBY_EVENT_LINE);
    CHECK_EVENT(RUBY_EVENT_CLASS);
    CHECK_EVENT(RUBY_EVENT_END);
    CHECK_EVENT(RUBY_EVENT_CALL);
    CHECK_EVENT(RUBY_EVENT_RETURN);
    CHECK_EVENT(RUBY_EVENT_B_CALL);
    CHECK_EVENT(RUBY_EVENT_B_RETURN);
#undef CHECK_EVENT
    return RUBY_EVENT_NONE;
}
9030 iseq_build_from_ary_body(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
,
9031 VALUE body
, VALUE labels_wrapper
)
9033 /* TODO: body should be frozen */
9034 long i
, len
= RARRAY_LEN(body
);
9035 struct st_table
*labels_table
= DATA_PTR(labels_wrapper
);
9038 int ret
= COMPILE_OK
;
9041 * index -> LABEL *label
9043 static struct st_table
*insn_table
;
9045 if (insn_table
== 0) {
9046 insn_table
= insn_make_insn_table();
9049 for (i
=0; i
<len
; i
++) {
9050 VALUE obj
= RARRAY_AREF(body
, i
);
9052 if (SYMBOL_P(obj
)) {
9053 rb_event_flag_t event
;
9054 if ((event
= event_name_to_flag(obj
)) != RUBY_EVENT_NONE
) {
9055 ADD_TRACE(anchor
, event
);
9058 LABEL
*label
= register_label(iseq
, labels_table
, obj
);
9059 ADD_LABEL(anchor
, label
);
9062 else if (FIXNUM_P(obj
)) {
9063 line_no
= NUM2INT(obj
);
9065 else if (RB_TYPE_P(obj
, T_ARRAY
)) {
9067 int argc
= RARRAY_LENINT(obj
) - 1;
9071 insn
= (argc
< 0) ? Qnil
: RARRAY_AREF(obj
, 0);
9072 if (st_lookup(insn_table
, (st_data_t
)insn
, &insn_id
) == 0) {
9073 /* TODO: exception */
9074 COMPILE_ERROR(iseq
, line_no
,
9075 "unknown instruction: %+"PRIsVALUE
, insn
);
9080 if (argc
!= insn_len((VALUE
)insn_id
)-1) {
9081 COMPILE_ERROR(iseq
, line_no
,
9082 "operand size mismatch");
9088 argv
= compile_data_calloc2(iseq
, sizeof(VALUE
), argc
);
9090 // add element before operand setup to make GC root
9092 (LINK_ELEMENT
*)new_insn_core(iseq
, line_no
,
9093 (enum ruby_vminsn_type
)insn_id
, argc
, argv
));
9095 for (j
=0; j
<argc
; j
++) {
9096 VALUE op
= rb_ary_entry(obj
, j
+1);
9097 switch (insn_op_type((VALUE
)insn_id
, j
)) {
9099 LABEL
*label
= register_label(iseq
, labels_table
, op
);
9100 argv
[j
] = (VALUE
)label
;
9110 RB_OBJ_WRITTEN(iseq
, Qundef
, op
);
9115 VALUE v
= (VALUE
)iseq_build_load_iseq(iseq
, op
);
9117 RB_OBJ_WRITTEN(iseq
, Qundef
, v
);
9125 op
= rb_to_symbol_type(op
);
9126 argv
[j
] = (VALUE
)rb_global_entry(SYM2ID(op
));
9129 FL_SET((VALUE
)iseq
, ISEQ_MARKABLE_ISEQ
);
9132 case TS_IVC
: /* inline ivar cache */
9134 if (NUM2UINT(op
) >= iseq
->body
->is_size
) {
9135 iseq
->body
->is_size
= NUM2INT(op
) + 1;
9139 argv
[j
] = iseq_build_callinfo_from_hash(iseq
, op
);
9142 argv
[j
] = rb_to_symbol_type(op
);
9147 VALUE map
= rb_hash_new_with_size(RARRAY_LEN(op
)/2);
9149 RHASH_TBL_RAW(map
)->type
= &cdhash_type
;
9150 op
= rb_to_array_type(op
);
9151 for (i
=0; i
<RARRAY_LEN(op
); i
+=2) {
9152 VALUE key
= RARRAY_AREF(op
, i
);
9153 VALUE sym
= RARRAY_AREF(op
, i
+1);
9155 register_label(iseq
, labels_table
, sym
);
9156 rb_hash_aset(map
, key
, (VALUE
)label
| 1);
9160 RB_OBJ_WRITTEN(iseq
, Qundef
, map
);
9165 #if SIZEOF_VALUE <= SIZEOF_LONG
9166 long funcptr
= NUM2LONG(op
);
9168 LONG_LONG funcptr
= NUM2LL(op
);
9170 argv
[j
] = (VALUE
)funcptr
;
9174 rb_raise(rb_eSyntaxError
, "unknown operand: %c", insn_op_type((VALUE
)insn_id
, j
));
9180 (LINK_ELEMENT
*)new_insn_core(iseq
, line_no
,
9181 (enum ruby_vminsn_type
)insn_id
, argc
, NULL
));
9185 rb_raise(rb_eTypeError
, "unexpected object for instruction");
9188 DATA_PTR(labels_wrapper
) = 0;
9189 validate_labels(iseq
, labels_table
);
9190 if (!ret
) return ret
;
9191 return iseq_setup(iseq
, anchor
);
9194 #define CHECK_ARRAY(v) rb_to_array_type(v)
9195 #define CHECK_SYMBOL(v) rb_to_symbol_type(v)
/* Fetch the integer parameter named SYM from the Hash PARAM into *DST.
 * Returns TRUE when the key was present as a Fixnum, FALSE when the
 * key was absent (nil); raises a TypeError for any non-Fixnum value.
 * *DST is left untouched unless TRUE is returned. */
static int
int_param(int *dst, VALUE param, VALUE sym)
{
    VALUE val = rb_hash_aref(param, sym);
    if (FIXNUM_P(val)) {
        *dst = FIX2INT(val);
        return TRUE;
    }
    else if (!NIL_P(val)) {
        rb_raise(rb_eTypeError, "invalid %+"PRIsVALUE" Fixnum: %+"PRIsVALUE,
                 sym, val);
    }
    return FALSE;
}
9212 static const struct rb_iseq_param_keyword
*
9213 iseq_build_kw(rb_iseq_t
*iseq
, VALUE params
, VALUE keywords
)
9216 int len
= RARRAY_LENINT(keywords
);
9218 VALUE key
, sym
, default_val
;
9221 struct rb_iseq_param_keyword
*keyword
= ZALLOC(struct rb_iseq_param_keyword
);
9223 iseq
->body
->param
.flags
.has_kw
= TRUE
;
9226 #define SYM(s) ID2SYM(rb_intern(#s))
9227 (void)int_param(&keyword
->bits_start
, params
, SYM(kwbits
));
9228 i
= keyword
->bits_start
- keyword
->num
;
9229 ids
= (ID
*)&iseq
->body
->local_table
[i
];
9233 for (i
= 0; i
< len
; i
++) {
9234 VALUE val
= RARRAY_AREF(keywords
, i
);
9236 if (!SYMBOL_P(val
)) {
9237 goto default_values
;
9239 ids
[i
] = SYM2ID(val
);
9240 keyword
->required_num
++;
9243 default_values
: /* note: we intentionally preserve `i' from previous loop */
9244 default_len
= len
- i
;
9245 if (default_len
== 0) {
9246 keyword
->table
= ids
;
9249 else if (default_len
< 0) {
9253 dvs
= ALLOC_N(VALUE
, (unsigned int)default_len
);
9255 for (j
= 0; i
< len
; i
++, j
++) {
9256 key
= RARRAY_AREF(keywords
, i
);
9259 switch (RARRAY_LEN(key
)) {
9261 sym
= RARRAY_AREF(key
, 0);
9262 default_val
= Qundef
;
9265 sym
= RARRAY_AREF(key
, 0);
9266 default_val
= RARRAY_AREF(key
, 1);
9269 rb_raise(rb_eTypeError
, "keyword default has unsupported len %+"PRIsVALUE
, key
);
9271 ids
[i
] = SYM2ID(sym
);
9272 dvs
[j
] = default_val
;
9275 keyword
->table
= ids
;
9276 keyword
->default_values
= dvs
;
9282 rb_iseq_mark_insn_storage(struct iseq_compile_data_storage
*storage
)
9285 size_t size
= sizeof(INSN
);
9286 unsigned int pos
= 0;
9289 #ifdef STRICT_ALIGNMENT
9290 size_t padding
= calc_padding((void *)&storage
->buff
[pos
], size
);
9292 const size_t padding
= 0; /* expected to be optimized by compiler */
9293 #endif /* STRICT_ALIGNMENT */
9294 size_t offset
= pos
+ size
+ padding
;
9295 if (offset
> storage
->size
|| offset
> storage
->pos
) {
9297 storage
= storage
->next
;
9300 #ifdef STRICT_ALIGNMENT
9301 pos
+= (int)padding
;
9302 #endif /* STRICT_ALIGNMENT */
9304 iobj
= (INSN
*)&storage
->buff
[pos
];
9306 if (iobj
->operands
) {
9308 const char *types
= insn_op_types(iobj
->insn_id
);
9310 for (j
= 0; types
[j
]; j
++) {
9311 char type
= types
[j
];
9316 case TS_CALLDATA
: // ci is stored.
9318 VALUE op
= OPERAND_AT(iobj
, j
);
9320 if (!SPECIAL_CONST_P(op
)) {
9336 rb_iseq_build_from_ary(rb_iseq_t
*iseq
, VALUE misc
, VALUE locals
, VALUE params
,
9337 VALUE exception
, VALUE body
)
9339 #define SYM(s) ID2SYM(rb_intern(#s))
9341 unsigned int arg_size
, local_size
, stack_max
;
9343 struct st_table
*labels_table
= st_init_numtable();
9344 VALUE labels_wrapper
= Data_Wrap_Struct(0, rb_mark_set
, st_free_table
, labels_table
);
9345 VALUE arg_opt_labels
= rb_hash_aref(params
, SYM(opt
));
9346 VALUE keywords
= rb_hash_aref(params
, SYM(keyword
));
9347 VALUE sym_arg_rest
= ID2SYM(rb_intern("#arg_rest"));
9348 DECL_ANCHOR(anchor
);
9349 INIT_ANCHOR(anchor
);
9351 len
= RARRAY_LENINT(locals
);
9352 iseq
->body
->local_table_size
= len
;
9353 iseq
->body
->local_table
= tbl
= len
> 0 ? (ID
*)ALLOC_N(ID
, iseq
->body
->local_table_size
) : NULL
;
9355 for (i
= 0; i
< len
; i
++) {
9356 VALUE lv
= RARRAY_AREF(locals
, i
);
9358 if (sym_arg_rest
== lv
) {
9362 tbl
[i
] = FIXNUM_P(lv
) ? (ID
)FIX2LONG(lv
) : SYM2ID(CHECK_SYMBOL(lv
));
9366 #define INT_PARAM(F) int_param(&iseq->body->param.F, params, SYM(F))
9367 if (INT_PARAM(lead_num
)) {
9368 iseq
->body
->param
.flags
.has_lead
= TRUE
;
9370 if (INT_PARAM(post_num
)) iseq
->body
->param
.flags
.has_post
= TRUE
;
9371 if (INT_PARAM(post_start
)) iseq
->body
->param
.flags
.has_post
= TRUE
;
9372 if (INT_PARAM(rest_start
)) iseq
->body
->param
.flags
.has_rest
= TRUE
;
9373 if (INT_PARAM(block_start
)) iseq
->body
->param
.flags
.has_block
= TRUE
;
9376 #define INT_PARAM(F) F = (int_param(&x, misc, SYM(F)) ? (unsigned int)x : 0)
9378 INT_PARAM(arg_size
);
9379 INT_PARAM(local_size
);
9380 INT_PARAM(stack_max
);
9384 if (RB_TYPE_P(arg_opt_labels
, T_ARRAY
)) {
9385 len
= RARRAY_LENINT(arg_opt_labels
);
9386 iseq
->body
->param
.flags
.has_opt
= !!(len
- 1 >= 0);
9388 if (iseq
->body
->param
.flags
.has_opt
) {
9389 VALUE
*opt_table
= ALLOC_N(VALUE
, len
);
9391 for (i
= 0; i
< len
; i
++) {
9392 VALUE ent
= RARRAY_AREF(arg_opt_labels
, i
);
9393 LABEL
*label
= register_label(iseq
, labels_table
, ent
);
9394 opt_table
[i
] = (VALUE
)label
;
9397 iseq
->body
->param
.opt_num
= len
- 1;
9398 iseq
->body
->param
.opt_table
= opt_table
;
9401 else if (!NIL_P(arg_opt_labels
)) {
9402 rb_raise(rb_eTypeError
, ":opt param is not an array: %+"PRIsVALUE
,
9406 if (RB_TYPE_P(keywords
, T_ARRAY
)) {
9407 iseq
->body
->param
.keyword
= iseq_build_kw(iseq
, params
, keywords
);
9409 else if (!NIL_P(keywords
)) {
9410 rb_raise(rb_eTypeError
, ":keywords param is not an array: %+"PRIsVALUE
,
9414 if (Qtrue
== rb_hash_aref(params
, SYM(ambiguous_param0
))) {
9415 iseq
->body
->param
.flags
.ambiguous_param0
= TRUE
;
9418 if (int_param(&i
, params
, SYM(kwrest
))) {
9419 struct rb_iseq_param_keyword
*keyword
= (struct rb_iseq_param_keyword
*)iseq
->body
->param
.keyword
;
9420 if (keyword
== NULL
) {
9421 iseq
->body
->param
.keyword
= keyword
= ZALLOC(struct rb_iseq_param_keyword
);
9423 keyword
->rest_start
= i
;
9424 iseq
->body
->param
.flags
.has_kwrest
= TRUE
;
9427 iseq_calc_param_size(iseq
);
9430 iseq_build_from_ary_exception(iseq
, labels_table
, exception
);
9433 iseq_build_from_ary_body(iseq
, anchor
, body
, labels_wrapper
);
9435 iseq
->body
->param
.size
= arg_size
;
9436 iseq
->body
->local_table_size
= local_size
;
9437 iseq
->body
->stack_max
= stack_max
;
9443 rb_dvar_defined(ID id
, const rb_iseq_t
*iseq
)
9446 const struct rb_iseq_constant_body
*body
= iseq
->body
;
9447 while (body
->type
== ISEQ_TYPE_BLOCK
||
9448 body
->type
== ISEQ_TYPE_RESCUE
||
9449 body
->type
== ISEQ_TYPE_ENSURE
||
9450 body
->type
== ISEQ_TYPE_EVAL
||
9451 body
->type
== ISEQ_TYPE_MAIN
9455 for (i
= 0; i
< body
->local_table_size
; i
++) {
9456 if (body
->local_table
[i
] == id
) {
9460 iseq
= body
->parent_iseq
;
9468 rb_local_defined(ID id
, const rb_iseq_t
*iseq
)
9472 const struct rb_iseq_constant_body
*const body
= iseq
->body
->local_iseq
->body
;
9474 for (i
=0; i
<body
->local_table_size
; i
++) {
9475 if (body
->local_table
[i
] == id
) {
9484 caller_location(VALUE
*path
, VALUE
*realpath
)
9486 const rb_execution_context_t
*ec
= GET_EC();
9487 const rb_control_frame_t
*const cfp
=
9488 rb_vm_get_ruby_level_next_cfp(ec
, ec
->cfp
);
9491 int line
= rb_vm_get_sourceline(cfp
);
9492 *path
= rb_iseq_path(cfp
->iseq
);
9493 *realpath
= rb_iseq_realpath(cfp
->iseq
);
9497 *path
= rb_fstring_lit("<compiled>");
9505 rb_insn_func_t func
;
9509 static const rb_iseq_t
*
9510 method_for_self(VALUE name
, VALUE arg
, rb_insn_func_t func
,
9511 void (*build
)(rb_iseq_t
*, LINK_ANCHOR
*, const void *))
9513 VALUE path
, realpath
;
9518 acc
.line
= caller_location(&path
, &realpath
);
9519 struct rb_iseq_new_with_callback_callback_func
*ifunc
=
9520 rb_iseq_new_with_callback_new_callback(build
, &acc
);
9521 return rb_iseq_new_with_callback(ifunc
,
9522 rb_sym2str(name
), path
, realpath
,
9523 INT2FIX(acc
.line
), 0, ISEQ_TYPE_METHOD
, 0);
9527 for_self_aref(rb_iseq_t
*iseq
, LINK_ANCHOR
*ret
, const void *a
)
9529 const accessor_args
*const args
= (void *)a
;
9530 const int line
= args
->line
;
9531 struct rb_iseq_constant_body
*const body
= iseq
->body
;
9533 iseq_set_local_table(iseq
, 0);
9534 body
->param
.lead_num
= 0;
9535 body
->param
.size
= 0;
9537 ADD_INSN1(ret
, line
, putobject
, args
->arg
);
9538 ADD_INSN1(ret
, line
, opt_call_c_function
, (VALUE
)args
->func
);
9542 for_self_aset(rb_iseq_t
*iseq
, LINK_ANCHOR
*ret
, const void *a
)
9544 const accessor_args
*const args
= (void *)a
;
9545 const int line
= args
->line
;
9546 struct rb_iseq_constant_body
*const body
= iseq
->body
;
9547 static const ID vars
[] = {1, idUScore
};
9549 iseq_set_local_table(iseq
, vars
);
9550 body
->param
.lead_num
= 1;
9551 body
->param
.size
= 1;
9553 ADD_GETLOCAL(ret
, line
, numberof(vars
)-1, 0);
9554 ADD_INSN1(ret
, line
, putobject
, args
->arg
);
9555 ADD_INSN1(ret
, line
, opt_call_c_function
, (VALUE
)args
->func
);
9556 ADD_INSN(ret
, line
, pop
);
9560 * func (index) -> (value)
9563 rb_method_for_self_aref(VALUE name
, VALUE arg
, rb_insn_func_t func
)
9565 return method_for_self(name
, arg
, func
, for_self_aref
);
9569 * func (index, value) -> (index, value)
9572 rb_method_for_self_aset(VALUE name
, VALUE arg
, rb_insn_func_t func
)
9574 return method_for_self(name
, arg
, func
, for_self_aset
);
9577 /* ISeq binary format */
9579 #ifndef IBF_ISEQ_DEBUG
9580 #define IBF_ISEQ_DEBUG 0
9583 #ifndef IBF_ISEQ_ENABLE_LOCAL_BUFFER
9584 #define IBF_ISEQ_ENABLE_LOCAL_BUFFER 0
9587 typedef unsigned int ibf_offset_t
;
9588 #define IBF_OFFSET(ptr) ((ibf_offset_t)(VALUE)(ptr))
9590 #define IBF_MAJOR_VERSION ISEQ_MAJOR_VERSION
9592 #define IBF_DEVEL_VERSION 2
9593 #define IBF_MINOR_VERSION (ISEQ_MINOR_VERSION * 10000 + IBF_DEVEL_VERSION)
9595 #define IBF_MINOR_VERSION ISEQ_MINOR_VERSION
9599 char magic
[4]; /* YARB */
9600 unsigned int major_version
;
9601 unsigned int minor_version
;
9603 unsigned int extra_size
;
9605 unsigned int iseq_list_size
;
9606 unsigned int global_object_list_size
;
9607 ibf_offset_t iseq_list_offset
;
9608 ibf_offset_t global_object_list_offset
;
9611 struct ibf_dump_buffer
{
9613 st_table
*obj_table
; /* obj -> obj number */
9617 st_table
*iseq_table
; /* iseq -> iseq number */
9618 struct ibf_dump_buffer global_buffer
;
9619 struct ibf_dump_buffer
*current_buffer
;
9622 rb_iseq_t
* iseq_alloc(void);
9624 struct ibf_load_buffer
{
9628 VALUE obj_list
; /* [obj0, ...] */
9629 unsigned int obj_list_size
;
9630 ibf_offset_t obj_list_offset
;
9634 const struct ibf_header
*header
;
9635 VALUE iseq_list
; /* [iseq0, ...] */
9636 struct ibf_load_buffer global_buffer
;
9640 struct ibf_load_buffer
*current_buffer
;
9644 ibf_dump_pos(struct ibf_dump
*dump
)
9646 long pos
= RSTRING_LEN(dump
->current_buffer
->str
);
9647 #if SIZEOF_LONG > SIZEOF_INT
9648 if (pos
>= UINT_MAX
) {
9649 rb_raise(rb_eRuntimeError
, "dump size exceeds");
9652 return (unsigned int)pos
;
9656 ibf_dump_align(struct ibf_dump
*dump
, size_t align
)
9658 ibf_offset_t pos
= ibf_dump_pos(dump
);
9660 static const char padding
[sizeof(VALUE
)];
9661 size_t size
= align
- ((size_t)pos
% align
);
9662 #if SIZEOF_LONG > SIZEOF_INT
9663 if (pos
+ size
>= UINT_MAX
) {
9664 rb_raise(rb_eRuntimeError
, "dump size exceeds");
9667 for (; size
> sizeof(padding
); size
-= sizeof(padding
)) {
9668 rb_str_cat(dump
->current_buffer
->str
, padding
, sizeof(padding
));
9670 rb_str_cat(dump
->current_buffer
->str
, padding
, size
);
9675 ibf_dump_write(struct ibf_dump
*dump
, const void *buff
, unsigned long size
)
9677 ibf_offset_t pos
= ibf_dump_pos(dump
);
9678 rb_str_cat(dump
->current_buffer
->str
, (const char *)buff
, size
);
9679 /* TODO: overflow check */
9684 ibf_dump_write_byte(struct ibf_dump
*dump
, unsigned char byte
)
9686 return ibf_dump_write(dump
, &byte
, sizeof(unsigned char));
9690 ibf_dump_overwrite(struct ibf_dump
*dump
, void *buff
, unsigned int size
, long offset
)
9692 VALUE str
= dump
->current_buffer
->str
;
9693 char *ptr
= RSTRING_PTR(str
);
9694 if ((unsigned long)(size
+ offset
) > (unsigned long)RSTRING_LEN(str
))
9695 rb_bug("ibf_dump_overwrite: overflow");
9696 memcpy(ptr
+ offset
, buff
, size
);
9700 ibf_load_ptr(const struct ibf_load
*load
, ibf_offset_t
*offset
, int size
)
9702 ibf_offset_t beg
= *offset
;
9704 return load
->current_buffer
->buff
+ beg
;
9708 ibf_load_alloc(const struct ibf_load
*load
, ibf_offset_t offset
, size_t x
, size_t y
)
9710 void *buff
= ruby_xmalloc2(x
, y
);
9711 size_t size
= x
* y
;
9712 memcpy(buff
, load
->current_buffer
->buff
+ offset
, size
);
9716 #define IBF_W_ALIGN(type) (RUBY_ALIGNOF(type) > 1 ? ibf_dump_align(dump, RUBY_ALIGNOF(type)) : (void)0)
9718 #define IBF_W(b, type, n) (IBF_W_ALIGN(type), (type *)(VALUE)IBF_WP(b, type, n))
9719 #define IBF_WV(variable) ibf_dump_write(dump, &(variable), sizeof(variable))
9720 #define IBF_WP(b, type, n) ibf_dump_write(dump, (b), sizeof(type) * (n))
9721 #define IBF_R(val, type, n) (type *)ibf_load_alloc(load, IBF_OFFSET(val), sizeof(type), (n))
9722 #define IBF_ZERO(variable) memset(&(variable), 0, sizeof(variable))
9725 ibf_table_lookup(struct st_table
*table
, st_data_t key
)
9729 if (st_lookup(table
, key
, &val
)) {
9738 ibf_table_find_or_insert(struct st_table
*table
, st_data_t key
)
9740 int index
= ibf_table_lookup(table
, key
);
9742 if (index
< 0) { /* not found */
9743 index
= (int)table
->num_entries
;
9744 st_insert(table
, key
, (st_data_t
)index
);
9750 /* dump/load generic */
9752 static void ibf_dump_object_list(struct ibf_dump
*dump
, ibf_offset_t
*obj_list_offset
, unsigned int *obj_list_size
);
9754 static VALUE
ibf_load_object(const struct ibf_load
*load
, VALUE object_index
);
9755 static rb_iseq_t
*ibf_load_iseq(const struct ibf_load
*load
, const rb_iseq_t
*index_iseq
);
9758 ibf_dump_object_table_new(void)
9760 st_table
*obj_table
= st_init_numtable(); /* need free */
9761 st_insert(obj_table
, (st_data_t
)Qnil
, (st_data_t
)0); /* 0th is nil */
9767 ibf_dump_object(struct ibf_dump
*dump
, VALUE obj
)
9769 return ibf_table_find_or_insert(dump
->current_buffer
->obj_table
, (st_data_t
)obj
);
9773 ibf_dump_id(struct ibf_dump
*dump
, ID id
)
9775 if (id
== 0 || rb_id2name(id
) == NULL
) {
9778 return ibf_dump_object(dump
, rb_id2sym(id
));
9782 ibf_load_id(const struct ibf_load
*load
, const ID id_index
)
9784 if (id_index
== 0) {
9787 VALUE sym
= ibf_load_object(load
, id_index
);
9788 return rb_sym2id(sym
);
9791 /* dump/load: code */
9793 static ibf_offset_t
ibf_dump_iseq_each(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
);
9796 ibf_dump_iseq(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
9802 return ibf_table_find_or_insert(dump
->iseq_table
, (st_data_t
)iseq
);
9807 ibf_dump_gentry(struct ibf_dump
*dump
, const struct rb_global_entry
*entry
)
9809 return (VALUE
)ibf_dump_id(dump
, entry
->id
);
9813 ibf_load_gentry(const struct ibf_load
*load
, const struct rb_global_entry
*entry
)
9815 ID gid
= ibf_load_id(load
, (ID
)(VALUE
)entry
);
9816 return (VALUE
)rb_global_entry(gid
);
9819 static unsigned char
9820 ibf_load_byte(const struct ibf_load
*load
, ibf_offset_t
*offset
)
9822 if (*offset
>= load
->current_buffer
->size
) { rb_raise(rb_eRuntimeError
, "invalid bytecode"); }
9823 return (unsigned char)load
->current_buffer
->buff
[(*offset
)++];
9827 * Small uint serialization
9828 * 0x00000000_00000000 - 0x00000000_0000007f: 1byte | XXXX XXX1 |
9829 * 0x00000000_00000080 - 0x00000000_00003fff: 2byte | XXXX XX10 | XXXX XXXX |
9830 * 0x00000000_00004000 - 0x00000000_001fffff: 3byte | XXXX X100 | XXXX XXXX | XXXX XXXX |
9831 * 0x00000000_00200000 - 0x00000000_0fffffff: 4byte | XXXX 1000 | XXXX XXXX | XXXX XXXX | XXXX XXXX |
9833 * 0x00010000_00000000 - 0x00ffffff_ffffffff: 8byte | 1000 0000 | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX |
9834 * 0x01000000_00000000 - 0xffffffff_ffffffff: 9byte | 0000 0000 | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX |
9837 ibf_dump_write_small_value(struct ibf_dump
*dump
, VALUE x
)
9839 if (sizeof(VALUE
) > 8 || CHAR_BIT
!= 8) {
9840 ibf_dump_write(dump
, &x
, sizeof(VALUE
));
9844 enum { max_byte_length
= sizeof(VALUE
) + 1 };
9846 unsigned char bytes
[max_byte_length
];
9849 for (n
= 0; n
< sizeof(VALUE
) && (x
>> (7 - n
)); n
++, x
>>= 8) {
9850 bytes
[max_byte_length
- 1 - n
] = (unsigned char)x
;
9856 bytes
[max_byte_length
- 1 - n
] = (unsigned char)x
;
9859 ibf_dump_write(dump
, bytes
+ max_byte_length
- n
, n
);
9863 ibf_load_small_value(const struct ibf_load
*load
, ibf_offset_t
*offset
)
9865 if (sizeof(VALUE
) > 8 || CHAR_BIT
!= 8) {
9866 union { char s
[sizeof(VALUE
)]; VALUE v
; } x
;
9868 memcpy(x
.s
, load
->current_buffer
->buff
+ *offset
, sizeof(VALUE
));
9869 *offset
+= sizeof(VALUE
);
9874 enum { max_byte_length
= sizeof(VALUE
) + 1 };
9876 const unsigned char *buffer
= (const unsigned char *)load
->current_buffer
->buff
;
9877 const unsigned char c
= buffer
[*offset
];
9881 c
== 0 ? 9 : ntz_int32(c
) + 1;
9882 VALUE x
= (VALUE
)c
>> n
;
9884 if (*offset
+ n
> load
->current_buffer
->size
) {
9885 rb_raise(rb_eRuntimeError
, "invalid byte sequence");
9889 for (i
= 1; i
< n
; i
++) {
9891 x
|= (VALUE
)buffer
[*offset
+ i
];
9899 ibf_dump_builtin(struct ibf_dump
*dump
, const struct rb_builtin_function
*bf
)
9902 // short: name.length
9904 // // omit argc (only verify with name)
9905 ibf_dump_write_small_value(dump
, (VALUE
)bf
->index
);
9907 size_t len
= strlen(bf
->name
);
9908 ibf_dump_write_small_value(dump
, (VALUE
)len
);
9909 ibf_dump_write(dump
, bf
->name
, len
);
9912 static const struct rb_builtin_function
*
9913 ibf_load_builtin(const struct ibf_load
*load
, ibf_offset_t
*offset
)
9915 int i
= (int)ibf_load_small_value(load
, offset
);
9916 int len
= (int)ibf_load_small_value(load
, offset
);
9917 const char *name
= (char *)ibf_load_ptr(load
, offset
, len
);
9920 for (int i
=0; i
<len
; i
++) fprintf(stderr
, "%c", name
[i
]);
9921 fprintf(stderr
, "!!\n");
9924 const struct rb_builtin_function
*table
= GET_VM()->builtin_function_table
;
9925 if (table
== NULL
) rb_bug("%s: table is not provided.", RUBY_FUNCTION_NAME_STRING
);
9926 if (strncmp(table
[i
].name
, name
, len
) != 0) {
9927 rb_bug("%s: index (%d) mismatch (expect %s but %s).", RUBY_FUNCTION_NAME_STRING
, i
, name
, table
[i
].name
);
9929 // fprintf(stderr, "load-builtin: name:%s(%d)\n", table[i].name, table[i].argc);
9935 ibf_dump_code(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
9937 const struct rb_iseq_constant_body
*const body
= iseq
->body
;
9938 const int iseq_size
= body
->iseq_size
;
9940 const VALUE
*orig_code
= rb_iseq_original_iseq(iseq
);
9942 ibf_offset_t offset
= ibf_dump_pos(dump
);
9944 for (code_index
=0; code_index
<iseq_size
;) {
9945 const VALUE insn
= orig_code
[code_index
++];
9946 const char *types
= insn_op_types(insn
);
9950 if (insn
>= 0x100) { rb_raise(rb_eRuntimeError
, "invalid instruction"); }
9951 ibf_dump_write_small_value(dump
, insn
);
9954 for (op_index
=0; types
[op_index
]; op_index
++, code_index
++) {
9955 VALUE op
= orig_code
[code_index
];
9958 switch (types
[op_index
]) {
9961 wv
= ibf_dump_object(dump
, op
);
9964 wv
= (VALUE
)ibf_dump_iseq(dump
, (const rb_iseq_t
*)op
);
9971 for (i
=0; i
<body
->is_size
; i
++) {
9972 if (op
== (VALUE
)&body
->is_entries
[i
]) {
9984 wv
= ibf_dump_id(dump
, (ID
)op
);
9987 wv
= ibf_dump_gentry(dump
, (const struct rb_global_entry
*)op
);
9990 rb_raise(rb_eRuntimeError
, "TS_FUNCPTR is not supported");
9993 ibf_dump_builtin(dump
, (const struct rb_builtin_function
*)op
);
9999 ibf_dump_write_small_value(dump
, wv
);
10002 assert(insn_len(insn
) == op_index
+1);
10009 ibf_load_code(const struct ibf_load
*load
, rb_iseq_t
*iseq
, ibf_offset_t bytecode_offset
, ibf_offset_t bytecode_size
, unsigned int iseq_size
)
10011 VALUE iseqv
= (VALUE
)iseq
;
10012 unsigned int code_index
;
10013 ibf_offset_t reading_pos
= bytecode_offset
;
10014 VALUE
*code
= ALLOC_N(VALUE
, iseq_size
);
10016 struct rb_iseq_constant_body
*load_body
= iseq
->body
;
10017 struct rb_call_data
*cd_entries
= load_body
->call_data
;
10018 union iseq_inline_storage_entry
*is_entries
= load_body
->is_entries
;
10020 for (code_index
=0; code_index
<iseq_size
;) {
10022 const VALUE insn
= code
[code_index
++] = ibf_load_small_value(load
, &reading_pos
);
10023 const char *types
= insn_op_types(insn
);
10027 for (op_index
=0; types
[op_index
]; op_index
++, code_index
++) {
10028 switch (types
[op_index
]) {
10032 VALUE op
= ibf_load_small_value(load
, &reading_pos
);
10033 VALUE v
= ibf_load_object(load
, op
);
10034 code
[code_index
] = v
;
10035 if (!SPECIAL_CONST_P(v
)) {
10036 RB_OBJ_WRITTEN(iseqv
, Qundef
, v
);
10037 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
10043 VALUE op
= (VALUE
)ibf_load_small_value(load
, &reading_pos
);
10044 VALUE v
= (VALUE
)ibf_load_iseq(load
, (const rb_iseq_t
*)op
);
10045 code
[code_index
] = v
;
10046 if (!SPECIAL_CONST_P(v
)) {
10047 RB_OBJ_WRITTEN(iseqv
, Qundef
, v
);
10048 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
10053 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
10058 VALUE op
= ibf_load_small_value(load
, &reading_pos
);
10059 code
[code_index
] = (VALUE
)&is_entries
[op
];
10064 code
[code_index
] = (VALUE
)cd_entries
++;
10069 VALUE op
= ibf_load_small_value(load
, &reading_pos
);
10070 code
[code_index
] = ibf_load_id(load
, (ID
)(VALUE
)op
);
10075 VALUE op
= ibf_load_small_value(load
, &reading_pos
);
10076 code
[code_index
] = ibf_load_gentry(load
, (const struct rb_global_entry
*)(VALUE
)op
);
10080 rb_raise(rb_eRuntimeError
, "TS_FUNCPTR is not supported");
10083 code
[code_index
] = (VALUE
)ibf_load_builtin(load
, &reading_pos
);
10086 code
[code_index
] = ibf_load_small_value(load
, &reading_pos
);
10090 if (insn_len(insn
) != op_index
+1) {
10091 rb_raise(rb_eRuntimeError
, "operand size mismatch");
10094 load_body
->iseq_encoded
= code
;
10095 load_body
->iseq_size
= code_index
;
10097 assert(code_index
== iseq_size
);
10098 assert(reading_pos
== bytecode_offset
+ bytecode_size
);
10102 static ibf_offset_t
10103 ibf_dump_param_opt_table(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
10105 int opt_num
= iseq
->body
->param
.opt_num
;
10108 IBF_W_ALIGN(VALUE
);
10109 return ibf_dump_write(dump
, iseq
->body
->param
.opt_table
, sizeof(VALUE
) * (opt_num
+ 1));
10112 return ibf_dump_pos(dump
);
10117 ibf_load_param_opt_table(const struct ibf_load
*load
, ibf_offset_t opt_table_offset
, int opt_num
)
10120 VALUE
*table
= ALLOC_N(VALUE
, opt_num
+1);
10121 MEMCPY(table
, load
->current_buffer
->buff
+ opt_table_offset
, VALUE
, opt_num
+1);
10129 static ibf_offset_t
10130 ibf_dump_param_keyword(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
10132 const struct rb_iseq_param_keyword
*kw
= iseq
->body
->param
.keyword
;
10135 struct rb_iseq_param_keyword dump_kw
= *kw
;
10136 int dv_num
= kw
->num
- kw
->required_num
;
10137 ID
*ids
= kw
->num
> 0 ? ALLOCA_N(ID
, kw
->num
) : NULL
;
10138 VALUE
*dvs
= dv_num
> 0 ? ALLOCA_N(VALUE
, dv_num
) : NULL
;
10141 for (i
=0; i
<kw
->num
; i
++) ids
[i
] = (ID
)ibf_dump_id(dump
, kw
->table
[i
]);
10142 for (i
=0; i
<dv_num
; i
++) dvs
[i
] = (VALUE
)ibf_dump_object(dump
, kw
->default_values
[i
]);
10144 dump_kw
.table
= IBF_W(ids
, ID
, kw
->num
);
10145 dump_kw
.default_values
= IBF_W(dvs
, VALUE
, dv_num
);
10146 IBF_W_ALIGN(struct rb_iseq_param_keyword
);
10147 return ibf_dump_write(dump
, &dump_kw
, sizeof(struct rb_iseq_param_keyword
) * 1);
10154 static const struct rb_iseq_param_keyword
*
10155 ibf_load_param_keyword(const struct ibf_load
*load
, ibf_offset_t param_keyword_offset
)
10157 if (param_keyword_offset
) {
10158 struct rb_iseq_param_keyword
*kw
= IBF_R(param_keyword_offset
, struct rb_iseq_param_keyword
, 1);
10159 ID
*ids
= IBF_R(kw
->table
, ID
, kw
->num
);
10160 int dv_num
= kw
->num
- kw
->required_num
;
10161 VALUE
*dvs
= IBF_R(kw
->default_values
, VALUE
, dv_num
);
10164 for (i
=0; i
<kw
->num
; i
++) {
10165 ids
[i
] = ibf_load_id(load
, ids
[i
]);
10167 for (i
=0; i
<dv_num
; i
++) {
10168 dvs
[i
] = ibf_load_object(load
, dvs
[i
]);
10172 kw
->default_values
= dvs
;
10180 static ibf_offset_t
10181 ibf_dump_insns_info_body(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
10183 ibf_offset_t offset
= ibf_dump_pos(dump
);
10184 const struct iseq_insn_info_entry
*entries
= iseq
->body
->insns_info
.body
;
10187 for (i
= 0; i
< iseq
->body
->insns_info
.size
; i
++) {
10188 ibf_dump_write_small_value(dump
, entries
[i
].line_no
);
10189 ibf_dump_write_small_value(dump
, entries
[i
].events
);
10195 static struct iseq_insn_info_entry
*
10196 ibf_load_insns_info_body(const struct ibf_load
*load
, ibf_offset_t body_offset
, unsigned int size
)
10198 ibf_offset_t reading_pos
= body_offset
;
10199 struct iseq_insn_info_entry
*entries
= ALLOC_N(struct iseq_insn_info_entry
, size
);
10202 for (i
= 0; i
< size
; i
++) {
10203 entries
[i
].line_no
= (int)ibf_load_small_value(load
, &reading_pos
);
10204 entries
[i
].events
= (rb_event_flag_t
)ibf_load_small_value(load
, &reading_pos
);
10210 static ibf_offset_t
10211 ibf_dump_insns_info_positions(struct ibf_dump
*dump
, const unsigned int *positions
, unsigned int size
)
10213 ibf_offset_t offset
= ibf_dump_pos(dump
);
10215 unsigned int last
= 0;
10217 for (i
= 0; i
< size
; i
++) {
10218 ibf_dump_write_small_value(dump
, positions
[i
] - last
);
10219 last
= positions
[i
];
10225 static unsigned int *
10226 ibf_load_insns_info_positions(const struct ibf_load
*load
, ibf_offset_t positions_offset
, unsigned int size
)
10228 ibf_offset_t reading_pos
= positions_offset
;
10229 unsigned int *positions
= ALLOC_N(unsigned int, size
);
10231 unsigned int last
= 0;
10233 for (i
= 0; i
< size
; i
++) {
10234 positions
[i
] = last
+ (unsigned int)ibf_load_small_value(load
, &reading_pos
);
10235 last
= positions
[i
];
10241 static ibf_offset_t
10242 ibf_dump_local_table(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
10244 const struct rb_iseq_constant_body
*const body
= iseq
->body
;
10245 const int size
= body
->local_table_size
;
10246 ID
*table
= ALLOCA_N(ID
, size
);
10249 for (i
=0; i
<size
; i
++) {
10250 table
[i
] = ibf_dump_id(dump
, body
->local_table
[i
]);
10254 return ibf_dump_write(dump
, table
, sizeof(ID
) * size
);
10258 ibf_load_local_table(const struct ibf_load
*load
, ibf_offset_t local_table_offset
, int size
)
10261 ID
*table
= IBF_R(local_table_offset
, ID
, size
);
10264 for (i
=0; i
<size
; i
++) {
10265 table
[i
] = ibf_load_id(load
, table
[i
]);
10274 static ibf_offset_t
10275 ibf_dump_catch_table(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
10277 const struct iseq_catch_table
*table
= iseq
->body
->catch_table
;
10280 int *iseq_indices
= ALLOCA_N(int, table
->size
);
10283 for (i
=0; i
<table
->size
; i
++) {
10284 iseq_indices
[i
] = ibf_dump_iseq(dump
, table
->entries
[i
].iseq
);
10287 const ibf_offset_t offset
= ibf_dump_pos(dump
);
10289 for (i
=0; i
<table
->size
; i
++) {
10290 ibf_dump_write_small_value(dump
, iseq_indices
[i
]);
10291 ibf_dump_write_small_value(dump
, table
->entries
[i
].type
);
10292 ibf_dump_write_small_value(dump
, table
->entries
[i
].start
);
10293 ibf_dump_write_small_value(dump
, table
->entries
[i
].end
);
10294 ibf_dump_write_small_value(dump
, table
->entries
[i
].cont
);
10295 ibf_dump_write_small_value(dump
, table
->entries
[i
].sp
);
10300 return ibf_dump_pos(dump
);
10304 static struct iseq_catch_table
*
10305 ibf_load_catch_table(const struct ibf_load
*load
, ibf_offset_t catch_table_offset
, unsigned int size
)
10308 struct iseq_catch_table
*table
= ruby_xmalloc(iseq_catch_table_bytes(size
));
10309 table
->size
= size
;
10311 ibf_offset_t reading_pos
= catch_table_offset
;
10314 for (i
=0; i
<table
->size
; i
++) {
10315 int iseq_index
= (int)ibf_load_small_value(load
, &reading_pos
);
10316 table
->entries
[i
].type
= (enum catch_type
)ibf_load_small_value(load
, &reading_pos
);
10317 table
->entries
[i
].start
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
10318 table
->entries
[i
].end
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
10319 table
->entries
[i
].cont
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
10320 table
->entries
[i
].sp
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
10322 table
->entries
[i
].iseq
= ibf_load_iseq(load
, (const rb_iseq_t
*)(VALUE
)iseq_index
);
10331 static ibf_offset_t
10332 ibf_dump_ci_entries(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
10334 const struct rb_iseq_constant_body
*const body
= iseq
->body
;
10335 const unsigned int ci_size
= body
->ci_size
;
10336 const struct rb_call_data
*cds
= body
->call_data
;
10338 ibf_offset_t offset
= ibf_dump_pos(dump
);
10342 for (i
= 0; i
< ci_size
; i
++) {
10343 const struct rb_callinfo
*ci
= cds
[i
].ci
;
10345 ibf_dump_write_small_value(dump
, ibf_dump_id(dump
, vm_ci_mid(ci
)));
10346 ibf_dump_write_small_value(dump
, vm_ci_flag(ci
));
10347 ibf_dump_write_small_value(dump
, vm_ci_argc(ci
));
10349 const struct rb_callinfo_kwarg
*kwarg
= vm_ci_kwarg(ci
);
10351 int len
= kwarg
->keyword_len
;
10352 ibf_dump_write_small_value(dump
, len
);
10353 for (int j
=0; j
<len
; j
++) {
10354 VALUE keyword
= ibf_dump_object(dump
, kwarg
->keywords
[j
]);
10355 ibf_dump_write_small_value(dump
, keyword
);
10359 ibf_dump_write_small_value(dump
, 0);
10363 // TODO: truncate NULL ci from call_data.
10364 ibf_dump_write_small_value(dump
, (VALUE
)-1);
10371 /* note that we dump out rb_call_info but load back rb_call_data */
10373 ibf_load_ci_entries(const struct ibf_load
*load
,
10374 ibf_offset_t ci_entries_offset
,
10375 unsigned int ci_size
,
10376 struct rb_call_data
**cd_ptr
)
10378 ibf_offset_t reading_pos
= ci_entries_offset
;
10382 struct rb_call_data
*cds
= ZALLOC_N(struct rb_call_data
, ci_size
);
10385 for (i
= 0; i
< ci_size
; i
++) {
10386 VALUE mid_index
= ibf_load_small_value(load
, &reading_pos
);
10387 if (mid_index
!= (VALUE
)-1) {
10388 ID mid
= ibf_load_id(load
, mid_index
);
10389 unsigned int flag
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
10390 unsigned int argc
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
10392 struct rb_callinfo_kwarg
*kwarg
= NULL
;
10393 int kwlen
= (int)ibf_load_small_value(load
, &reading_pos
);
10395 kwarg
= rb_xmalloc_mul_add(kwlen
- 1, sizeof(VALUE
), sizeof(struct rb_callinfo_kwarg
));;
10396 kwarg
->keyword_len
= kwlen
;
10397 for (int j
=0; j
<kwlen
; j
++) {
10398 VALUE keyword
= ibf_load_small_value(load
, &reading_pos
);
10399 kwarg
->keywords
[j
] = ibf_load_object(load
, keyword
);
10403 cds
[i
].ci
= vm_ci_new(mid
, flag
, argc
, kwarg
);
10404 RB_OBJ_WRITTEN(load
->iseq
, Qundef
, cds
[i
].ci
);
10405 cds
[i
].cc
= vm_cc_empty();
10415 static ibf_offset_t
10416 ibf_dump_iseq_each(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
10418 assert(dump
->current_buffer
== &dump
->global_buffer
);
10420 unsigned int *positions
;
10422 const struct rb_iseq_constant_body
*body
= iseq
->body
;
10424 const VALUE location_pathobj_index
= ibf_dump_object(dump
, body
->location
.pathobj
); /* TODO: freeze */
10425 const VALUE location_base_label_index
= ibf_dump_object(dump
, body
->location
.base_label
);
10426 const VALUE location_label_index
= ibf_dump_object(dump
, body
->location
.label
);
10428 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
10429 ibf_offset_t iseq_start
= ibf_dump_pos(dump
);
10431 struct ibf_dump_buffer
*saved_buffer
= dump
->current_buffer
;
10432 struct ibf_dump_buffer buffer
;
10433 buffer
.str
= rb_str_new(0, 0);
10434 buffer
.obj_table
= ibf_dump_object_table_new();
10435 dump
->current_buffer
= &buffer
;
10438 const ibf_offset_t bytecode_offset
= ibf_dump_code(dump
, iseq
);
10439 const ibf_offset_t bytecode_size
= ibf_dump_pos(dump
) - bytecode_offset
;
10440 const ibf_offset_t param_opt_table_offset
= ibf_dump_param_opt_table(dump
, iseq
);
10441 const ibf_offset_t param_keyword_offset
= ibf_dump_param_keyword(dump
, iseq
);
10442 const ibf_offset_t insns_info_body_offset
= ibf_dump_insns_info_body(dump
, iseq
);
10444 positions
= rb_iseq_insns_info_decode_positions(iseq
->body
);
10445 const ibf_offset_t insns_info_positions_offset
= ibf_dump_insns_info_positions(dump
, positions
, body
->insns_info
.size
);
10446 ruby_xfree(positions
);
10448 const ibf_offset_t local_table_offset
= ibf_dump_local_table(dump
, iseq
);
10449 const unsigned int catch_table_size
= body
->catch_table
? body
->catch_table
->size
: 0;
10450 const ibf_offset_t catch_table_offset
= ibf_dump_catch_table(dump
, iseq
);
10451 const int parent_iseq_index
= ibf_dump_iseq(dump
, iseq
->body
->parent_iseq
);
10452 const int local_iseq_index
= ibf_dump_iseq(dump
, iseq
->body
->local_iseq
);
10453 const ibf_offset_t ci_entries_offset
= ibf_dump_ci_entries(dump
, iseq
);
10455 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
10456 ibf_offset_t local_obj_list_offset
;
10457 unsigned int local_obj_list_size
;
10459 ibf_dump_object_list(dump
, &local_obj_list_offset
, &local_obj_list_size
);
10462 ibf_offset_t body_offset
= ibf_dump_pos(dump
);
10464 /* dump the constant body */
10465 unsigned int param_flags
=
10466 (body
->param
.flags
.has_lead
<< 0) |
10467 (body
->param
.flags
.has_opt
<< 1) |
10468 (body
->param
.flags
.has_rest
<< 2) |
10469 (body
->param
.flags
.has_post
<< 3) |
10470 (body
->param
.flags
.has_kw
<< 4) |
10471 (body
->param
.flags
.has_kwrest
<< 5) |
10472 (body
->param
.flags
.has_block
<< 6) |
10473 (body
->param
.flags
.ambiguous_param0
<< 7) |
10474 (body
->param
.flags
.accepts_no_kwarg
<< 8) |
10475 (body
->param
.flags
.ruby2_keywords
<< 9);
10477 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
10478 # define IBF_BODY_OFFSET(x) (x)
10480 # define IBF_BODY_OFFSET(x) (body_offset - (x))
10483 ibf_dump_write_small_value(dump
, body
->type
);
10484 ibf_dump_write_small_value(dump
, body
->iseq_size
);
10485 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(bytecode_offset
));
10486 ibf_dump_write_small_value(dump
, bytecode_size
);
10487 ibf_dump_write_small_value(dump
, param_flags
);
10488 ibf_dump_write_small_value(dump
, body
->param
.size
);
10489 ibf_dump_write_small_value(dump
, body
->param
.lead_num
);
10490 ibf_dump_write_small_value(dump
, body
->param
.opt_num
);
10491 ibf_dump_write_small_value(dump
, body
->param
.rest_start
);
10492 ibf_dump_write_small_value(dump
, body
->param
.post_start
);
10493 ibf_dump_write_small_value(dump
, body
->param
.post_num
);
10494 ibf_dump_write_small_value(dump
, body
->param
.block_start
);
10495 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(param_opt_table_offset
));
10496 ibf_dump_write_small_value(dump
, param_keyword_offset
);
10497 ibf_dump_write_small_value(dump
, location_pathobj_index
);
10498 ibf_dump_write_small_value(dump
, location_base_label_index
);
10499 ibf_dump_write_small_value(dump
, location_label_index
);
10500 ibf_dump_write_small_value(dump
, body
->location
.first_lineno
);
10501 ibf_dump_write_small_value(dump
, body
->location
.node_id
);
10502 ibf_dump_write_small_value(dump
, body
->location
.code_location
.beg_pos
.lineno
);
10503 ibf_dump_write_small_value(dump
, body
->location
.code_location
.beg_pos
.column
);
10504 ibf_dump_write_small_value(dump
, body
->location
.code_location
.end_pos
.lineno
);
10505 ibf_dump_write_small_value(dump
, body
->location
.code_location
.end_pos
.column
);
10506 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(insns_info_body_offset
));
10507 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(insns_info_positions_offset
));
10508 ibf_dump_write_small_value(dump
, body
->insns_info
.size
);
10509 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(local_table_offset
));
10510 ibf_dump_write_small_value(dump
, catch_table_size
);
10511 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(catch_table_offset
));
10512 ibf_dump_write_small_value(dump
, parent_iseq_index
);
10513 ibf_dump_write_small_value(dump
, local_iseq_index
);
10514 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(ci_entries_offset
));
10515 ibf_dump_write_small_value(dump
, body
->variable
.flip_count
);
10516 ibf_dump_write_small_value(dump
, body
->local_table_size
);
10517 ibf_dump_write_small_value(dump
, body
->is_size
);
10518 ibf_dump_write_small_value(dump
, body
->ci_size
);
10519 ibf_dump_write_small_value(dump
, body
->stack_max
);
10520 ibf_dump_write_small_value(dump
, body
->catch_except_p
);
10522 #undef IBF_BODY_OFFSET
10524 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
10525 ibf_offset_t iseq_length_bytes
= ibf_dump_pos(dump
);
10527 dump
->current_buffer
= saved_buffer
;
10528 ibf_dump_write(dump
, RSTRING_PTR(buffer
.str
), iseq_length_bytes
);
10530 ibf_offset_t offset
= ibf_dump_pos(dump
);
10531 ibf_dump_write_small_value(dump
, iseq_start
);
10532 ibf_dump_write_small_value(dump
, iseq_length_bytes
);
10533 ibf_dump_write_small_value(dump
, body_offset
);
10535 ibf_dump_write_small_value(dump
, local_obj_list_offset
);
10536 ibf_dump_write_small_value(dump
, local_obj_list_size
);
10538 st_free_table(buffer
.obj_table
); // TODO: this leaks in case of exception
10542 return body_offset
;
/* Load a location string (label, base_label, ...) from the dump's object
 * table by index and intern it so identical strings share one frozen object.
 * NOTE(review): the return type, the nil guard, and the final return were
 * elided by the extraction and are reconstructed here — verify upstream. */
static VALUE
ibf_load_location_str(const struct ibf_load *load, VALUE str_index)
{
    VALUE str = ibf_load_object(load, str_index);
    if (str != Qnil) { /* NOTE(review): reconstructed guard */
        str = rb_fstring(str);
    }
    return str; /* NOTE(review): reconstructed */
}
/*
 * Deserialize one iseq body stored at +offset+ in the loaded image and
 * install it into +iseq+.  Every ibf_load_small_value() below MUST stay in
 * exactly the order the corresponding ibf_dump_iseq_each() wrote the fields;
 * any reordering silently corrupts deserialization.
 * NOTE(review): return type, braces and #else/#endif directives were elided
 * by the extraction and are reconstructed — verify against upstream.
 */
static void
ibf_load_iseq_each(struct ibf_load *load, rb_iseq_t *iseq, ibf_offset_t offset)
{
    struct rb_iseq_constant_body *load_body = iseq->body = rb_iseq_constant_body_alloc();

    ibf_offset_t reading_pos = offset;

#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
    /* The per-iseq header lives in the global buffer; the rest of this
     * iseq's data is read through a local buffer set up just below. */
    struct ibf_load_buffer *saved_buffer = load->current_buffer;
    load->current_buffer = &load->global_buffer;

    const ibf_offset_t iseq_start = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t iseq_length_bytes = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t body_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);

    struct ibf_load_buffer buffer;
    buffer.buff = load->global_buffer.buff + iseq_start;
    buffer.size = iseq_length_bytes;
    buffer.obj_list_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
    buffer.obj_list_size = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
    /* pre-size the lazy object cache; entries are Qnil until first load */
    buffer.obj_list = rb_ary_tmp_new(buffer.obj_list_size);
    rb_ary_resize(buffer.obj_list, buffer.obj_list_size);

    load->current_buffer = &buffer;
    reading_pos = body_offset;
#endif

    /* With a local buffer, stored offsets are already buffer-relative;
     * otherwise they were written as (offset - x) and must be undone. */
#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
#  define IBF_BODY_OFFSET(x) (x)
#else
#  define IBF_BODY_OFFSET(x) (offset - (x))
#endif

    const unsigned int type = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const unsigned int iseq_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t bytecode_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const ibf_offset_t bytecode_size = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
    const unsigned int param_flags = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const unsigned int param_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const int param_lead_num = (int)ibf_load_small_value(load, &reading_pos);
    const int param_opt_num = (int)ibf_load_small_value(load, &reading_pos);
    const int param_rest_start = (int)ibf_load_small_value(load, &reading_pos);
    const int param_post_start = (int)ibf_load_small_value(load, &reading_pos);
    const int param_post_num = (int)ibf_load_small_value(load, &reading_pos);
    const int param_block_start = (int)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t param_opt_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const ibf_offset_t param_keyword_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
    const VALUE location_pathobj_index = ibf_load_small_value(load, &reading_pos);
    const VALUE location_base_label_index = ibf_load_small_value(load, &reading_pos);
    const VALUE location_label_index = ibf_load_small_value(load, &reading_pos);
    const VALUE location_first_lineno = ibf_load_small_value(load, &reading_pos);
    const int location_node_id = (int)ibf_load_small_value(load, &reading_pos);
    const int location_code_location_beg_pos_lineno = (int)ibf_load_small_value(load, &reading_pos);
    const int location_code_location_beg_pos_column = (int)ibf_load_small_value(load, &reading_pos);
    const int location_code_location_end_pos_lineno = (int)ibf_load_small_value(load, &reading_pos);
    const int location_code_location_end_pos_column = (int)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t insns_info_body_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const ibf_offset_t insns_info_positions_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const unsigned int insns_info_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t local_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const unsigned int catch_table_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t catch_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const int parent_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
    const int local_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t ci_entries_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const rb_snum_t variable_flip_count = (rb_snum_t)ibf_load_small_value(load, &reading_pos);
    const unsigned int local_table_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const unsigned int is_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const unsigned int ci_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const unsigned int stack_max = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const char catch_except_p = (char)ibf_load_small_value(load, &reading_pos);

#undef IBF_BODY_OFFSET

    load_body->type = type;
    load_body->stack_max = stack_max;
    /* param_flags is a bit-packed word; bit positions mirror the dump side */
    load_body->param.flags.has_lead = (param_flags >> 0) & 1;
    load_body->param.flags.has_opt = (param_flags >> 1) & 1;
    load_body->param.flags.has_rest = (param_flags >> 2) & 1;
    load_body->param.flags.has_post = (param_flags >> 3) & 1;
    /* has_kw (bit 4) is deliberately left FALSE here and only set after
     * ibf_load_param_keyword() below — presumably the keyword loader must
     * not see it set yet; TODO(review): confirm against upstream. */
    load_body->param.flags.has_kw = FALSE;
    load_body->param.flags.has_kwrest = (param_flags >> 5) & 1;
    load_body->param.flags.has_block = (param_flags >> 6) & 1;
    load_body->param.flags.ambiguous_param0 = (param_flags >> 7) & 1;
    load_body->param.flags.accepts_no_kwarg = (param_flags >> 8) & 1;
    load_body->param.flags.ruby2_keywords = (param_flags >> 9) & 1;
    load_body->param.size = param_size;
    load_body->param.lead_num = param_lead_num;
    load_body->param.opt_num = param_opt_num;
    load_body->param.rest_start = param_rest_start;
    load_body->param.post_start = param_post_start;
    load_body->param.post_num = param_post_num;
    load_body->param.block_start = param_block_start;
    load_body->local_table_size = local_table_size;
    load_body->is_size = is_size;
    load_body->ci_size = ci_size;
    load_body->insns_info.size = insns_info_size;

    /* loaded iseqs never carry coverage or an "original iseq" */
    ISEQ_COVERAGE_SET(iseq, Qnil);
    ISEQ_ORIGINAL_ISEQ_CLEAR(iseq);
    iseq->body->variable.flip_count = variable_flip_count;

    load_body->location.first_lineno = location_first_lineno;
    load_body->location.node_id = location_node_id;
    load_body->location.code_location.beg_pos.lineno = location_code_location_beg_pos_lineno;
    load_body->location.code_location.beg_pos.column = location_code_location_beg_pos_column;
    load_body->location.code_location.end_pos.lineno = location_code_location_end_pos_lineno;
    load_body->location.code_location.end_pos.column = location_code_location_end_pos_column;
    load_body->catch_except_p = catch_except_p;

    /* inline-storage entries start zeroed; the tables below are decoded
     * from the offsets read above */
    load_body->is_entries = ZALLOC_N(union iseq_inline_storage_entry, is_size);
    ibf_load_ci_entries(load, ci_entries_offset, ci_size, &load_body->call_data);
    load_body->param.opt_table = ibf_load_param_opt_table(load, param_opt_table_offset, param_opt_num);
    load_body->param.keyword = ibf_load_param_keyword(load, param_keyword_offset);
    load_body->param.flags.has_kw = (param_flags >> 4) & 1; /* deferred, see above */
    load_body->insns_info.body = ibf_load_insns_info_body(load, insns_info_body_offset, insns_info_size);
    load_body->insns_info.positions = ibf_load_insns_info_positions(load, insns_info_positions_offset, insns_info_size);
    load_body->local_table = ibf_load_local_table(load, local_table_offset, local_table_size);
    load_body->catch_table = ibf_load_catch_table(load, catch_table_offset, catch_table_size);
    /* parent/local iseq were dumped as indices, smuggled through the
     * rb_iseq_t* parameter of ibf_load_iseq */
    load_body->parent_iseq = ibf_load_iseq(load, (const rb_iseq_t *)(VALUE)parent_iseq_index);
    load_body->local_iseq = ibf_load_iseq(load, (const rb_iseq_t *)(VALUE)local_iseq_index);

    ibf_load_code(load, iseq, bytecode_offset, bytecode_size, iseq_size);
#if VM_INSN_INFO_TABLE_IMPL == 2
    rb_iseq_insns_info_encode_positions(iseq);
#endif

    rb_iseq_translate_threaded_code(iseq);

#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
    load->current_buffer = &load->global_buffer;
#endif

    {
        /* path object is either a single String or a [path, realpath] pair */
        VALUE realpath = Qnil, path = ibf_load_object(load, location_pathobj_index);
        if (RB_TYPE_P(path, T_STRING)) {
            realpath = path = rb_fstring(path);
        }
        else if (RB_TYPE_P(path, T_ARRAY)) {
            VALUE pathobj = path;
            if (RARRAY_LEN(pathobj) != 2) {
                rb_raise(rb_eRuntimeError, "path object size mismatch");
            }
            path = rb_fstring(RARRAY_AREF(pathobj, 0));
            realpath = RARRAY_AREF(pathobj, 1);
            if (!NIL_P(realpath)) {
                if (!RB_TYPE_P(realpath, T_STRING)) {
                    rb_raise(rb_eArgError, "unexpected realpath %"PRIxVALUE
                             "(%x), path=%+"PRIsVALUE,
                             realpath, TYPE(realpath), path);
                }
                realpath = rb_fstring(realpath);
            }
        }
        else {
            rb_raise(rb_eRuntimeError, "unexpected path object");
        }
        rb_iseq_pathobj_set(iseq, path, realpath);
    }

    /* write barriers: the label strings are GC-managed VALUEs inside iseq */
    RB_OBJ_WRITE(iseq, &load_body->location.base_label, ibf_load_location_str(load, location_base_label_index));
    RB_OBJ_WRITE(iseq, &load_body->location.label, ibf_load_location_str(load, location_label_index));

#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
    load->current_buffer = saved_buffer;
#endif
    verify_call_cache(iseq);
}
/* Context threaded through st_foreach while dumping the iseq table. */
struct ibf_dump_iseq_list_arg
{
    struct ibf_dump *dump;
    VALUE offset_list; /* NOTE(review): member elided in extraction; it is
                        * read/written below and in ibf_dump_iseq_list */
};

/* st_foreach callback: serialize one iseq and record where it landed. */
static int
ibf_dump_iseq_list_i(st_data_t key, st_data_t val, st_data_t ptr)
{
    const rb_iseq_t *iseq = (const rb_iseq_t *)key;
    struct ibf_dump_iseq_list_arg *args = (struct ibf_dump_iseq_list_arg *)ptr;

    ibf_offset_t offset = ibf_dump_iseq_each(args->dump, iseq);
    rb_ary_push(args->offset_list, UINT2NUM(offset));

    return ST_CONTINUE;
}
/* Dump every iseq registered in dump->iseq_table, then write the array of
 * their offsets and record its position/size in the file header.
 * NOTE(review): return type, args.dump assignment and loop-index declaration
 * were elided by the extraction and are reconstructed. */
static void
ibf_dump_iseq_list(struct ibf_dump *dump, struct ibf_header *header)
{
    VALUE offset_list = rb_ary_tmp_new(dump->iseq_table->num_entries);

    struct ibf_dump_iseq_list_arg args;
    args.dump = dump; /* NOTE(review): reconstructed */
    args.offset_list = offset_list;

    /* dumping an iseq may register further iseqs; st_foreach visits them */
    st_foreach(dump->iseq_table, ibf_dump_iseq_list_i, (st_data_t)&args);

    st_index_t i; /* NOTE(review): reconstructed declaration */
    st_index_t size = dump->iseq_table->num_entries;
    ibf_offset_t *offsets = ALLOCA_N(ibf_offset_t, size);

    for (i = 0; i < size; i++) {
        offsets[i] = NUM2UINT(RARRAY_AREF(offset_list, i));
    }

    /* the offset table must be naturally aligned for direct pointer reads
     * at load time (see ibf_iseq_list) */
    ibf_dump_align(dump, sizeof(ibf_offset_t));
    header->iseq_list_offset = ibf_dump_write(dump, offsets, sizeof(ibf_offset_t) * size);
    header->iseq_list_size = (unsigned int)size;
}
/* Flag borrowed to mark objects dumped as "internal" (hidden class). */
#define IBF_OBJECT_INTERNAL FL_PROMOTED0

/*
 * Binary layout of one dumped object:
 * - ibf_object_header
 * - ibf_object_xxx (xxx is type)
 *
 * NOTE(review): several struct members below were elided by the extraction
 * and are reconstructed from how the dump/load functions use them — verify
 * against upstream before relying on exact layout.
 */
struct ibf_object_header {
    unsigned int type: 5;          /* ruby_value_type of the object */
    unsigned int special_const: 1; /* immediate value, stored inline */
    unsigned int frozen: 1;
    unsigned int internal: 1;      /* hidden (RBASIC_CLASS == 0) */
};

/* Only these well-known classes may appear as dumped T_CLASS objects. */
enum ibf_object_class_index {
    IBF_OBJECT_CLASS_OBJECT,
    IBF_OBJECT_CLASS_ARRAY,
    IBF_OBJECT_CLASS_STANDARD_ERROR,
    IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR,
    IBF_OBJECT_CLASS_TYPE_ERROR,
};

struct ibf_object_regexp {
    long srcstr; /* NOTE(review): reconstructed — object index of source string */
    char option; /* NOTE(review): reconstructed — rb_reg_options bits */
};

struct ibf_object_hash {
    long len;                    /* NOTE(review): reconstructed */
    long keyval[FLEX_ARY_LEN];   /* alternating key/value object indices */
};

struct ibf_object_struct_range {
    /* NOTE(review): members reconstructed from ibf_dump/load_object_struct */
    long class_index;
    long len;
    long beg;  /* object index of range begin */
    long end;  /* object index of range end */
    int excl;  /* exclusive-end flag from rb_range_values */
};

struct ibf_object_bignum {
    ssize_t slen;                 /* NOTE(review): reconstructed — signed length */
    BDIGIT digits[FLEX_ARY_LEN];
};

enum ibf_object_data_type {
    IBF_OBJECT_DATA_ENCODING,
};

struct ibf_object_complex_rational {
    long a, b; /* NOTE(review): reconstructed — real/imag or num/den indices */
};

struct ibf_object_symbol {
    long str; /* NOTE(review): reconstructed — object index of symbol name */
};

/* Round +offset+ up to +align+; offset must be > 0. */
#define IBF_ALIGNED_OFFSET(align, offset) /* offset > 0 */ \
    ((((offset) - 1) / (align) + 1) * (align))
/* Bounds-checked, aligned pointer to an object body inside the buffer. */
#define IBF_OBJBODY(type, offset) (const type *)\
    ibf_load_check_offset(load, IBF_ALIGNED_OFFSET(RUBY_ALIGNOF(type), offset))
/* Validate that +offset+ lies inside the current load buffer and return a
 * pointer to it; raises IndexError on out-of-range input (corrupt dump). */
static const void *
ibf_load_check_offset(const struct ibf_load *load, size_t offset)
{
    if (offset >= load->current_buffer->size) {
        rb_raise(rb_eIndexError, "object offset out of range: %"PRIdSIZE, offset);
    }
    return load->current_buffer->buff + offset;
}
NORETURN(static void ibf_dump_object_unsupported(struct ibf_dump *dump, VALUE obj));

/* Fallback slot in dump_object_functions: always raises NotImplementedError
 * with a raw description of the offending object.
 * NOTE(review): the buff declaration was elided by the extraction; size
 * reconstructed — verify against upstream. */
static void
ibf_dump_object_unsupported(struct ibf_dump *dump, VALUE obj)
{
    char buff[0x100]; /* NOTE(review): reconstructed */
    rb_raw_obj_info(buff, sizeof(buff), obj);
    rb_raise(rb_eNotImpError, "ibf_dump_object_unsupported: %s", buff);
}

/* Fallback slot in load_object_functions: such a type can never have been
 * dumped, so encountering it means the image is corrupt. */
static VALUE
ibf_load_object_unsupported(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    rb_raise(rb_eArgError, "unsupported");
    return Qnil; /* NOTE(review): reconstructed; unreachable after rb_raise */
}
/* Dump a T_CLASS object.  Only the handful of well-known classes in
 * ibf_object_class_index are representable; anything else is a bug in the
 * caller (compile-time data should never reference other classes). */
static void
ibf_dump_object_class(struct ibf_dump *dump, VALUE obj)
{
    enum ibf_object_class_index cindex;
    if (obj == rb_cObject) {
        cindex = IBF_OBJECT_CLASS_OBJECT;
    }
    else if (obj == rb_cArray) {
        cindex = IBF_OBJECT_CLASS_ARRAY;
    }
    else if (obj == rb_eStandardError) {
        cindex = IBF_OBJECT_CLASS_STANDARD_ERROR;
    }
    else if (obj == rb_eNoMatchingPatternError) {
        cindex = IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR;
    }
    else if (obj == rb_eTypeError) {
        cindex = IBF_OBJECT_CLASS_TYPE_ERROR;
    }
    else {
        rb_obj_info_dump(obj);
        rb_bug("unsupported class");
    }
    ibf_dump_write_small_value(dump, (VALUE)cindex);
}
/* Load a T_CLASS object: map the dumped class index back to the live class
 * object.  An unknown index means a corrupt or incompatible dump.
 * NOTE(review): the returns for OBJECT/ARRAY and the switch header were
 * elided by the extraction and are reconstructed. */
static VALUE
ibf_load_object_class(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    enum ibf_object_class_index cindex = (enum ibf_object_class_index)ibf_load_small_value(load, &offset);
    switch (cindex) { /* NOTE(review): reconstructed */
      case IBF_OBJECT_CLASS_OBJECT:
        return rb_cObject; /* NOTE(review): reconstructed */
      case IBF_OBJECT_CLASS_ARRAY:
        return rb_cArray; /* NOTE(review): reconstructed */
      case IBF_OBJECT_CLASS_STANDARD_ERROR:
        return rb_eStandardError;
      case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR:
        return rb_eNoMatchingPatternError;
      case IBF_OBJECT_CLASS_TYPE_ERROR:
        return rb_eTypeError;
    }

    rb_raise(rb_eArgError, "ibf_load_object_class: unknown class (%d)", (int)cindex);
}
/* Dump a non-flonum Float as its raw IEEE double bits. */
static void
ibf_dump_object_float(struct ibf_dump *dump, VALUE obj)
{
    double dbl = RFLOAT_VALUE(obj);
    (void)IBF_W(&dbl, double, 1);
}

/* Load a Float: read the raw double back out of the buffer. */
static VALUE
ibf_load_object_float(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    const double *dblp = IBF_OBJBODY(double, offset);
    return DBL2NUM(*dblp);
}
/* Dump a String: encoding index, byte length, then raw bytes.  Built-in
 * encodings are stored by index; any other encoding is stored by name as a
 * separate dumped string, offset past RUBY_ENCINDEX_BUILTIN_MAX. */
static void
ibf_dump_object_string(struct ibf_dump *dump, VALUE obj)
{
    long encindex = (long)rb_enc_get_index(obj);
    long len = RSTRING_LEN(obj);
    const char *ptr = RSTRING_PTR(obj);

    if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
        /* non-builtin encoding: dump its name and encode the object index
         * into the stored encindex */
        rb_encoding *enc = rb_enc_from_index((int)encindex);
        const char *enc_name = rb_enc_name(enc);
        encindex = RUBY_ENCINDEX_BUILTIN_MAX + ibf_dump_object(dump, rb_str_new2(enc_name));
    }

    ibf_dump_write_small_value(dump, encindex);
    ibf_dump_write_small_value(dump, len);
    IBF_WP(ptr, char, len);
}
/* Load a String: inverse of ibf_dump_object_string.  Restores bytes,
 * encoding, hidden-ness and frozen-ness (frozen strings are interned).
 * NOTE(review): the final return was elided by the extraction and is
 * reconstructed. */
static VALUE
ibf_load_object_string(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    ibf_offset_t reading_pos = offset;

    int encindex = (int)ibf_load_small_value(load, &reading_pos);
    const long len = (long)ibf_load_small_value(load, &reading_pos);
    const char *ptr = load->current_buffer->buff + reading_pos;

    VALUE str = rb_str_new(ptr, len);

    if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
        /* non-builtin encoding was dumped by name; resolve it now */
        VALUE enc_name_str = ibf_load_object(load, encindex - RUBY_ENCINDEX_BUILTIN_MAX);
        encindex = rb_enc_find_index(RSTRING_PTR(enc_name_str));
    }
    rb_enc_associate_index(str, encindex);

    if (header->internal) rb_obj_hide(str);
    if (header->frozen) str = rb_fstring(str);

    return str; /* NOTE(review): reconstructed */
}
/* Dump a Regexp as (option byte, source-string object index).  The source
 * string must be dumped BEFORE this record so that loading can resolve it. */
static void
ibf_dump_object_regexp(struct ibf_dump *dump, VALUE obj)
{
    VALUE srcstr = RREGEXP_SRC(obj);
    struct ibf_object_regexp regexp;
    regexp.option = (char)rb_reg_options(obj);
    regexp.srcstr = (long)ibf_dump_object(dump, srcstr);

    /* write fields individually (not the struct) to avoid padding in the
     * image; must match the reads in ibf_load_object_regexp */
    ibf_dump_write_byte(dump, (unsigned char)regexp.option);
    ibf_dump_write_small_value(dump, regexp.srcstr);
}
/* Load a Regexp: read option byte + source-string index, then recompile.
 * NOTE(review): the final return was elided by the extraction and is
 * reconstructed. */
static VALUE
ibf_load_object_regexp(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    struct ibf_object_regexp regexp;
    regexp.option = ibf_load_byte(load, &offset);
    regexp.srcstr = ibf_load_small_value(load, &offset);

    VALUE srcstr = ibf_load_object(load, regexp.srcstr);
    VALUE reg = rb_reg_compile(srcstr, (int)regexp.option, NULL, 0);

    if (header->internal) rb_obj_hide(reg);
    if (header->frozen) rb_obj_freeze(reg);

    return reg; /* NOTE(review): reconstructed */
}
/* Dump an Array as its length followed by one object index per element. */
static void
ibf_dump_object_array(struct ibf_dump *dump, VALUE obj)
{
    long i, len = RARRAY_LEN(obj);
    ibf_dump_write_small_value(dump, len);
    for (i=0; i<len; i++) {
        long index = (long)ibf_dump_object(dump, RARRAY_AREF(obj, i));
        ibf_dump_write_small_value(dump, index);
    }
}
/* Load an Array: length, then resolve each element by object index.
 * NOTE(review): loop-index declaration and final return were elided by the
 * extraction and are reconstructed. */
static VALUE
ibf_load_object_array(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    ibf_offset_t reading_pos = offset;

    const long len = (long)ibf_load_small_value(load, &reading_pos);

    VALUE ary = rb_ary_new_capa(len);
    long i; /* NOTE(review): reconstructed */

    for (i=0; i<len; i++) {
        const VALUE index = ibf_load_small_value(load, &reading_pos);
        rb_ary_push(ary, ibf_load_object(load, index));
    }

    if (header->internal) rb_obj_hide(ary);
    if (header->frozen) rb_obj_freeze(ary);

    return ary; /* NOTE(review): reconstructed */
}
/* rb_hash_foreach callback: dump one key/value pair as two object indices. */
static int
ibf_dump_object_hash_i(st_data_t key, st_data_t val, st_data_t ptr)
{
    struct ibf_dump *dump = (struct ibf_dump *)ptr;

    VALUE key_index = ibf_dump_object(dump, (VALUE)key);
    VALUE val_index = ibf_dump_object(dump, (VALUE)val);

    ibf_dump_write_small_value(dump, key_index);
    ibf_dump_write_small_value(dump, val_index);
    return ST_CONTINUE;
}

/* Dump a Hash as its size followed by size key/value index pairs. */
static void
ibf_dump_object_hash(struct ibf_dump *dump, VALUE obj)
{
    long len = RHASH_SIZE(obj);
    ibf_dump_write_small_value(dump, (VALUE)len);

    if (len > 0) rb_hash_foreach(obj, ibf_dump_object_hash_i, (VALUE)dump);
}
/* Load a Hash: size, then resolve each key/value pair by object index.
 * rb_hash_rehash rebuilds bucket placement since loaded keys may hash
 * differently than at dump time.
 * NOTE(review): loop-index declaration and final return were elided by the
 * extraction and are reconstructed. */
static VALUE
ibf_load_object_hash(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    long len = (long)ibf_load_small_value(load, &offset);
    VALUE obj = rb_hash_new_with_size(len);
    long i; /* NOTE(review): reconstructed */

    for (i = 0; i < len; i++) {
        VALUE key_index = ibf_load_small_value(load, &offset);
        VALUE val_index = ibf_load_small_value(load, &offset);

        VALUE key = ibf_load_object(load, key_index);
        VALUE val = ibf_load_object(load, val_index);
        rb_hash_aset(obj, key, val);
    }
    rb_hash_rehash(obj);

    if (header->internal) rb_obj_hide(obj);
    if (header->frozen) rb_obj_freeze(obj);

    return obj; /* NOTE(review): reconstructed */
}
/* Dump a T_STRUCT object.  Only Range is supported (it is the only struct
 * subclass that appears in compiled literals); anything else raises.
 * NOTE(review): the beg/end declarations, zero-init, len assignment and the
 * final struct write were elided by the extraction and are reconstructed —
 * verify against upstream. */
static void
ibf_dump_object_struct(struct ibf_dump *dump, VALUE obj)
{
    if (rb_obj_is_kind_of(obj, rb_cRange)) {
        struct ibf_object_struct_range range;
        VALUE beg, end;     /* NOTE(review): reconstructed */
        IBF_ZERO(range);    /* NOTE(review): reconstructed */
        range.len = 3;      /* NOTE(review): reconstructed */
        range.class_index = 0;

        rb_range_values(obj, &beg, &end, &range.excl);
        range.beg = (long)ibf_dump_object(dump, beg);
        range.end = (long)ibf_dump_object(dump, end);

        IBF_W_ALIGN(struct ibf_object_struct_range);
        IBF_WV(range);      /* NOTE(review): reconstructed */
    }
    else {
        rb_raise(rb_eNotImpError, "ibf_dump_object_struct: unsupported class %"PRIsVALUE,
                 rb_class_name(CLASS_OF(obj)));
    }
}
/* Load a T_STRUCT object: only Range was ever dumped, so rebuild a Range
 * from the stored begin/end object indices and exclusivity flag.
 * NOTE(review): the final return was elided and is reconstructed. */
static VALUE
ibf_load_object_struct(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    const struct ibf_object_struct_range *range = IBF_OBJBODY(struct ibf_object_struct_range, offset);
    VALUE beg = ibf_load_object(load, range->beg);
    VALUE end = ibf_load_object(load, range->end);
    VALUE obj = rb_range_new(beg, end, range->excl);
    if (header->internal) rb_obj_hide(obj);
    if (header->frozen) rb_obj_freeze(obj);

    return obj; /* NOTE(review): reconstructed */
}
/* Dump a Bignum: signed digit count (negative count encodes a negative
 * number) followed by the raw BDIGIT limbs. */
static void
ibf_dump_object_bignum(struct ibf_dump *dump, VALUE obj)
{
    ssize_t len = BIGNUM_LEN(obj);
    /* sign is folded into the stored length's sign */
    ssize_t slen = BIGNUM_SIGN(obj) > 0 ? len : len * -1;
    BDIGIT *d = BIGNUM_DIGITS(obj);

    (void)IBF_W(&slen, ssize_t, 1);
    IBF_WP(d, BDIGIT, len);
}

/* Load a Bignum: unpack the little-endian BDIGIT limbs back into an Integer.
 * NOTE(review): the `len * 2, 2` word/size arguments assume BDIGIT is read
 * as 2-byte units here; the final return was elided and is reconstructed —
 * verify both against upstream. */
static VALUE
ibf_load_object_bignum(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    const struct ibf_object_bignum *bignum = IBF_OBJBODY(struct ibf_object_bignum, offset);
    int sign = bignum->slen > 0;
    ssize_t len = sign > 0 ? bignum->slen : -1 * bignum->slen;
    VALUE obj = rb_integer_unpack(bignum->digits, len * 2, 2, 0,
                                  INTEGER_PACK_LITTLE_ENDIAN | (sign == 0 ? INTEGER_PACK_NEGATIVE : 0));
    if (header->internal) rb_obj_hide(obj);
    if (header->frozen) rb_obj_freeze(obj);

    return obj; /* NOTE(review): reconstructed */
}
/* Dump a T_DATA object.  Only Encoding objects are supported; they are
 * stored as (IBF_OBJECT_DATA_ENCODING, name length, name bytes).
 * NOTE(review): the data[2] declaration and data[1] = len assignment were
 * elided by the extraction and are reconstructed — verify upstream. */
static void
ibf_dump_object_data(struct ibf_dump *dump, VALUE obj)
{
    if (rb_data_is_encoding(obj)) {
        rb_encoding *enc = rb_to_encoding(obj);
        const char *name = rb_enc_name(enc);
        long len = strlen(name) + 1; /* include the NUL terminator */
        long data[2];                /* NOTE(review): reconstructed */
        data[0] = IBF_OBJECT_DATA_ENCODING;
        data[1] = len;               /* NOTE(review): reconstructed */
        (void)IBF_W(data, long, 2);
        IBF_WP(name, char, len);
    }
    else {
        ibf_dump_object_unsupported(dump, obj);
    }
}

/* Load a T_DATA object: currently only Encoding, looked up by name. */
static VALUE
ibf_load_object_data(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    const long *body = IBF_OBJBODY(long, offset);
    const enum ibf_object_data_type type = (enum ibf_object_data_type)body[0];
    /* const long len = body[1]; */
    const char *data = (const char *)&body[2];

    switch (type) { /* NOTE(review): switch header reconstructed */
      case IBF_OBJECT_DATA_ENCODING:
        {
            VALUE encobj = rb_enc_from_encoding(rb_enc_find(data));
            return encobj; /* NOTE(review): reconstructed */
        }
    }

    return ibf_load_object_unsupported(load, header, offset);
}
/* Dump a Complex or Rational as two object indices (real/imag resp.
 * numerator/denominator — both types share the RCOMPLEX slot layout here).
 * NOTE(review): the data[2] declaration was elided and is reconstructed. */
static void
ibf_dump_object_complex_rational(struct ibf_dump *dump, VALUE obj)
{
    long data[2]; /* NOTE(review): reconstructed */
    data[0] = (long)ibf_dump_object(dump, RCOMPLEX(obj)->real);
    data[1] = (long)ibf_dump_object(dump, RCOMPLEX(obj)->imag);

    (void)IBF_W(data, long, 2);
}

/* Load a Complex or Rational; the header's type field tells which to build.
 * NOTE(review): the final return was elided and is reconstructed. */
static VALUE
ibf_load_object_complex_rational(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    const struct ibf_object_complex_rational *nums = IBF_OBJBODY(struct ibf_object_complex_rational, offset);
    VALUE a = ibf_load_object(load, nums->a);
    VALUE b = ibf_load_object(load, nums->b);
    VALUE obj = header->type == T_COMPLEX ?
        rb_complex_new(a, b) : rb_rational_new(a, b);

    if (header->internal) rb_obj_hide(obj);
    if (header->frozen) rb_obj_freeze(obj);

    return obj; /* NOTE(review): reconstructed */
}
/* Dump a Symbol as the object index of its name string. */
static void
ibf_dump_object_symbol(struct ibf_dump *dump, VALUE obj)
{
    VALUE str = rb_sym2str(obj);
    VALUE str_index = ibf_dump_object(dump, str);

    ibf_dump_write_small_value(dump, str_index);
}

/* Load a Symbol: resolve the name string and re-intern it.
 * NOTE(review): the final return was elided and is reconstructed. */
static VALUE
ibf_load_object_symbol(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    VALUE str_index = ibf_load_small_value(load, &offset);
    VALUE str = ibf_load_object(load, str_index);
    ID id = rb_intern_str(str);
    return ID2SYM(id); /* NOTE(review): reconstructed */
}
/* Per-type dump dispatch table, indexed by ruby_value_type (TYPE(obj)).
 * Slots for types that cannot appear in compiled literals raise via
 * ibf_dump_object_unsupported.  Must stay parallel to
 * load_object_functions below. */
typedef void (*ibf_dump_object_function)(struct ibf_dump *dump, VALUE obj);
static ibf_dump_object_function dump_object_functions[RUBY_T_MASK+1] = {
    ibf_dump_object_unsupported, /* T_NONE */
    ibf_dump_object_unsupported, /* T_OBJECT */
    ibf_dump_object_class,       /* T_CLASS */
    ibf_dump_object_unsupported, /* T_MODULE */
    ibf_dump_object_float,       /* T_FLOAT */
    ibf_dump_object_string,      /* T_STRING */
    ibf_dump_object_regexp,      /* T_REGEXP */
    ibf_dump_object_array,       /* T_ARRAY */
    ibf_dump_object_hash,        /* T_HASH */
    ibf_dump_object_struct,      /* T_STRUCT */
    ibf_dump_object_bignum,      /* T_BIGNUM */
    ibf_dump_object_unsupported, /* T_FILE */
    ibf_dump_object_data,        /* T_DATA */
    ibf_dump_object_unsupported, /* T_MATCH */
    ibf_dump_object_complex_rational, /* T_COMPLEX */
    ibf_dump_object_complex_rational, /* T_RATIONAL */
    ibf_dump_object_unsupported, /* 0x10 */
    ibf_dump_object_unsupported, /* 0x11 T_NIL */
    ibf_dump_object_unsupported, /* 0x12 T_TRUE */
    ibf_dump_object_unsupported, /* 0x13 T_FALSE */
    ibf_dump_object_symbol,      /* 0x14 T_SYMBOL */
    ibf_dump_object_unsupported, /* T_FIXNUM */
    ibf_dump_object_unsupported, /* T_UNDEF */
    ibf_dump_object_unsupported, /* 0x17 */
    ibf_dump_object_unsupported, /* 0x18 */
    ibf_dump_object_unsupported, /* 0x19 */
    ibf_dump_object_unsupported, /* T_IMEMO 0x1a */
    ibf_dump_object_unsupported, /* T_NODE 0x1b */
    ibf_dump_object_unsupported, /* T_ICLASS 0x1c */
    ibf_dump_object_unsupported, /* T_ZOMBIE 0x1d */
    ibf_dump_object_unsupported, /* 0x1e */
    ibf_dump_object_unsupported, /* 0x1f */
};
/* Pack the 8-bit object header (type:5, special_const:1, frozen:1,
 * internal:1) into one byte and write it.
 * NOTE(review): the final ibf_dump_write_byte call was elided by the
 * extraction and is reconstructed — verify upstream. */
static void
ibf_dump_object_object_header(struct ibf_dump *dump, const struct ibf_object_header header)
{
    unsigned char byte =
        (header.type          << 0) |
        (header.special_const << 5) |
        (header.frozen        << 6) |
        (header.internal      << 7);

    ibf_dump_write_byte(dump, byte); /* NOTE(review): reconstructed */
}

/* Unpack the object-header byte written above; advances *offset. */
static struct ibf_object_header
ibf_load_object_object_header(const struct ibf_load *load, ibf_offset_t *offset)
{
    unsigned char byte = ibf_load_byte(load, offset);

    struct ibf_object_header header;
    header.type          = (byte >> 0) & 0x1f;
    header.special_const = (byte >> 5) & 0x01;
    header.frozen        = (byte >> 6) & 0x01;
    header.internal      = (byte >> 7) & 0x01;

    return header; /* NOTE(review): reconstructed */
}
/* Dump one object (header byte + type-specific body) and return the offset
 * it was written at.  Special constants are stored inline as small values,
 * EXCEPT Symbols and dynamic-symbol-free Floats, which go through the
 * per-type dumpers so they survive pointer-identity changes.
 * NOTE(review): the goto into the shared dump path and its label were
 * elided by the extraction and are reconstructed — the visible control flow
 * (T_SYMBOL/T_FLOAT clearing `internal` then falling into the non-special
 * path) only makes sense with them; verify upstream. */
static ibf_offset_t
ibf_dump_object_object(struct ibf_dump *dump, VALUE obj)
{
    struct ibf_object_header obj_header;
    ibf_offset_t current_offset;
    IBF_ZERO(obj_header);
    obj_header.type = TYPE(obj);

    IBF_W_ALIGN(ibf_offset_t);
    current_offset = ibf_dump_pos(dump);

    if (SPECIAL_CONST_P(obj)) {
        if (RB_TYPE_P(obj, T_SYMBOL) ||
            RB_TYPE_P(obj, T_FLOAT)) {
            obj_header.internal = FALSE;
            goto dump_object; /* NOTE(review): reconstructed */
        }
        obj_header.special_const = TRUE;
        obj_header.frozen = TRUE;
        obj_header.internal = TRUE;
        ibf_dump_object_object_header(dump, obj_header);
        ibf_dump_write_small_value(dump, obj);
    }
    else {
        /* hidden objects (no class) are marked internal so load re-hides */
        obj_header.internal = (RBASIC_CLASS(obj) == 0) ? TRUE : FALSE;
      dump_object: /* NOTE(review): reconstructed label */
        obj_header.special_const = FALSE;
        obj_header.frozen = FL_TEST(obj, FL_FREEZE) ? TRUE : FALSE;
        ibf_dump_object_object_header(dump, obj_header);
        (*dump_object_functions[obj_header.type])(dump, obj);
    }

    return current_offset;
}
/* Per-type load dispatch table; must stay parallel, slot for slot, to
 * dump_object_functions above. */
typedef VALUE (*ibf_load_object_function)(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset);
static ibf_load_object_function load_object_functions[RUBY_T_MASK+1] = {
    ibf_load_object_unsupported, /* T_NONE */
    ibf_load_object_unsupported, /* T_OBJECT */
    ibf_load_object_class,       /* T_CLASS */
    ibf_load_object_unsupported, /* T_MODULE */
    ibf_load_object_float,       /* T_FLOAT */
    ibf_load_object_string,      /* T_STRING */
    ibf_load_object_regexp,      /* T_REGEXP */
    ibf_load_object_array,       /* T_ARRAY */
    ibf_load_object_hash,        /* T_HASH */
    ibf_load_object_struct,      /* T_STRUCT */
    ibf_load_object_bignum,      /* T_BIGNUM */
    ibf_load_object_unsupported, /* T_FILE */
    ibf_load_object_data,        /* T_DATA */
    ibf_load_object_unsupported, /* T_MATCH */
    ibf_load_object_complex_rational, /* T_COMPLEX */
    ibf_load_object_complex_rational, /* T_RATIONAL */
    ibf_load_object_unsupported, /* 0x10 */
    ibf_load_object_unsupported, /* T_NIL */
    ibf_load_object_unsupported, /* T_TRUE */
    ibf_load_object_unsupported, /* T_FALSE */
    ibf_load_object_symbol,      /* T_SYMBOL */
    ibf_load_object_unsupported, /* T_FIXNUM */
    ibf_load_object_unsupported, /* T_UNDEF */
    ibf_load_object_unsupported, /* 0x17 */
    ibf_load_object_unsupported, /* 0x18 */
    ibf_load_object_unsupported, /* 0x19 */
    ibf_load_object_unsupported, /* T_IMEMO 0x1a */
    ibf_load_object_unsupported, /* T_NODE 0x1b */
    ibf_load_object_unsupported, /* T_ICLASS 0x1c */
    ibf_load_object_unsupported, /* T_ZOMBIE 0x1d */
    ibf_load_object_unsupported, /* 0x1e */
    ibf_load_object_unsupported, /* 0x1f */
};
/* Resolve object index -> VALUE, loading and memoizing it on first use.
 * Index 0 is reserved for nil; indexes out of the object-list range mean a
 * corrupt dump.  Loaded objects are cached in current_buffer->obj_list so
 * repeated references share one object.
 * NOTE(review): return type, the `return Qnil` for index 0, the else
 * structure, the #if IBF_ISEQ_DEBUG guards around the fprintf diagnostics,
 * and the final return were elided by the extraction and are reconstructed
 * — verify upstream. */
static VALUE
ibf_load_object(const struct ibf_load *load, VALUE object_index)
{
    if (object_index == 0) {
        return Qnil; /* NOTE(review): reconstructed */
    }
    else if (object_index >= (VALUE)RARRAY_LEN(load->current_buffer->obj_list)) {
        rb_raise(rb_eIndexError, "object index out of range: %"PRIdVALUE, object_index);
    }
    else { /* NOTE(review): reconstructed branch structure */
        VALUE obj = rb_ary_entry(load->current_buffer->obj_list, (long)object_index);
        if (obj == Qnil) { /* TODO: avoid multiple Qnil load */
            /* offsets table lives inside the mapped buffer itself */
            ibf_offset_t *offsets = (ibf_offset_t *)(load->current_buffer->obj_list_offset + load->current_buffer->buff);
            ibf_offset_t offset = offsets[object_index];
            const struct ibf_object_header header = ibf_load_object_object_header(load, &offset);

#if IBF_ISEQ_DEBUG /* NOTE(review): guard reconstructed */
            fprintf(stderr, "ibf_load_object: list=%#x offsets=%p offset=%#x\n",
                    load->current_buffer->obj_list_offset, (void *)offsets, offset);
            fprintf(stderr, "ibf_load_object: type=%#x special=%d frozen=%d internal=%d\n",
                    header.type, header.special_const, header.frozen, header.internal);
#endif
            if (offset >= load->current_buffer->size) {
                rb_raise(rb_eIndexError, "object offset out of range: %u", offset);
            }

            if (header.special_const) {
                /* immediates were stored inline as a small value */
                ibf_offset_t reading_pos = offset;

                obj = ibf_load_small_value(load, &reading_pos);
            }
            else {
                obj = (*load_object_functions[header.type])(load, &header, offset);
            }

            rb_ary_store(load->current_buffer->obj_list, (long)object_index, obj);
        }
#if IBF_ISEQ_DEBUG /* NOTE(review): guard reconstructed */
        fprintf(stderr, "ibf_load_object: index=%#"PRIxVALUE" obj=%#"PRIxVALUE"\n",
                object_index, obj);
#endif
        return obj; /* NOTE(review): reconstructed */
    }
}
/* Context threaded through st_foreach while dumping the object table. */
struct ibf_dump_object_list_arg
{
    struct ibf_dump *dump;
    VALUE offset_list; /* NOTE(review): member elided in extraction; it is
                        * used below and in ibf_dump_object_list */
};

/* st_foreach callback: dump one object and record where it landed. */
static int
ibf_dump_object_list_i(st_data_t key, st_data_t val, st_data_t ptr)
{
    VALUE obj = (VALUE)key;
    struct ibf_dump_object_list_arg *args = (struct ibf_dump_object_list_arg *)ptr;

    ibf_offset_t offset = ibf_dump_object_object(args->dump, obj);
    rb_ary_push(args->offset_list, UINT2NUM(offset));

    return ST_CONTINUE;
}
/* Dump every object in the current buffer's object table, then write the
 * table of their offsets; reports the table's position and entry count
 * through the out-parameters.
 * NOTE(review): return type, args.dump assignment, loop-index declaration
 * and the per-offset write inside the loop were elided by the extraction
 * and are reconstructed — verify upstream. */
static void
ibf_dump_object_list(struct ibf_dump *dump, ibf_offset_t *obj_list_offset, unsigned int *obj_list_size)
{
    st_table *obj_table = dump->current_buffer->obj_table;
    VALUE offset_list = rb_ary_tmp_new(obj_table->num_entries);

    struct ibf_dump_object_list_arg args;
    args.dump = dump; /* NOTE(review): reconstructed */
    args.offset_list = offset_list;

    st_foreach(obj_table, ibf_dump_object_list_i, (st_data_t)&args);

    IBF_W_ALIGN(ibf_offset_t);
    *obj_list_offset = ibf_dump_pos(dump);

    st_index_t size = obj_table->num_entries;
    st_index_t i; /* NOTE(review): reconstructed */

    for (i=0; i<size; i++) {
        ibf_offset_t offset = NUM2UINT(RARRAY_AREF(offset_list, i));
        IBF_WV(offset); /* NOTE(review): reconstructed */
    }

    *obj_list_size = (unsigned int)size;
}
/* GC mark hook for the ibf_dump TypedData wrapper: keep the output string
 * and both key sets (object table, iseq table) alive during dumping. */
static void
ibf_dump_mark(void *ptr)
{
    struct ibf_dump *dump = (struct ibf_dump *)ptr;
    rb_gc_mark(dump->global_buffer.str);

    rb_mark_set(dump->global_buffer.obj_table);
    rb_mark_set(dump->iseq_table);
}

/* GC free hook: release the st tables; idempotent because the pointers are
 * nulled after freeing (also called manually from rb_iseq_ibf_dump). */
static void
ibf_dump_free(void *ptr)
{
    struct ibf_dump *dump = (struct ibf_dump *)ptr;
    if (dump->global_buffer.obj_table) {
        st_free_table(dump->global_buffer.obj_table);
        dump->global_buffer.obj_table = 0;
    }
    if (dump->iseq_table) {
        st_free_table(dump->iseq_table);
        dump->iseq_table = 0;
    }
    ruby_xfree(dump); /* NOTE(review): elided in extraction — reconstructed */
}

/* GC memsize hook: struct plus both st tables.
 * NOTE(review): the final return was elided and is reconstructed. */
static size_t
ibf_dump_memsize(const void *ptr)
{
    struct ibf_dump *dump = (struct ibf_dump *)ptr;
    size_t size = sizeof(*dump);
    if (dump->iseq_table) size += st_memsize(dump->iseq_table);
    if (dump->global_buffer.obj_table) size += st_memsize(dump->global_buffer.obj_table);
    return size; /* NOTE(review): reconstructed */
}

/* TypedData type for the transient dump object. */
static const rb_data_type_t ibf_dump_type = {
    "ibf_dump", /* NOTE(review): name string elided — reconstructed */
    {ibf_dump_mark, ibf_dump_free, ibf_dump_memsize,},
    0, 0, RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_FREE_IMMEDIATELY
};
/* Initialize a freshly allocated ibf_dump: null the tables first so a GC
 * triggered by the allocations below sees a consistent struct, then create
 * the output string, object table and iseq table. */
static void
ibf_dump_setup(struct ibf_dump *dump, VALUE dumper_obj)
{
    dump->global_buffer.obj_table = NULL; // GC may run before a value is assigned
    dump->iseq_table = NULL;

    RB_OBJ_WRITE(dumper_obj, &dump->global_buffer.str, rb_str_new(0, 0));
    dump->global_buffer.obj_table = ibf_dump_object_table_new();
    dump->iseq_table = st_init_numtable(); /* need free */

    dump->current_buffer = &dump->global_buffer;
}
/* Public entry point: serialize a top-level iseq into a YARB binary String.
 * Layout: header, platform string, root iseq, all iseqs + offset list, all
 * objects + offset list, optional user "extra" data; finally the header is
 * rewritten in place with the now-known offsets/sizes.
 * NOTE(review): return type, the dump_obj/str declarations, the
 * `if (opt != Qnil)` / `else` structure and the final return were elided by
 * the extraction and are reconstructed — verify upstream. */
VALUE
rb_iseq_ibf_dump(const rb_iseq_t *iseq, VALUE opt)
{
    struct ibf_dump *dump;
    struct ibf_header header = {{0}};
    VALUE dump_obj; /* NOTE(review): reconstructed */
    VALUE str;      /* NOTE(review): reconstructed */

    /* only a whole compilation unit can be dumped */
    if (iseq->body->parent_iseq != NULL ||
        iseq->body->local_iseq != iseq) {
        rb_raise(rb_eRuntimeError, "should be top of iseq");
    }
    if (RTEST(ISEQ_COVERAGE(iseq))) {
        rb_raise(rb_eRuntimeError, "should not compile with coverage");
    }

    dump_obj = TypedData_Make_Struct(0, struct ibf_dump, &ibf_dump_type, dump);
    ibf_dump_setup(dump, dump_obj);

    /* placeholder header; overwritten at the end once sizes are known */
    ibf_dump_write(dump, &header, sizeof(header));
    ibf_dump_write(dump, RUBY_PLATFORM, strlen(RUBY_PLATFORM) + 1);
    ibf_dump_iseq(dump, iseq);

    header.magic[0] = 'Y'; /* YARB */
    header.magic[1] = 'A';
    header.magic[2] = 'R';
    header.magic[3] = 'B';
    header.major_version = IBF_MAJOR_VERSION;
    header.minor_version = IBF_MINOR_VERSION;
    ibf_dump_iseq_list(dump, &header);
    ibf_dump_object_list(dump, &header.global_object_list_offset, &header.global_object_list_size);
    header.size = ibf_dump_pos(dump);

    if (opt != Qnil) { /* NOTE(review): reconstructed condition */
        VALUE opt_str = opt;
        const char *ptr = StringValuePtr(opt_str);
        header.extra_size = RSTRING_LENINT(opt_str);
        ibf_dump_write(dump, ptr, header.extra_size);
    }
    else {
        header.extra_size = 0;
    }

    ibf_dump_overwrite(dump, &header, sizeof(header), 0);

    str = dump->global_buffer.str;
    /* free eagerly instead of waiting for GC; null DATA_PTR so the GC free
     * hook does not double-free */
    ibf_dump_free(dump);
    DATA_PTR(dump_obj) = NULL;
    RB_GC_GUARD(dump_obj);
    return str; /* NOTE(review): reconstructed */
}
/* Pointer to the iseq offset table inside the mapped image (written by
 * ibf_dump_iseq_list; alignment was ensured at dump time). */
static const ibf_offset_t *
ibf_iseq_list(const struct ibf_load *load)
{
    return (const ibf_offset_t *)(load->global_buffer.buff + load->header->iseq_list_offset);
}
/* Finish lazily-loading an iseq stub created by ibf_load_iseq: look up its
 * offset, deserialize the body, clear the NOT_LOADED flag and re-arm trace
 * instrumentation.  load->iseq is saved/restored around the recursive load.
 * NOTE(review): return type, the `load->iseq = iseq` assignment and the
 * #if IBF_ISEQ_DEBUG guard were elided by the extraction and are
 * reconstructed — verify upstream. */
void
rb_ibf_load_iseq_complete(rb_iseq_t *iseq)
{
    struct ibf_load *load = RTYPEDDATA_DATA(iseq->aux.loader.obj);
    rb_iseq_t *prev_src_iseq = load->iseq;
    ibf_offset_t offset = ibf_iseq_list(load)[iseq->aux.loader.index];
    load->iseq = iseq; /* NOTE(review): reconstructed */
#if IBF_ISEQ_DEBUG  /* NOTE(review): guard reconstructed */
    fprintf(stderr, "rb_ibf_load_iseq_complete: index=%#x offset=%#x size=%#x\n",
            iseq->aux.loader.index, offset,
            load->header->size);
#endif
    ibf_load_iseq_each(load, iseq, offset);
    ISEQ_COMPILE_DATA_CLEAR(iseq);
    FL_UNSET((VALUE)iseq, ISEQ_NOT_LOADED_YET);
    rb_iseq_init_trace(iseq);
    load->iseq = prev_src_iseq;
}

/* MJIT-visible wrapper: force completion of a lazily-loaded iseq and return
 * it.  NOTE(review): the final return was elided and is reconstructed. */
MJIT_FUNC_EXPORTED const rb_iseq_t *
rb_iseq_complete(const rb_iseq_t *iseq)
{
    rb_ibf_load_iseq_complete((rb_iseq_t *)iseq);
    return iseq; /* NOTE(review): reconstructed */
}
11566 ibf_load_iseq(const struct ibf_load
*load
, const rb_iseq_t
*index_iseq
)
11568 int iseq_index
= (int)(VALUE
)index_iseq
;
11571 fprintf(stderr
, "ibf_load_iseq: index_iseq=%p iseq_list=%p\n",
11572 (void *)index_iseq
, (void *)load
->iseq_list
);
11574 if (iseq_index
== -1) {
11578 VALUE iseqv
= rb_ary_entry(load
->iseq_list
, iseq_index
);
11581 fprintf(stderr
, "ibf_load_iseq: iseqv=%p\n", (void *)iseqv
);
11583 if (iseqv
!= Qnil
) {
11584 return (rb_iseq_t
*)iseqv
;
11587 rb_iseq_t
*iseq
= iseq_imemo_alloc();
11589 fprintf(stderr
, "ibf_load_iseq: new iseq=%p\n", (void *)iseq
);
11591 FL_SET((VALUE
)iseq
, ISEQ_NOT_LOADED_YET
);
11592 iseq
->aux
.loader
.obj
= load
->loader_obj
;
11593 iseq
->aux
.loader
.index
= iseq_index
;
11595 fprintf(stderr
, "ibf_load_iseq: iseq=%p loader_obj=%p index=%d\n",
11596 (void *)iseq
, (void *)load
->loader_obj
, iseq_index
);
11598 rb_ary_store(load
->iseq_list
, iseq_index
, (VALUE
)iseq
);
11602 fprintf(stderr
, "ibf_load_iseq: loading iseq=%p\n", (void *)iseq
);
11604 rb_ibf_load_iseq_complete(iseq
);
11606 if (GET_VM()->builtin_function_table
) {
11607 rb_ibf_load_iseq_complete(iseq
);
11609 #endif /* !USE_LAZY_LOAD */
11612 fprintf(stderr
, "ibf_load_iseq: iseq=%p loaded %p\n",
11613 (void *)iseq
, (void *)load
->iseq
);
11621 ibf_load_setup_bytes(struct ibf_load
*load
, VALUE loader_obj
, const char *bytes
, size_t size
)
11623 load
->loader_obj
= loader_obj
;
11624 load
->global_buffer
.buff
= bytes
;
11625 load
->header
= (struct ibf_header
*)load
->global_buffer
.buff
;
11626 load
->global_buffer
.size
= load
->header
->size
;
11627 load
->global_buffer
.obj_list_offset
= load
->header
->global_object_list_offset
;
11628 load
->global_buffer
.obj_list_size
= load
->header
->global_object_list_size
;
11629 RB_OBJ_WRITE(loader_obj
, &load
->iseq_list
, rb_ary_tmp_new(0));
11630 RB_OBJ_WRITE(loader_obj
, &load
->global_buffer
.obj_list
, rb_ary_tmp_new(load
->global_buffer
.obj_list_size
));
11631 rb_ary_resize(load
->global_buffer
.obj_list
, load
->global_buffer
.obj_list_size
);
11634 load
->current_buffer
= &load
->global_buffer
;
11636 if (size
< load
->header
->size
) {
11637 rb_raise(rb_eRuntimeError
, "broken binary format");
11639 if (strncmp(load
->header
->magic
, "YARB", 4) != 0) {
11640 rb_raise(rb_eRuntimeError
, "unknown binary format");
11642 if (load
->header
->major_version
!= IBF_MAJOR_VERSION
||
11643 load
->header
->minor_version
!= IBF_MINOR_VERSION
) {
11644 rb_raise(rb_eRuntimeError
, "unmatched version file (%u.%u for %u.%u)",
11645 load
->header
->major_version
, load
->header
->minor_version
, IBF_MAJOR_VERSION
, IBF_MINOR_VERSION
);
11647 if (strcmp(load
->global_buffer
.buff
+ sizeof(struct ibf_header
), RUBY_PLATFORM
) != 0) {
11648 rb_raise(rb_eRuntimeError
, "unmatched platform");
11650 if (load
->header
->iseq_list_offset
% RUBY_ALIGNOF(ibf_offset_t
)) {
11651 rb_raise(rb_eArgError
, "unaligned iseq list offset: %u",
11652 load
->header
->iseq_list_offset
);
11654 if (load
->global_buffer
.obj_list_offset
% RUBY_ALIGNOF(ibf_offset_t
)) {
11655 rb_raise(rb_eArgError
, "unaligned object list offset: %u",
11656 load
->global_buffer
.obj_list_offset
);
11661 ibf_load_setup(struct ibf_load
*load
, VALUE loader_obj
, VALUE str
)
11663 if (RSTRING_LENINT(str
) < (int)sizeof(struct ibf_header
)) {
11664 rb_raise(rb_eRuntimeError
, "broken binary format");
11668 str
= rb_str_new(RSTRING_PTR(str
), RSTRING_LEN(str
));
11671 ibf_load_setup_bytes(load
, loader_obj
, StringValuePtr(str
), RSTRING_LEN(str
));
11672 RB_OBJ_WRITE(loader_obj
, &load
->str
, str
);
11676 ibf_loader_mark(void *ptr
)
11678 struct ibf_load
*load
= (struct ibf_load
*)ptr
;
11679 rb_gc_mark(load
->str
);
11680 rb_gc_mark(load
->iseq_list
);
11681 rb_gc_mark(load
->global_buffer
.obj_list
);
/* GC free callback: release the struct ibf_load payload allocated by
 * TypedData_Make_Struct.  (Body not fully visible in this chunk; a plain
 * ruby_xfree of the payload is the standard pattern — confirm.) */
static void
ibf_loader_free(void *ptr)
{
    struct ibf_load *load = ptr;

    ruby_xfree(load);
}
11692 ibf_loader_memsize(const void *ptr
)
11694 return sizeof(struct ibf_load
);
11697 static const rb_data_type_t ibf_load_type
= {
11699 {ibf_loader_mark
, ibf_loader_free
, ibf_loader_memsize
,},
11700 0, 0, RUBY_TYPED_WB_PROTECTED
| RUBY_TYPED_FREE_IMMEDIATELY
11704 rb_iseq_ibf_load(VALUE str
)
11706 struct ibf_load
*load
;
11708 VALUE loader_obj
= TypedData_Make_Struct(0, struct ibf_load
, &ibf_load_type
, load
);
11710 ibf_load_setup(load
, loader_obj
, str
);
11711 iseq
= ibf_load_iseq(load
, 0);
11713 RB_GC_GUARD(loader_obj
);
11718 rb_iseq_ibf_load_bytes(const char *bytes
, size_t size
)
11720 struct ibf_load
*load
;
11722 VALUE loader_obj
= TypedData_Make_Struct(0, struct ibf_load
, &ibf_load_type
, load
);
11724 ibf_load_setup_bytes(load
, loader_obj
, bytes
, size
);
11725 iseq
= ibf_load_iseq(load
, 0);
11727 RB_GC_GUARD(loader_obj
);
11732 rb_iseq_ibf_load_extra_data(VALUE str
)
11734 struct ibf_load
*load
;
11735 VALUE loader_obj
= TypedData_Make_Struct(0, struct ibf_load
, &ibf_load_type
, load
);
11738 ibf_load_setup(load
, loader_obj
, str
);
11739 extra_str
= rb_str_new(load
->global_buffer
.buff
+ load
->header
->size
, load
->header
->extra_size
);
11740 RB_GC_GUARD(loader_obj
);