/**********************************************************************

  node.c - ruby node tree

  $Author: mame $
  created at: 09/12/06 21:23:44 JST

  Copyright (C) 2009 Yusuke Endoh

**********************************************************************/

#ifdef UNIVERSAL_PARSER

#include <stddef.h>
#include "node.h"
#include "rubyparser.h"
#include "internal/parse.h"
#define T_NODE 0x1b

#else

#include "internal.h"
#include "internal/hash.h"
#include "internal/variable.h"
#include "ruby/ruby.h"
#include "vm_core.h"

#endif

#define NODE_BUF_DEFAULT_SIZE (sizeof(struct RNode) * 16)
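/* A node_buffer_elem_t is a bump-allocated arena holding NODE structs of
 * varying sizes; `nodes` keeps a pointer to every node carved out of the
 * arena so the nodes can be iterated (and freed) later. */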
static void
init_node_buffer_elem(node_buffer_elem_t *nbe, size_t allocated, void *xmalloc(size_t))
{
    nbe->allocated = allocated;
    nbe->used = 0;
    nbe->len = 0;
    nbe->nodes = xmalloc(allocated / sizeof(struct RNode) * sizeof(struct RNode *)); /* every node is at least sizeof(struct RNode), so this many slots suffice */
}
static void
init_node_buffer_list(node_buffer_list_t *nb, node_buffer_elem_t *head, void *xmalloc(size_t))
{
    init_node_buffer_elem(head, NODE_BUF_DEFAULT_SIZE, xmalloc);
    nb->head = nb->last = head;
    nb->head->next = NULL;
}

#ifdef UNIVERSAL_PARSER
#define ruby_xmalloc config->malloc
#define Qnil config->qnil
#endif
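/* The node_buffer_t and the headers of its first two buckets are carved out
 * of a single allocation: one bucket list for plain ("unmarkable") nodes and
 * one for nodes that embed VALUEs the GC must trace. */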
#ifdef UNIVERSAL_PARSER
static node_buffer_t *
rb_node_buffer_new(rb_parser_config_t *config)
#else
static node_buffer_t *
rb_node_buffer_new(void)
#endif
{
    const size_t bucket_size = offsetof(node_buffer_elem_t, buf) + NODE_BUF_DEFAULT_SIZE;
    const size_t alloc_size = sizeof(node_buffer_t) + (bucket_size * 2);
    STATIC_ASSERT(
        integer_overflow,
        offsetof(node_buffer_elem_t, buf) + NODE_BUF_DEFAULT_SIZE
        > sizeof(node_buffer_t) + 2 * sizeof(node_buffer_elem_t));
    node_buffer_t *nb = ruby_xmalloc(alloc_size);
    init_node_buffer_list(&nb->unmarkable, (node_buffer_elem_t*)&nb[1], ruby_xmalloc);
    init_node_buffer_list(&nb->markable, (node_buffer_elem_t*)((size_t)nb->unmarkable.head + bucket_size), ruby_xmalloc);
    nb->local_tables = 0;
    nb->mark_hash = Qnil;
    nb->tokens = Qnil;
#ifdef UNIVERSAL_PARSER
    nb->config = config;
#endif
    return nb;
}
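/* Under UNIVERSAL_PARSER every runtime service used below (allocation, GC
 * marking, hash operations, ...) is routed through the rb_parser_config_t
 * callback table.  These macros expand to ast->node_buffer->config->..., so
 * they assume a local `ast` variable is in scope in the functions that use
 * them. */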
#ifdef UNIVERSAL_PARSER
#undef ruby_xmalloc
#define ruby_xmalloc ast->node_buffer->config->malloc
#undef xfree
#define xfree ast->node_buffer->config->free
#define rb_ident_hash_new ast->node_buffer->config->ident_hash_new
#define rb_xmalloc_mul_add ast->node_buffer->config->xmalloc_mul_add
#define ruby_xrealloc(var,size) (ast->node_buffer->config->realloc_n((void *)var, 1, size))
#define rb_gc_mark ast->node_buffer->config->gc_mark
#define rb_gc_location ast->node_buffer->config->gc_location
#define rb_gc_mark_movable ast->node_buffer->config->gc_mark_movable
#undef Qnil
#define Qnil ast->node_buffer->config->qnil
#define Qtrue ast->node_buffer->config->qtrue
#define NIL_P ast->node_buffer->config->nil_p
#define rb_hash_aset ast->node_buffer->config->hash_aset
#define rb_hash_delete ast->node_buffer->config->hash_delete
#define RB_OBJ_WRITE(old, slot, young) ast->node_buffer->config->obj_write((VALUE)(old), (VALUE *)(slot), (VALUE)(young))
#endif

typedef void node_itr_t(rb_ast_t *ast, void *ctx, NODE *node);
static void iterate_node_values(rb_ast_t *ast, node_buffer_list_t *nb, node_itr_t *func, void *ctx);
/* Set up a NODE structure.
 * NODE is not an object managed by the GC, but it imitates one
 * so that it can work with `RB_TYPE_P(obj, T_NODE)`.
 * This dirty hack is needed because Ripper jumbles NODEs together with
 * objects of other types.
 */
void
rb_node_init(NODE *n, enum node_type type)
{
    RNODE(n)->flags = T_NODE;
    nd_init_type(RNODE(n), type);
    RNODE(n)->nd_loc.beg_pos.lineno = 0;
    RNODE(n)->nd_loc.beg_pos.column = 0;
    RNODE(n)->nd_loc.end_pos.lineno = 0;
    RNODE(n)->nd_loc.end_pos.column = 0;
    RNODE(n)->node_id = -1;
}
const char *
rb_node_name(int node)
{
    switch (node) {
#include "node_name.inc"
      default:
        return 0;
    }
}

#ifdef UNIVERSAL_PARSER
const char *
ruby_node_name(int node)
{
    return rb_node_name(node);
}
#else
const char *
ruby_node_name(int node)
{
    const char *name = rb_node_name(node);

    if (!name) rb_bug("unknown node: %d", node);
    return name;
}
#endif
static void
node_buffer_list_free(rb_ast_t *ast, node_buffer_list_t *nb)
{
    node_buffer_elem_t *nbe = nb->head;
    while (nbe != nb->last) {
        void *buf = nbe;
        xfree(nbe->nodes);
        nbe = nbe->next;
        xfree(buf);
    }

    /* The last node_buffer_elem_t is allocated in the node_buffer_t, so we
     * only need to free the nodes. */
    xfree(nbe->nodes);
}

struct rb_ast_local_table_link {
    struct rb_ast_local_table_link *next;
    // struct rb_ast_id_table {
    int size;
    ID ids[FLEX_ARY_LEN];
    // }
};

static void
free_ast_value(rb_ast_t *ast, void *ctx, NODE *node)
{
    switch (nd_type(node)) {
      case NODE_ARGS:
        xfree(RNODE_ARGS(node)->nd_ainfo);
        break;
    }
}

static void
rb_node_buffer_free(rb_ast_t *ast, node_buffer_t *nb)
{
    iterate_node_values(ast, &nb->unmarkable, free_ast_value, NULL);
    node_buffer_list_free(ast, &nb->unmarkable);
    node_buffer_list_free(ast, &nb->markable);
    struct rb_ast_local_table_link *local_table = nb->local_tables;
    while (local_table) {
        struct rb_ast_local_table_link *next_table = local_table->next;
        xfree(local_table);
        local_table = next_table;
    }
    xfree(nb);
}

#define buf_add_offset(nbe, offset) ((char *)(nbe->buf) + (offset))
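/* Bump-allocate `size` bytes for a node from the current bucket, adding
 * padding so the node starts at an `alignment` boundary.  When the bucket is
 * full, a new bucket with twice the capacity is pushed onto the front of the
 * list. */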
static NODE *
ast_newnode_in_bucket(rb_ast_t *ast, node_buffer_list_t *nb, size_t size, size_t alignment)
{
    size_t padding;
    NODE *ptr;

    padding = alignment - (size_t)buf_add_offset(nb->head, nb->head->used) % alignment;
    padding = padding == alignment ? 0 : padding;

    if (nb->head->used + size + padding > nb->head->allocated) {
        size_t n = nb->head->allocated * 2;
        node_buffer_elem_t *nbe;
        nbe = rb_xmalloc_mul_add(n, sizeof(char *), offsetof(node_buffer_elem_t, buf));
        init_node_buffer_elem(nbe, n, ruby_xmalloc);
        nbe->next = nb->head;
        nb->head = nbe;
        padding = 0; /* malloc returns a suitably aligned address, so no padding is needed */
    }

    ptr = (NODE *)buf_add_offset(nb->head, nb->head->used + padding);
    nb->head->used += (size + padding);
    nb->head->nodes[nb->head->len++] = ptr;
    return ptr;
}
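/* Nodes of these types embed Ruby VALUEs (literals, strings, regexps,
 * symbols), so they are placed in the markable bucket and traced by the
 * GC. */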
RBIMPL_ATTR_PURE()
static bool
nodetype_markable_p(enum node_type type)
{
    switch (type) {
      case NODE_MATCH:
      case NODE_LIT:
      case NODE_STR:
      case NODE_XSTR:
      case NODE_DSTR:
      case NODE_DXSTR:
      case NODE_DREGX:
      case NODE_DSYM:
        return true;
      default:
        return false;
    }
}

NODE *
rb_ast_newnode(rb_ast_t *ast, enum node_type type, size_t size, size_t alignment)
{
    node_buffer_t *nb = ast->node_buffer;
    node_buffer_list_t *bucket =
        (nodetype_markable_p(type) ? &nb->markable : &nb->unmarkable);
    return ast_newnode_in_bucket(ast, bucket, size, alignment);
}

#if RUBY_DEBUG
void
rb_ast_node_type_change(NODE *n, enum node_type type)
{
    enum node_type old_type = nd_type(n);
    if (nodetype_markable_p(old_type) != nodetype_markable_p(type)) {
        rb_bug("node type changed: %s -> %s",
               ruby_node_name(old_type), ruby_node_name(type));
    }
}
#endif
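/* Local variable tables are allocated outside the node buckets and chained
 * off node_buffer_t::local_tables so that rb_node_buffer_free() can release
 * them all at once.  The returned pointer aliases the `size`/`ids` tail of
 * the link, which matches the rb_ast_id_table_t layout. */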
rb_ast_id_table_t *
rb_ast_new_local_table(rb_ast_t *ast, int size)
{
    size_t alloc_size = sizeof(struct rb_ast_local_table_link) + size * sizeof(ID);
    struct rb_ast_local_table_link *link = ruby_xmalloc(alloc_size);
    link->next = ast->node_buffer->local_tables;
    ast->node_buffer->local_tables = link;
    link->size = size;

    return (rb_ast_id_table_t *) &link->size;
}

rb_ast_id_table_t *
rb_ast_resize_latest_local_table(rb_ast_t *ast, int size)
{
    struct rb_ast_local_table_link *link = ast->node_buffer->local_tables;
    size_t alloc_size = sizeof(struct rb_ast_local_table_link) + size * sizeof(ID);
    link = ruby_xrealloc(link, alloc_size);
    ast->node_buffer->local_tables = link;
    link->size = size;

    return (rb_ast_id_table_t *) &link->size;
}

void
rb_ast_delete_node(rb_ast_t *ast, NODE *n)
{
    (void)ast;
    (void)n;
    /* should we implement a freelist? */
}
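/* Under UNIVERSAL_PARSER the AST wrapper object is created through the
 * config->ast_new() callback and the config's reference counter is bumped
 * (it is decremented again in rb_ast_free()); the in-core build wraps the
 * node buffer in an imemo_ast object instead. */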
#ifdef UNIVERSAL_PARSER
rb_ast_t *
rb_ast_new(rb_parser_config_t *config)
{
    node_buffer_t *nb = rb_node_buffer_new(config);
    config->counter++;
    return config->ast_new((VALUE)nb);
}
#else
rb_ast_t *
rb_ast_new(void)
{
    node_buffer_t *nb = rb_node_buffer_new();
    rb_ast_t *ast = (rb_ast_t *)rb_imemo_new(imemo_ast, 0, 0, 0, (VALUE)nb);
    return ast;
}
#endif

static void
iterate_buffer_elements(rb_ast_t *ast, node_buffer_elem_t *nbe, long len, node_itr_t *func, void *ctx)
{
    long cursor;
    for (cursor = 0; cursor < len; cursor++) {
        func(ast, ctx, nbe->nodes[cursor]);
    }
}

static void
iterate_node_values(rb_ast_t *ast, node_buffer_list_t *nb, node_itr_t *func, void *ctx)
{
    node_buffer_elem_t *nbe = nb->head;

    while (nbe) {
        iterate_buffer_elements(ast, nbe, nbe->len, func, ctx);
        nbe = nbe->next;
    }
}
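/* GC callbacks applied to every node in the markable bucket:
 * mark_ast_value() marks the embedded literal as movable and
 * update_ast_value() rewrites the reference after compaction. */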
static void
mark_ast_value(rb_ast_t *ast, void *ctx, NODE *node)
{
#ifdef UNIVERSAL_PARSER
    bug_report_func rb_bug = ast->node_buffer->config->bug;
#endif

    switch (nd_type(node)) {
      case NODE_MATCH:
      case NODE_LIT:
      case NODE_STR:
      case NODE_XSTR:
      case NODE_DSTR:
      case NODE_DXSTR:
      case NODE_DREGX:
      case NODE_DSYM:
        rb_gc_mark_movable(RNODE_LIT(node)->nd_lit);
        break;
      default:
        rb_bug("unreachable node %s", ruby_node_name(nd_type(node)));
    }
}

static void
update_ast_value(rb_ast_t *ast, void *ctx, NODE *node)
{
#ifdef UNIVERSAL_PARSER
    bug_report_func rb_bug = ast->node_buffer->config->bug;
#endif

    switch (nd_type(node)) {
      case NODE_MATCH:
      case NODE_LIT:
      case NODE_STR:
      case NODE_XSTR:
      case NODE_DSTR:
      case NODE_DXSTR:
      case NODE_DREGX:
      case NODE_DSYM:
        RNODE_LIT(node)->nd_lit = rb_gc_location(RNODE_LIT(node)->nd_lit);
        break;
      default:
        rb_bug("unreachable");
    }
}

void
rb_ast_update_references(rb_ast_t *ast)
{
    if (ast->node_buffer) {
        node_buffer_t *nb = ast->node_buffer;

        iterate_node_values(ast, &nb->markable, update_ast_value, NULL);
    }
}

void
rb_ast_mark(rb_ast_t *ast)
{
    if (ast->node_buffer) {
        rb_gc_mark(ast->node_buffer->mark_hash);
        rb_gc_mark(ast->node_buffer->tokens);
        node_buffer_t *nb = ast->node_buffer;
        iterate_node_values(ast, &nb->markable, mark_ast_value, NULL);
        if (ast->body.script_lines) rb_gc_mark(ast->body.script_lines);
    }
}
void
rb_ast_free(rb_ast_t *ast)
{
    if (ast->node_buffer) {
#ifdef UNIVERSAL_PARSER
        rb_parser_config_t *config = ast->node_buffer->config;
#endif

        rb_node_buffer_free(ast, ast->node_buffer);
        ast->node_buffer = 0;
#ifdef UNIVERSAL_PARSER
        config->counter--;
        if (config->counter <= 0) {
            rb_ruby_parser_config_free(config);
        }
#endif
    }
}

static size_t
buffer_list_size(node_buffer_list_t *nb)
{
    size_t size = 0;
    node_buffer_elem_t *nbe = nb->head;
    while (nbe != nb->last) {
        size += offsetof(node_buffer_elem_t, buf) + nbe->used;
        nbe = nbe->next;
    }
    return size;
}

size_t
rb_ast_memsize(const rb_ast_t *ast)
{
    size_t size = 0;
    node_buffer_t *nb = ast->node_buffer;

    if (nb) {
        size += sizeof(node_buffer_t);
        size += buffer_list_size(&nb->unmarkable);
        size += buffer_list_size(&nb->markable);
    }
    return size;
}

void
rb_ast_dispose(rb_ast_t *ast)
{
    rb_ast_free(ast);
}
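/* mark_hash is a lazily created identity hash used to keep arbitrary VALUEs
 * reachable for as long as the AST is alive; entries can be removed again
 * with rb_ast_delete_mark_object(). */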
void
rb_ast_add_mark_object(rb_ast_t *ast, VALUE obj)
{
    if (NIL_P(ast->node_buffer->mark_hash)) {
        RB_OBJ_WRITE(ast, &ast->node_buffer->mark_hash, rb_ident_hash_new());
    }
    rb_hash_aset(ast->node_buffer->mark_hash, obj, Qtrue);
}

void
rb_ast_delete_mark_object(rb_ast_t *ast, VALUE obj)
{
    if (NIL_P(ast->node_buffer->mark_hash)) return;
    rb_hash_delete(ast->node_buffer->mark_hash, obj);
}

VALUE
rb_ast_tokens(rb_ast_t *ast)
{
    return ast->node_buffer->tokens;
}

void
rb_ast_set_tokens(rb_ast_t *ast, VALUE tokens)
{
    RB_OBJ_WRITE(ast, &ast->node_buffer->tokens, tokens);
}

VALUE
rb_node_set_type(NODE *n, enum node_type t)
{
#if RUBY_DEBUG
    rb_ast_node_type_change(n, t);
#endif
    return nd_init_type(n, t);
}