Diffstat (limited to 'shape.h')
-rw-r--r-- | shape.h | 73
1 files changed, 58 insertions, 15 deletions
@@ -23,6 +23,10 @@ STATIC_ASSERT(shape_id_num_bits, SHAPE_ID_NUM_BITS == sizeof(shape_id_t) * CHAR_
 #define SHAPE_ID_HEAP_INDEX_MAX ((1 << SHAPE_ID_HEAP_INDEX_BITS) - 1)
 #define SHAPE_ID_HEAP_INDEX_MASK (SHAPE_ID_HEAP_INDEX_MAX << SHAPE_ID_HEAP_INDEX_OFFSET)
 
+// This mask allows checking whether a shape_id contains any ivar.
+// It relies on ROOT_SHAPE_WITH_OBJ_ID == 1.
+#define SHAPE_ID_HAS_IVAR_MASK (SHAPE_ID_FL_TOO_COMPLEX | (SHAPE_ID_OFFSET_MASK - 1))
+
 // The interpreter doesn't care about frozen status or slot size when reading ivars.
 // So we normalize shape_id by clearing these bits to improve cache hits.
 // JITs however might care about it.
@@ -45,8 +49,6 @@ typedef uint32_t redblack_id_t;
 #define ROOT_TOO_COMPLEX_WITH_OBJ_ID (ROOT_SHAPE_WITH_OBJ_ID | SHAPE_ID_FL_TOO_COMPLEX | SHAPE_ID_FL_HAS_OBJECT_ID)
 #define SPECIAL_CONST_SHAPE_ID (ROOT_SHAPE_ID | SHAPE_ID_FL_FROZEN)
 
-extern ID ruby_internal_object_id;
-
 typedef struct redblack_node redblack_node_t;
 
 struct rb_shape {
@@ -92,7 +94,10 @@ typedef struct {
     redblack_node_t *shape_cache;
     unsigned int cache_size;
 } rb_shape_tree_t;
-RUBY_EXTERN rb_shape_tree_t *rb_shape_tree_ptr;
+
+RUBY_SYMBOL_EXPORT_BEGIN
+RUBY_EXTERN rb_shape_tree_t rb_shape_tree;
+RUBY_SYMBOL_EXPORT_END
 
 union rb_attr_index_cache {
     uint64_t pack;
@@ -102,13 +107,6 @@ union rb_attr_index_cache {
     } unpack;
 };
 
-static inline rb_shape_tree_t *
-rb_current_shape_tree(void)
-{
-    return rb_shape_tree_ptr;
-}
-#define GET_SHAPE_TREE() rb_current_shape_tree()
-
 static inline shape_id_t
 RBASIC_SHAPE_ID(VALUE obj)
 {
@@ -138,8 +136,6 @@ RBASIC_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
 {
     RUBY_ASSERT(!RB_SPECIAL_CONST_P(obj));
     RUBY_ASSERT(!RB_TYPE_P(obj, T_IMEMO) || IMEMO_TYPE_P(obj, imemo_class_fields));
-    RUBY_ASSERT(rb_shape_verify_consistency(obj, shape_id));
-
 #if RBASIC_SHAPE_ID_FIELD
     RBASIC(obj)->shape_id = (VALUE)shape_id;
 #else
@@ -147,13 +143,20 @@ RBASIC_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
     RBASIC(obj)->flags &= SHAPE_FLAG_MASK;
     RBASIC(obj)->flags |= ((VALUE)(shape_id) << SHAPE_FLAG_SHIFT);
 #endif
+    RUBY_ASSERT(rb_shape_verify_consistency(obj, shape_id));
 }
 
-#define RSHAPE rb_shape_lookup
+static inline rb_shape_t *
+RSHAPE(shape_id_t shape_id)
+{
+    uint32_t offset = (shape_id & SHAPE_ID_OFFSET_MASK);
+    RUBY_ASSERT(offset != INVALID_SHAPE_ID);
+
+    return &rb_shape_tree.shape_list[offset];
+}
 
 int32_t rb_shape_id_offset(void);
 
-RUBY_FUNC_EXPORTED rb_shape_t *rb_shape_lookup(shape_id_t shape_id);
 RUBY_FUNC_EXPORTED shape_id_t rb_obj_shape_id(VALUE obj);
 shape_id_t rb_shape_get_next_iv_shape(shape_id_t shape_id, ID id);
 bool rb_shape_get_iv_index(shape_id_t shape_id, ID id, attr_index_t *value);
@@ -238,7 +241,7 @@ RSHAPE_EMBEDDED_CAPACITY(shape_id_t shape_id)
 {
     uint8_t heap_index = rb_shape_heap_index(shape_id);
     if (heap_index) {
-        return GET_SHAPE_TREE()->capacities[heap_index - 1];
+        return rb_shape_tree.capacities[heap_index - 1];
     }
     return 0;
 }
@@ -327,6 +330,46 @@ rb_shape_obj_has_id(VALUE obj)
     return rb_shape_has_object_id(RBASIC_SHAPE_ID(obj));
 }
 
+static inline bool
+rb_shape_has_ivars(shape_id_t shape_id)
+{
+    return shape_id & SHAPE_ID_HAS_IVAR_MASK;
+}
+
+static inline bool
+rb_shape_obj_has_ivars(VALUE obj)
+{
+    return rb_shape_has_ivars(RBASIC_SHAPE_ID(obj));
+}
+
+static inline bool
+rb_shape_has_fields(shape_id_t shape_id)
+{
+    return shape_id & (SHAPE_ID_OFFSET_MASK | SHAPE_ID_FL_TOO_COMPLEX);
+}
+
+static inline bool
+rb_shape_obj_has_fields(VALUE obj)
+{
+    return rb_shape_has_fields(RBASIC_SHAPE_ID(obj));
+}
+
+static inline bool
+rb_obj_exivar_p(VALUE obj)
+{
+    switch (TYPE(obj)) {
+      case T_NONE:
+      case T_OBJECT:
+      case T_CLASS:
+      case T_MODULE:
+      case T_IMEMO:
+        return false;
+      default:
+        break;
+    }
+    return rb_shape_obj_has_fields(obj);
+}
+
 // For ext/objspace
 RUBY_SYMBOL_EXPORT_BEGIN
 typedef void each_shape_callback(shape_id_t shape_id, void *data);
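
Why the new SHAPE_ID_HAS_IVAR_MASK works: per the comment in the hunk above, the only field-less shape offsets are ROOT_SHAPE_ID (0) and ROOT_SHAPE_WITH_OBJ_ID (1), so clearing the lowest offset bit (SHAPE_ID_OFFSET_MASK - 1) makes both read as "no ivars", while any offset of 2 or more keeps at least one bit set; the too-complex flag is OR'ed in so complex shapes also count as having ivars. Below is a minimal standalone sketch of that bit trick, not part of shape.h: the bit widths and flag position are made-up placeholders, and only ROOT_SHAPE_WITH_OBJ_ID == 1 and the mask expression itself come from the diff.

/* sketch.c -- illustrative only; constants here are NOT the real shape.h layout */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint32_t shape_id_t;

#define SHAPE_ID_OFFSET_MASK    ((shape_id_t)0x7FFFF)   /* placeholder width */
#define SHAPE_ID_FL_TOO_COMPLEX ((shape_id_t)1 << 19)   /* placeholder flag bit */

#define ROOT_SHAPE_ID          0 /* no fields at all */
#define ROOT_SHAPE_WITH_OBJ_ID 1 /* only the hidden object_id field */

/* Same expression as the diff: clear the lowest offset bit, keep the rest. */
#define SHAPE_ID_HAS_IVAR_MASK (SHAPE_ID_FL_TOO_COMPLEX | (SHAPE_ID_OFFSET_MASK - 1))

static bool
has_ivars(shape_id_t shape_id)
{
    return shape_id & SHAPE_ID_HAS_IVAR_MASK;
}

int
main(void)
{
    printf("ROOT_SHAPE_ID:           %d\n", has_ivars(ROOT_SHAPE_ID));           /* 0 */
    printf("ROOT_SHAPE_WITH_OBJ_ID:  %d\n", has_ivars(ROOT_SHAPE_WITH_OBJ_ID));  /* 0 */
    printf("offset 2 (has an ivar):  %d\n", has_ivars(2));                       /* 1 */
    printf("too-complex shape:       %d\n", has_ivars(SHAPE_ID_FL_TOO_COMPLEX)); /* 1 */
    return 0;
}

With this, rb_shape_has_ivars() in the diff is a single AND against the shape_id, so "has real ivars" can be distinguished from "only carries an object_id" without dereferencing the shape, which is also why RSHAPE() can stay a plain array index into rb_shape_tree.shape_list.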