author    Jean Boussier <[email protected]>    2025-04-19 11:45:02 +0900
committer Jean Boussier <[email protected]>    2025-04-19 12:42:14 +0900
commit    0f25886facd0b5f8e0998ec78ce3147d62c61bef
tree      ef4dd035e3f881b4188c1c00838579a6b6ccc595
parent    52487705d09aa4d304f3513594650c8bfbaad774
Implement dsize function for `fstring_table_type`
The fstring table size used to be reported as part of the VM size, but since it was refactored to be lock-less it was no longer reported. Since it's now wrapped by a `T_DATA`, we can implement its `dsize` function and get a valuable insight into the size of the table.

```
{"address":"0x100ebff18", "type":"DATA", "shape_id":0, "slot_size":80, "struct":"VM/fstring_table", "memsize":131176, ...
```
Notes:
Merged: https://github.com/ruby/ruby/pull/13138
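A minimal sketch (not part of the commit) of how the newly reported size could be observed from Ruby, assuming the `objspace` extension is available; the fstring table is a VM-internal object, so it only appears in a full heap dump:

```
require 'objspace'

# Dump the whole heap, internal objects included, to a temp file.
dump = ObjectSpace.dump_all(output: :file)

# Find the fstring table entry by its wrap_struct_name and print it,
# including the "memsize" field provided by the new dsize function.
entry = File.foreach(dump.path).find { |line| line.include?('"VM/fstring_table"') }
puts entry
```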
Diffstat (limited to 'string.c')
-rw-r--r--  string.c | 47
 1 file changed, 36 insertions(+), 11 deletions(-)
diff --git a/string.c b/string.c
index aeb494eeba..74acaac8d1 100644
--- a/string.c
+++ b/string.c
@@ -546,13 +546,20 @@ fstring_table_free(void *ptr)
xfree(table->entries);
}
+static size_t
+fstring_table_size(const void *ptr)
+{
+ const struct fstring_table_struct *table = ptr;
+ return sizeof(struct fstring_table_struct) + sizeof(struct fstring_table_entry) * table->capacity;
+}
+
// We declare a type for the table so that we can lean on Ruby's GC for deferred reclamation
static const rb_data_type_t fstring_table_type = {
- .wrap_struct_name = "fstring_table",
+ .wrap_struct_name = "VM/fstring_table",
.function = {
.dmark = NULL,
.dfree = fstring_table_free,
- .dsize = NULL,
+ .dsize = fstring_table_size,
},
.flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
};
@@ -609,7 +616,9 @@ struct fstring_table_probe {
int mask;
};
-static int fstring_table_probe_start(struct fstring_table_probe *probe, struct fstring_table_struct *table, VALUE hash_code) {
+static int
+fstring_table_probe_start(struct fstring_table_probe *probe, struct fstring_table_struct *table, VALUE hash_code)
+{
RUBY_ASSERT((table->capacity & (table->capacity - 1)) == 0);
probe->d = 0;
probe->mask = table->capacity - 1;
@@ -617,7 +626,9 @@ static int fstring_table_probe_start(struct fstring_table_probe *probe, struct f
return probe->idx;
}
-static int fstring_table_probe_next(struct fstring_table_probe *probe) {
+static int
+fstring_table_probe_next(struct fstring_table_probe *probe)
+{
probe->d++;
probe->idx = (probe->idx + probe->d) & probe->mask;
return probe->idx;
@@ -626,7 +637,9 @@ static int fstring_table_probe_next(struct fstring_table_probe *probe) {
#define RUBY_ATOMIC_VALUE_LOAD(x) (VALUE)(RUBY_ATOMIC_PTR_LOAD(x))
-static void fstring_insert_on_resize(struct fstring_table_struct *table, VALUE hash_code, VALUE value) {
+static void
+fstring_insert_on_resize(struct fstring_table_struct *table, VALUE hash_code, VALUE value)
+{
struct fstring_table_probe probe;
int idx = fstring_table_probe_start(&probe, table, hash_code);
@@ -653,7 +666,9 @@ static void fstring_insert_on_resize(struct fstring_table_struct *table, VALUE h
}
// Rebuilds the table
-static void fstring_try_resize(VALUE old_table_obj) {
+static void
+fstring_try_resize(VALUE old_table_obj)
+{
RB_VM_LOCK_ENTER();
// Check if another thread has already resized
@@ -710,7 +725,9 @@ end:
RB_VM_LOCK_LEAVE();
}
-static VALUE fstring_find_or_insert(VALUE hash_code, VALUE value, struct fstr_update_arg *arg) {
+static VALUE
+fstring_find_or_insert(VALUE hash_code, VALUE value, struct fstr_update_arg *arg)
+{
struct fstring_table_probe probe;
bool inserting = false;
int idx;
@@ -790,7 +807,9 @@ static VALUE fstring_find_or_insert(VALUE hash_code, VALUE value, struct fstr_up
// Removes an fstring from the table. Compares by identity
-static void fstring_delete(VALUE hash_code, VALUE value) {
+static void
+fstring_delete(VALUE hash_code, VALUE value)
+{
// Delete is never called concurrently, so atomic operations are unnecessary
VALUE table_obj = RUBY_ATOMIC_VALUE_LOAD(fstring_table_obj);
RUBY_ASSERT_ALWAYS(table_obj);
@@ -849,7 +868,9 @@ register_fstring(VALUE str, bool copy, bool force_precompute_hash)
return result;
}
-void rb_fstring_foreach_with_replace(st_foreach_check_callback_func *func, st_update_callback_func *replace, st_data_t arg) {
+void
+rb_fstring_foreach_with_replace(st_foreach_check_callback_func *func, st_update_callback_func *replace, st_data_t arg)
+{
// Assume locking and barrier (which there is no assert for)
ASSERT_vm_locking();
@@ -888,13 +909,17 @@ void rb_fstring_foreach_with_replace(st_foreach_check_callback_func *func, st_up
}
}
-bool rb_obj_is_fstring_table(VALUE obj) {
+bool
+rb_obj_is_fstring_table(VALUE obj)
+{
ASSERT_vm_locking();
return obj == fstring_table_obj;
}
-void rb_gc_free_fstring(VALUE obj) {
+void
+rb_gc_free_fstring(VALUE obj)
+{
// Assume locking and barrier (which there is no assert for)
ASSERT_vm_locking();