summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorPeter Zhu <[email protected]>2023-09-05 13:34:46 -0400
committerPeter Zhu <[email protected]>2023-09-05 14:32:15 -0400
commit9a8398a18f364d3bcfc8d2744162d3572d9491e4 (patch)
tree4d9202dffe750a9c9377f18755e33d3556ab3246
parent06a1d16dc2108c54090a0fca8b356f39ef353a99 (diff)
Introduce rb_gc_remove_weak
If we're in incremental marking, then Ruby code can execute and deallocate memory buffers that have been passed to rb_gc_mark_weak, which can cause use-after-free bugs.
Notes
Notes: Merged: https://github.com/ruby/ruby/pull/8375
-rw-r--r--gc.c23
-rw-r--r--internal/gc.h1
-rw-r--r--weakmap.c7
3 files changed, 30 insertions, 1 deletion
diff --git a/gc.c b/gc.c
index 8bb9fbbecf..cf53cf5270 100644
--- a/gc.c
+++ b/gc.c
@@ -6924,6 +6924,27 @@ rb_gc_mark_weak(VALUE *ptr)
objspace->profile.weak_references_count++;
}
+void
+rb_gc_remove_weak(VALUE parent_obj, VALUE *ptr)
+{
+ rb_objspace_t *objspace = &rb_objspace;
+
+ /* If we're not incremental marking, then the state of the objects can't
+ * change so we don't need to do anything. */
+ if (!is_incremental_marking(objspace)) return;
+ /* If parent_obj has not been marked, then ptr has not yet been marked
+ * weak, so we don't need to do anything. */
+ if (!RVALUE_MARKED(parent_obj)) return;
+
+ VALUE **ptr_ptr;
+ rb_darray_foreach(objspace->weak_references, i, ptr_ptr) {
+ if (*ptr_ptr == ptr) {
+ *ptr_ptr = NULL;
+ break;
+ }
+ }
+}
+
/* CAUTION: THIS FUNCTION ENABLE *ONLY BEFORE* SWEEPING.
* This function is only for GC_END_MARK timing.
*/
@@ -8151,6 +8172,8 @@ gc_update_weak_references(rb_objspace_t *objspace)
size_t retained_weak_references_count = 0;
VALUE **ptr_ptr;
rb_darray_foreach(objspace->weak_references, i, ptr_ptr) {
+ if (!ptr_ptr) continue;
+
VALUE obj = **ptr_ptr;
if (RB_SPECIAL_CONST_P(obj)) continue;
diff --git a/internal/gc.h b/internal/gc.h
index be40a7a2f7..28b82f4196 100644
--- a/internal/gc.h
+++ b/internal/gc.h
@@ -236,6 +236,7 @@ VALUE rb_define_finalizer_no_check(VALUE obj, VALUE block);
void rb_gc_mark_and_move(VALUE *ptr);
void rb_gc_mark_weak(VALUE *ptr);
+void rb_gc_remove_weak(VALUE parent_obj, VALUE *ptr);
#define rb_gc_mark_and_move_ptr(ptr) do { \
VALUE _obj = (VALUE)*(ptr); \
diff --git a/weakmap.c b/weakmap.c
index 92aed105b1..d79f5b3f94 100644
--- a/weakmap.c
+++ b/weakmap.c
@@ -380,7 +380,7 @@ wmap_aset_replace(st_data_t *key, st_data_t *val, st_data_t new_key_ptr, int exi
VALUE new_val = *(((VALUE *)new_key_ptr) + 1);
if (existing) {
- assert(orig_pair[0] == *(VALUE *)new_key);
+ assert(*(VALUE *)*key == new_key);
}
else {
VALUE *pair = xmalloc(sizeof(VALUE) * 2);
@@ -450,6 +450,9 @@ wmap_delete(VALUE self, VALUE key)
if (st_delete(w->table, &orig_key_data, &orig_val_data)) {
VALUE orig_val = *(VALUE *)orig_val_data;
+ rb_gc_remove_weak(self, (VALUE *)orig_key_data);
+ rb_gc_remove_weak(self, (VALUE *)orig_val_data);
+
wmap_free_entry((VALUE *)orig_key_data, (VALUE *)orig_val_data);
if (wmap_live_p(orig_val)) {
@@ -776,6 +779,8 @@ wkmap_delete(VALUE self, VALUE key)
if (st_delete(w->table, &orig_key_data, &orig_val_data)) {
VALUE orig_val = (VALUE)orig_val_data;
+ rb_gc_remove_weak(self, (VALUE *)orig_key_data);
+
ruby_sized_xfree((VALUE *)orig_key_data, sizeof(VALUE));
return orig_val;