
Commit 87bc5da

peterzhu2118 authored and jhawthorn committed
Don't immediately promote children of old objects
[Feature #19678]

References from an old object to a write barrier protected young object will no longer immediately promote the young object. Instead, the young object ages just like any other object and has to survive three collections before being promoted to the old generation.

References from an old object to a write barrier unprotected object will place the parent object in the remember set so that it is marked during minor collections. This allows the child object to be reclaimed in minor collections, at the cost of increased time for minor collections.

On one of [Shopify's highest traffic Ruby apps, Storefront Renderer](https://shopify.engineering/how-shopify-reduced-storefront-response-times-rewrite), we saw significant improvements after deploying this feature in production. Comparing the GC time and response time of web workers with the original behaviour (non-experimental group) against workers with the new behaviour (experimental group), we spend significantly less time in GC: 0.81x on average, 0.88x at p99, and 0.45x at p99.9. This translates into improvements in average response time (0.96x) and p99 response time (0.92x).
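The policy described above can be summarised in a small model. The following is a minimal, self-contained C sketch, not CRuby's gc.c code: struct toy_obj, OLD_AGE, and the two functions are illustrative stand-ins for the RVALUE age bits, RVALUE_OLD_AGE, the write-barrier-unprotected flag, and the per-page remember set.

#include <stdbool.h>
#include <stdio.h>

#define OLD_AGE 3  /* assumption: mirrors RVALUE_OLD_AGE, promotion after surviving 3 GCs */

struct toy_obj {
    int  age;            /* number of minor GCs survived */
    bool wb_protected;   /* write barrier protected? */
    bool remembered;     /* object sits in the remember set, scanned every minor GC */
};

/* Write barrier hit: an old `parent` now references `child`. */
static void
old_to_young_reference(struct toy_obj *parent, struct toy_obj *child)
{
    if (!child->wb_protected || child->age < OLD_AGE) {
        /* New behaviour: the child is NOT promoted. The old parent is
         * remembered instead, so minor GCs can still find the child
         * while it ages (or gets reclaimed) normally. */
        parent->remembered = true;
    }
}

/* Aging step applied to each WB-protected object that survives a minor GC. */
static void
survive_minor_gc(struct toy_obj *obj)
{
    if (obj->wb_protected && obj->age < OLD_AGE) {
        obj->age++;  /* reaching OLD_AGE means promotion to the old generation */
    }
}

int
main(void)
{
    struct toy_obj parent = { OLD_AGE, true, false };  /* already old */
    struct toy_obj child  = { 0, true, false };        /* freshly allocated, WB protected */

    old_to_young_reference(&parent, &child);
    printf("child promoted immediately? %s\n", child.age >= OLD_AGE ? "yes" : "no");
    printf("parent remembered?          %s\n", parent.remembered ? "yes" : "no");

    for (int gc = 1; gc <= 3; gc++) {
        survive_minor_gc(&child);
        printf("after minor GC %d: child old? %s\n", gc, child.age >= OLD_AGE ? "yes" : "no");
    }
    return 0;
}

Compiled and run, the sketch prints that the child is not promoted on the spot, that the old parent lands in the remember set, and that the child only becomes old after surviving three collections, which is the behaviour exercised by the new test_old_to_young_reference test further down.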
1 parent eac197c commit 87bc5da

2 files changed: 48 additions, 78 deletions


gc.c

Lines changed: 22 additions & 78 deletions
@@ -967,6 +967,8 @@ struct heap_page {
     bits_t uncollectible_bits[HEAP_PAGE_BITMAP_LIMIT];
     bits_t marking_bits[HEAP_PAGE_BITMAP_LIMIT];
 
+    bits_t remembered_bits[HEAP_PAGE_BITMAP_LIMIT];
+
     /* If set, the object is not movable */
     bits_t pinned_bits[HEAP_PAGE_BITMAP_LIMIT];
 };
@@ -1548,7 +1550,8 @@ check_rvalue_consistency_force(const VALUE obj, int terminate)
     const int wb_unprotected_bit = RVALUE_WB_UNPROTECTED_BITMAP(obj) != 0;
     const int uncollectible_bit = RVALUE_UNCOLLECTIBLE_BITMAP(obj) != 0;
     const int mark_bit = RVALUE_MARK_BITMAP(obj) != 0;
-    const int marking_bit = RVALUE_MARKING_BITMAP(obj) != 0, remembered_bit = marking_bit;
+    const int marking_bit = RVALUE_MARKING_BITMAP(obj) != 0;
+    const int remembered_bit = MARKED_IN_BITMAP(GET_HEAP_PAGE(obj)->remembered_bits, obj) != 0;
     const int age = RVALUE_FLAGS_AGE(RBASIC(obj)->flags);
 
     if (GET_HEAP_PAGE(obj)->flags.in_tomb) {
@@ -1682,7 +1685,7 @@ static inline int
 RVALUE_REMEMBERED(VALUE obj)
 {
     check_rvalue_consistency(obj);
-    return RVALUE_MARKING_BITMAP(obj) != 0;
+    return MARKED_IN_BITMAP(GET_HEAP_PAGE(obj)->remembered_bits, obj) != 0;
 }
 
 static inline int
@@ -1763,31 +1766,6 @@ RVALUE_AGE_INC(rb_objspace_t *objspace, VALUE obj)
     check_rvalue_consistency(obj);
 }
 
-/* set age to RVALUE_OLD_AGE */
-static inline void
-RVALUE_AGE_SET_OLD(rb_objspace_t *objspace, VALUE obj)
-{
-    check_rvalue_consistency(obj);
-    GC_ASSERT(!RVALUE_OLD_P(obj));
-
-    RBASIC(obj)->flags = RVALUE_FLAGS_AGE_SET(RBASIC(obj)->flags, RVALUE_OLD_AGE);
-    RVALUE_OLD_UNCOLLECTIBLE_SET(objspace, obj);
-
-    check_rvalue_consistency(obj);
-}
-
-/* set age to RVALUE_OLD_AGE - 1 */
-static inline void
-RVALUE_AGE_SET_CANDIDATE(rb_objspace_t *objspace, VALUE obj)
-{
-    check_rvalue_consistency(obj);
-    GC_ASSERT(!RVALUE_OLD_P(obj));
-
-    RBASIC(obj)->flags = RVALUE_FLAGS_AGE_SET(RBASIC(obj)->flags, RVALUE_OLD_AGE - 1);
-
-    check_rvalue_consistency(obj);
-}
-
 static inline void
 RVALUE_DEMOTE_RAW(rb_objspace_t *objspace, VALUE obj)
 {
@@ -1802,7 +1780,7 @@ RVALUE_DEMOTE(rb_objspace_t *objspace, VALUE obj)
     GC_ASSERT(RVALUE_OLD_P(obj));
 
     if (!is_incremental_marking(objspace) && RVALUE_REMEMBERED(obj)) {
-        CLEAR_IN_BITMAP(GET_HEAP_MARKING_BITS(obj), obj);
+        CLEAR_IN_BITMAP(GET_HEAP_PAGE(obj)->remembered_bits, obj);
     }
 
     RVALUE_DEMOTE_RAW(objspace, obj);
@@ -6976,31 +6954,8 @@ rgengc_check_relation(rb_objspace_t *objspace, VALUE obj)
     const VALUE old_parent = objspace->rgengc.parent_object;
 
     if (old_parent) { /* parent object is old */
-        if (RVALUE_WB_UNPROTECTED(obj)) {
-            if (gc_remember_unprotected(objspace, obj)) {
-                gc_report(2, objspace, "relation: (O->S) %s -> %s\n", obj_info(old_parent), obj_info(obj));
-            }
-        }
-        else {
-            if (!RVALUE_OLD_P(obj)) {
-                if (RVALUE_MARKED(obj)) {
-                    /* An object pointed from an OLD object should be OLD. */
-                    gc_report(2, objspace, "relation: (O->unmarked Y) %s -> %s\n", obj_info(old_parent), obj_info(obj));
-                    RVALUE_AGE_SET_OLD(objspace, obj);
-                    if (is_incremental_marking(objspace)) {
-                        if (!RVALUE_MARKING(obj)) {
-                            gc_grey(objspace, obj);
-                        }
-                    }
-                    else {
-                        rgengc_remember(objspace, obj);
-                    }
-                }
-                else {
-                    gc_report(2, objspace, "relation: (O->Y) %s -> %s\n", obj_info(old_parent), obj_info(obj));
-                    RVALUE_AGE_SET_CANDIDATE(objspace, obj);
-                }
-            }
+        if (RVALUE_WB_UNPROTECTED(obj) || !RVALUE_OLD_P(obj)) {
+            rgengc_remember(objspace, old_parent);
         }
     }
 
@@ -8791,9 +8746,7 @@ static int
 rgengc_remembersetbits_set(rb_objspace_t *objspace, VALUE obj)
 {
     struct heap_page *page = GET_HEAP_PAGE(obj);
-    bits_t *bits = &page->marking_bits[0];
-
-    GC_ASSERT(!is_incremental_marking(objspace));
+    bits_t *bits = &page->remembered_bits[0];
 
     if (MARKED_IN_BITMAP(bits, obj)) {
         return FALSE;
@@ -8886,7 +8839,7 @@ rgengc_rememberset_mark(rb_objspace_t *objspace, rb_heap_t *heap)
         if (page->flags.has_remembered_objects | page->flags.has_uncollectible_shady_objects) {
             uintptr_t p = page->start;
             bits_t bitset, bits[HEAP_PAGE_BITMAP_LIMIT];
-            bits_t *marking_bits = page->marking_bits;
+            bits_t *remembered_bits = page->remembered_bits;
             bits_t *uncollectible_bits = page->uncollectible_bits;
             bits_t *wb_unprotected_bits = page->wb_unprotected_bits;
 #if PROFILE_REMEMBERSET_MARK
@@ -8895,8 +8848,8 @@ rgengc_rememberset_mark(rb_objspace_t *objspace, rb_heap_t *heap)
             else if (page->flags.has_uncollectible_shady_objects) has_shady++;
 #endif
             for (j=0; j<HEAP_PAGE_BITMAP_LIMIT; j++) {
-                bits[j] = marking_bits[j] | (uncollectible_bits[j] & wb_unprotected_bits[j]);
-                marking_bits[j] = 0;
+                bits[j] = remembered_bits[j] | (uncollectible_bits[j] & wb_unprotected_bits[j]);
+                remembered_bits[j] = 0;
             }
             page->flags.has_remembered_objects = FALSE;
 
@@ -8933,6 +8886,7 @@ rgengc_mark_and_rememberset_clear(rb_objspace_t *objspace, rb_heap_t *heap)
         memset(&page->mark_bits[0], 0, HEAP_PAGE_BITMAP_SIZE);
         memset(&page->uncollectible_bits[0], 0, HEAP_PAGE_BITMAP_SIZE);
         memset(&page->marking_bits[0], 0, HEAP_PAGE_BITMAP_SIZE);
+        memset(&page->remembered_bits[0], 0, HEAP_PAGE_BITMAP_SIZE);
         memset(&page->pinned_bits[0], 0, HEAP_PAGE_BITMAP_SIZE);
         page->flags.has_uncollectible_shady_objects = FALSE;
         page->flags.has_remembered_objects = FALSE;
@@ -9006,18 +8960,7 @@ gc_writebarrier_incremental(VALUE a, VALUE b, rb_objspace_t *objspace)
         }
     }
     else if (RVALUE_OLD_P(a) && !RVALUE_OLD_P(b)) {
-        if (!RVALUE_WB_UNPROTECTED(b)) {
-            gc_report(1, objspace, "gc_writebarrier_incremental: [GN] %p -> %s\n", (void *)a, obj_info(b));
-            RVALUE_AGE_SET_OLD(objspace, b);
-
-            if (RVALUE_BLACK_P(b)) {
-                gc_grey(objspace, b);
-            }
-        }
-        else {
-            gc_report(1, objspace, "gc_writebarrier_incremental: [LL] %p -> %s\n", (void *)a, obj_info(b));
-            gc_remember_unprotected(objspace, b);
-        }
+        rgengc_remember(objspace, a);
     }
 
     if (UNLIKELY(objspace->flags.during_compacting)) {
@@ -9947,7 +9890,6 @@ gc_move(rb_objspace_t *objspace, VALUE scan, VALUE free, size_t src_slot_size, s
     int marked;
     int wb_unprotected;
     int uncollectible;
-    int marking;
    RVALUE *dest = (RVALUE *)free;
    RVALUE *src = (RVALUE *)scan;
 
@@ -9956,17 +9898,19 @@ gc_move(rb_objspace_t *objspace, VALUE scan, VALUE free, size_t src_slot_size, s
     GC_ASSERT(BUILTIN_TYPE(scan) != T_NONE);
     GC_ASSERT(!MARKED_IN_BITMAP(GET_HEAP_MARK_BITS(free), free));
 
+    GC_ASSERT(!RVALUE_MARKING((VALUE)src));
+
     /* Save off bits for current object. */
     marked = rb_objspace_marked_object_p((VALUE)src);
     wb_unprotected = RVALUE_WB_UNPROTECTED((VALUE)src);
     uncollectible = RVALUE_UNCOLLECTIBLE((VALUE)src);
-    marking = RVALUE_MARKING((VALUE)src);
+    bool remembered = RVALUE_REMEMBERED((VALUE)src);
 
     /* Clear bits for eventual T_MOVED */
     CLEAR_IN_BITMAP(GET_HEAP_MARK_BITS((VALUE)src), (VALUE)src);
     CLEAR_IN_BITMAP(GET_HEAP_WB_UNPROTECTED_BITS((VALUE)src), (VALUE)src);
     CLEAR_IN_BITMAP(GET_HEAP_UNCOLLECTIBLE_BITS((VALUE)src), (VALUE)src);
-    CLEAR_IN_BITMAP(GET_HEAP_MARKING_BITS((VALUE)src), (VALUE)src);
+    CLEAR_IN_BITMAP(GET_HEAP_PAGE((VALUE)src)->remembered_bits, (VALUE)src);
 
     if (FL_TEST((VALUE)src, FL_EXIVAR)) {
         /* Resizing the st table could cause a malloc */
@@ -10005,11 +9949,11 @@ gc_move(rb_objspace_t *objspace, VALUE scan, VALUE free, size_t src_slot_size, s
     memset(src, 0, src_slot_size);
 
     /* Set bits for object in new location */
-    if (marking) {
-        MARK_IN_BITMAP(GET_HEAP_MARKING_BITS((VALUE)dest), (VALUE)dest);
+    if (remembered) {
+        MARK_IN_BITMAP(GET_HEAP_PAGE(dest)->remembered_bits, (VALUE)dest);
     }
     else {
-        CLEAR_IN_BITMAP(GET_HEAP_MARKING_BITS((VALUE)dest), (VALUE)dest);
+        CLEAR_IN_BITMAP(GET_HEAP_PAGE(dest)->remembered_bits, (VALUE)dest);
     }
 
     if (marked) {
@@ -10781,7 +10725,7 @@ gc_ref_update(void *vstart, void *vend, size_t stride, rb_objspace_t * objspace,
             if (RVALUE_WB_UNPROTECTED(v)) {
                 page->flags.has_uncollectible_shady_objects = TRUE;
             }
-            if (RVALUE_PAGE_MARKING(page, v)) {
+            if (RVALUE_REMEMBERED(v)) {
                 page->flags.has_remembered_objects = TRUE;
             }
             if (page->flags.before_sweep) {
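The data-structure change that most of the gc.c hunks above revolve around is a dedicated per-page remembered_bits bitmap, so the remember set no longer piggybacks on marking_bits (which is presumably why the GC_ASSERT(!is_incremental_marking(objspace)) in rgengc_remembersetbits_set could be dropped and gc_writebarrier_incremental can now simply call rgengc_remember). The sketch below is only a generic illustration of how such one-bit-per-slot page bitmaps and the rememberset snapshot loop work; the type and constant names (page_bitmap, SLOTS_PER_PAGE, etc.) are assumptions, not CRuby's actual bits_t / MARK_IN_BITMAP definitions.

#include <limits.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative sizes only; CRuby computes these from its heap page layout. */
#define SLOTS_PER_PAGE 4096
#define BITS_PER_WORD  (sizeof(uint64_t) * CHAR_BIT)
#define BITMAP_WORDS   (SLOTS_PER_PAGE / BITS_PER_WORD)

/* One bit per heap slot, like mark_bits, marking_bits, or the new remembered_bits. */
typedef struct {
    uint64_t words[BITMAP_WORDS];
} page_bitmap;

static void
bitmap_set(page_bitmap *bm, size_t slot)
{
    bm->words[slot / BITS_PER_WORD] |= UINT64_C(1) << (slot % BITS_PER_WORD);
}

static bool
bitmap_test(const page_bitmap *bm, size_t slot)
{
    return (bm->words[slot / BITS_PER_WORD] >> (slot % BITS_PER_WORD)) & 1;
}

/* Build the minor-GC scan set the way rgengc_rememberset_mark does above:
 * remembered objects plus old (uncollectible) write-barrier-unprotected
 * objects; the remembered bits are consumed (zeroed) in the process. */
static void
rememberset_snapshot(page_bitmap *out, page_bitmap *remembered,
                     const page_bitmap *uncollectible, const page_bitmap *wb_unprotected)
{
    for (size_t j = 0; j < BITMAP_WORDS; j++) {
        out->words[j] = remembered->words[j]
                      | (uncollectible->words[j] & wb_unprotected->words[j]);
        remembered->words[j] = 0;
    }
}

int
main(void)
{
    page_bitmap remembered = {{0}}, uncollectible = {{0}}, wb_unprotected = {{0}}, scan = {{0}};

    bitmap_set(&remembered, 10);      /* slot 10: object remembered by the write barrier */
    bitmap_set(&uncollectible, 70);   /* slot 70: old (uncollectible) ... */
    bitmap_set(&wb_unprotected, 70);  /* ... and WB-unprotected object */

    rememberset_snapshot(&scan, &remembered, &uncollectible, &wb_unprotected);

    printf("slot 10 scanned: %d, slot 70 scanned: %d, slot 10 still remembered: %d\n",
           bitmap_test(&scan, 10), bitmap_test(&scan, 70), bitmap_test(&remembered, 10));
    return 0;
}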

test/ruby/test_gc.rb

Lines changed: 26 additions & 0 deletions
@@ -755,4 +755,30 @@ def test_ast_node_buffer
     Module.new.class_eval( (["# shareable_constant_value: literal"] +
       (0..100000).map {|i| "M#{ i } = {}" }).join("\n"))
   end
+
+  def test_old_to_young_reference
+    original_gc_disabled = GC.disable
+
+    require "objspace"
+
+    old_obj = Object.new
+    4.times { GC.start }
+
+    assert_include ObjectSpace.dump(old_obj), '"old":true'
+
+    young_obj = Object.new
+    old_obj.instance_variable_set(:@test, young_obj)
+
+    # Not immediately promoted to old generation
+    3.times do
+      assert_not_include ObjectSpace.dump(young_obj), '"old":true'
+      GC.start
+    end
+
+    # Takes 4 GC to promote to old generation
+    GC.start
+    assert_include ObjectSpace.dump(young_obj), '"old":true'
+  ensure
+    GC.enable if !original_gc_disabled
+  end
 end
