repairs to initial merge of incremental promotion of objects

Matthew Flatt 2020-08-14 06:10:13 -06:00
parent 48487ed6fb
commit 3aa2d99000
20 changed files with 351 additions and 219 deletions


@@ -47,13 +47,13 @@ MTZlibLib=..\zlib\zlibmt.lib
MDLZ4Lib=..\lz4\lib\liblz4.lib
MTLZ4Lib=..\lz4\lib\liblz4mt.lib
-csrc=statics.c segment.c alloc.c symbol.c intern.c gcwrapper.c gc-oce.c gc-ocd.c\
+csrc=statics.c segment.c alloc.c symbol.c intern.c gcwrapper.c gc-011.c gc-oce.c gc-ocd.c\
 number.c schsig.c io.c new-io.c print.c fasl.c vfasl.c stats.c\
 foreign.c prim.c prim5.c flushcache.c\
 windows.c\
 schlib.c thread.c expeditor.c scheme.c compress-io.c random.c
-cobj=statics.obj segment.obj alloc.obj symbol.obj intern.obj gcwrapper.obj gc-oce.obj gc-ocd.obj\
+cobj=statics.obj segment.obj alloc.obj symbol.obj intern.obj gcwrapper.obj gc-011.obj gc-oce.obj gc-ocd.obj\
 number.obj schsig.obj io.obj new-io.obj print.obj fasl.obj vfasl.obj stats.obj\
 foreign.obj prim.obj prim5.obj flushcache.obj\
 windows.obj\


@@ -46,13 +46,13 @@ MTZlibLib=..\zlib\zlibmt.lib
MDLZ4Lib=..\lz4\lib\liblz4.lib
MTLZ4Lib=..\lz4\lib\liblz4mt.lib
-csrc=statics.c segment.c alloc.c symbol.c intern.c gcwrapper.c gc-oce.c gc-ocd.c\
+csrc=statics.c segment.c alloc.c symbol.c intern.c gcwrapper.c gc-011.c gc-oce.c gc-ocd.c\
 number.c schsig.c io.c new-io.c print.c fasl.c vfasl.c stats.c\
 foreign.c prim.c prim5.c flushcache.c\
 windows.c\
 schlib.c thread.c expeditor.c scheme.c compress-io.c random.c
-cobj=statics.obj segment.obj alloc.obj symbol.obj intern.obj gcwrapper.obj gc-oce.obj gc-ocd.obj\
+cobj=statics.obj segment.obj alloc.obj symbol.obj intern.obj gcwrapper.obj gc-011.obj gc-oce.obj gc-ocd.obj\
 number.obj schsig.obj io.obj new-io.obj print.obj fasl.obj vfasl.obj stats.obj\
 foreign.obj prim.obj prim5.obj flushcache.obj\
 windows.obj\


@@ -47,13 +47,13 @@ MTZlibLib=..\zlib\zlibmt.lib
MDLZ4Lib=..\lz4\lib\liblz4.lib
MTLZ4Lib=..\lz4\lib\liblz4mt.lib
-csrc=statics.c segment.c alloc.c symbol.c intern.c gcwrapper.c gc-oce.c gc-ocd.c\
+csrc=statics.c segment.c alloc.c symbol.c intern.c gcwrapper.c gc-011.c gc-oce.c gc-ocd.c\
 number.c schsig.c io.c new-io.c print.c fasl.c vfasl.c stats.c\
 foreign.c prim.c prim5.c flushcache.c\
 windows.c\
 schlib.c thread.c expeditor.c scheme.c compress-io.c random.c
-cobj=statics.obj segment.obj alloc.obj symbol.obj intern.obj gcwrapper.obj gc-oce.obj gc-ocd.obj\
+cobj=statics.obj segment.obj alloc.obj symbol.obj intern.obj gcwrapper.obj gc-011.obj gc-oce.obj gc-ocd.obj\
 number.obj schsig.obj io.obj new-io.obj print.obj fasl.obj vfasl.obj stats.obj\
 foreign.obj prim.obj prim5.obj flushcache.obj\
 windows.obj\


@@ -1,3 +1,4 @@
# Makefile.ti3nt
# Copyright 1984-2017 Cisco Systems, Inc.
#
@@ -46,13 +47,13 @@ MTZlibLib=..\zlib\zlibmt.lib
MDLZ4Lib=..\lz4\lib\liblz4.lib
MTLZ4Lib=..\lz4\lib\liblz4mt.lib
-csrc=statics.c segment.c alloc.c symbol.c intern.c gcwrapper.c gc-oce.c gc-ocd.c\
+csrc=statics.c segment.c alloc.c symbol.c intern.c gcwrapper.c gc-011.c gc-oce.c gc-ocd.c\
 number.c schsig.c io.c new-io.c print.c fasl.c vfasl.c stats.c\
 foreign.c prim.c prim5.c flushcache.c\
 windows.c\
 schlib.c thread.c expeditor.c scheme.c compress-io.c random.c
-cobj=statics.obj segment.obj alloc.obj symbol.obj intern.obj gcwrapper.obj gc-oce.obj gc-ocd.obj\
+cobj=statics.obj segment.obj alloc.obj symbol.obj intern.obj gcwrapper.obj gc-011.obj gc-oce.obj gc-ocd.obj\
 number.obj schsig.obj io.obj new-io.obj print.obj fasl.obj vfasl.obj stats.obj\
 foreign.obj prim.obj prim5.obj flushcache.obj\
 windows.obj\


@@ -157,6 +157,10 @@ ptr S_compute_bytes_allocated(xg, xs) ptr xg; ptr xs; {
      /* add in bytes in active segments */
      if (next_loc != FIX(0))
        n += (uptr)next_loc - (uptr)S_G.base_loc[g][s];
+     if (s == space_data) {
+       /* don't count space used for bitmasks */
+       n -= S_G.bitmask_overhead[g];
+     }
    }
    if (g == S_G.max_nonstatic_generation)
      g = static_generation;
@@ -268,6 +272,21 @@ void S_reset_allocation_pointer(tc) ptr tc; {
  S_pants_down -= 1;
}

+void S_record_new_dirty_card(ptr *ppp, IGEN to_g) {
+  uptr card = (uptr)ppp >> card_offset_bits;
+  dirtycardinfo *ndc = S_G.new_dirty_cards;
+  if (ndc != NULL && ndc->card == card) {
+    if (to_g < ndc->youngest) ndc->youngest = to_g;
+  } else {
+    dirtycardinfo *next = ndc;
+    find_room_voidp(space_new, 0, ptr_align(sizeof(dirtycardinfo)), ndc);
+    ndc->card = card;
+    ndc->youngest = to_g;
+    ndc->next = next;
+    S_G.new_dirty_cards = ndc;
+  }
+}
+
FORCEINLINE void mark_segment_dirty(seginfo *si, IGEN from_g, IGEN to_g) {
  IGEN old_to_g = si->min_dirty_byte;
@@ -297,7 +316,7 @@ void S_dirty_set(ptr *loc, ptr x) {
    if (!IMMEDIATE(x)) {
      seginfo *t_si = SegInfo(ptr_get_segment(x));
      if (t_si->generation < si->generation)
-       S_error_abort("wrong-way pointer installed during GC");
+       S_record_new_dirty_card(loc, t_si->generation);
    }
  } else {
    IGEN from_g = si->generation;
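
For orientation beyond the diff itself: below is a minimal, standalone C sketch of the card-recording idea that the new S_record_new_dirty_card implements. The 512-byte card size, the gen_t type, and the malloc-based allocation are assumptions standing in for the collector's card_offset_bits, IGEN, and find_room_voidp; this is an illustration, not the actual implementation.

    #include <stdio.h>
    #include <stdlib.h>
    #include <stdint.h>

    /* Assumed stand-ins for the collector's own parameters. */
    #define CARD_OFFSET_BITS 9            /* pretend cards are 512 bytes */
    typedef int gen_t;                    /* stand-in for IGEN */

    typedef struct dirty_card {
      uintptr_t card;                     /* card index = address >> CARD_OFFSET_BITS */
      gen_t youngest;                     /* youngest generation referenced from this card */
      struct dirty_card *next;
    } dirty_card;

    static dirty_card *new_dirty_cards = NULL;

    /* Record that the slot at `slot` now points into generation `to_g`.
       Repeated writes to the most recently recorded card collapse into one entry. */
    static void record_new_dirty_card(void *slot, gen_t to_g) {
      uintptr_t card = (uintptr_t)slot >> CARD_OFFSET_BITS;
      dirty_card *ndc = new_dirty_cards;
      if (ndc != NULL && ndc->card == card) {
        if (to_g < ndc->youngest) ndc->youngest = to_g;
      } else {
        dirty_card *fresh = malloc(sizeof(dirty_card));
        fresh->card = card;
        fresh->youngest = to_g;
        fresh->next = ndc;
        new_dirty_cards = fresh;
      }
    }

    int main(void) {
      long slots[4] = {0, 0, 0, 0};
      record_new_dirty_card(&slots[0], 1);  /* wrong-way pointer into generation 1 */
      record_new_dirty_card(&slots[1], 0);  /* typically the same card, so the entry is refined */
      for (dirty_card *c = new_dirty_cards; c != NULL; c = c->next)
        printf("card %#lx youngest %d\n", (unsigned long)c->card, c->youngest);
      return 0;
    }

Replaying such a list after collection is what the loop over S_G.new_dirty_cards later in this commit does: each recorded card is marked dirty at its recorded youngest generation.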


@@ -66,6 +66,7 @@ extern void S_reset_allocation_pointer PROTO((ptr tc));
extern ptr S_compute_bytes_allocated PROTO((ptr xg, ptr xs));
extern ptr S_bytes_finalized PROTO(());
extern ptr S_find_more_room PROTO((ISPC s, IGEN g, iptr n, ptr old));
+extern void S_record_new_dirty_card PROTO((ptr *ppp, IGEN to_g));
extern void S_dirty_set PROTO((ptr *loc, ptr x));
extern void S_mark_card_dirty PROTO((uptr card, IGEN to_g));
extern void S_scan_dirty PROTO((ptr *p, ptr *endp));


@@ -18,7 +18,7 @@
#define MAX_CG 0
#define MIN_TG 1
#define MAX_TG 1
-#define NO_LOCKED_OLDSPACE_OBJECTS
+#define NO_NEWSPACE_MARKS
#include "gc.c"
void S_gc_011(ptr tc) {


@@ -38,21 +38,24 @@
   Generations range from 0 to `S_G.max_nonstatic_generation` plus a
   static generation. After an object moves to the static generation,
-   it doesn't move anymore. (In the case of code objects, relocations
-   may be discarded when the code object moves into a static
-   generation.)
+   it doesn't move anymore. In the case of code objects, relocations
+   may be discarded when the code object moves into a static
+   generation.

-   For the most part, collecting generations 0 through mgc (= max
-   copied generation) to tg (= target generation) means copying
-   objects from old segments into fresh segments at generation tg.
-   Note that tg is either the same as or one larger than mgc.
+   For the most part, collecting generations 0 through MAX_CG (= max
+   copied generation) to MIN_TG to MAX_TG (= target generation) means
+   copying objects from old segments into fresh segments generations
+   MIN_TG through MAX_TG. Note that MAX_TG is either the same as or
+   one larger than MAX_CG. For objects in generation 0 through MAX_CG,
+   the target generation is either one more than the current
+   generation or it's MIN_TG.

-   But objects might be marked [and swept] instead of copied [and
-   swept] as triggered by two possibilities: one or more objects on
-   the source segment are immobile (subsumes locked) or mgc == tg and
-   the object is on a segment that hasn't been disovered as sparse by
-   a precious marking (non-copying) pass. Segments with marked objects
-   are promoted to generation tg.
+   Objects might be marked [and swept] instead of copied [and swept]
+   as triggered by two possibilities: one or more objects on the
+   source segment are immobile (subsumes locked) or MAX_CG == MAX_TG
+   and the object is on a MAX_CG segment that hasn't been discovered as
+   sparse by a previous marking (non-copying) pass. Segments with
+   marked objects are promoted to the target generation.

   As a special case, locking on `space_new` does not mark all objects
   on that segment, because dirty-write handling cannot deal with
@@ -70,14 +73,14 @@
   Marking an object means setting a bit in `marked_mask`, which is
   allocated as needed. Any segments that ends up with a non-NULL
-   `marked_mask` is promoted to tg at the end of collection. If a
-   marked object spans multiple segments, then `masked_mask` is
-   created across all of the segments. It's possible for a segment to
-   end up with `marked_mask` even though `use_marks` was not set: an
-   marked object spanned into the segment, or it's `space_new` segment
-   with locked objects; in that case, other objects will be copied out
-   of the segment, because `use_marks` is how relocation decides
-   whether to copy or mark.
+   `marked_mask` is kept in its new generation at the end of
+   collection. If a marked object spans multiple segments, then
+   `marked_mask` is created across all of the segments. It's possible
+   for a segment to end up with `marked_mask` even though `use_marks`
+   was not set: a marked object spanned into the segment, or it's a
+   `space_new` segment with locked objects; in that case, other
+   objects will be copied out of the segment, because `use_marks` is
+   how relocation decides whether to copy or mark.

   If an object is copied, then its first word is set to
   `forward_marker` and its second word is set to the new address.
@@ -85,25 +88,25 @@
   whether an object has been reached:

   * the object must be in an `old_space` segment, otherwise it counts
-     as reached because it's in a generation older than mcg;
+     as reached because it's in a generation older than MAX_CG;

   * the object either starts with `forward_marker` or its mark bit is
-     set (and those arer mutually exclusive).
+     set (and those are mutually exclusive).

-   Besides the one bit at the start of an object, extra bits for the
-   object content may be set as well. Those extra bits tell the
-   dirty-object sweeper which words in a previously marked page should
-   be swept and which should be skipped, so the extra bits are only
-   needed for impure objects in certain kinds of spaces. Only every
-   alternate word needs to be marked that way, so half of the mark
-   bits are usually irrelevant; the exception is that flonums can be
-   between normal object-start positions, so those mark bits can
-   matter, at least if we're preserving `eq?` on flonums (but the bits
-   are not relevant to dirty-object sweeping, since flonums don't have
-   pointer fields).
+   Besides the one bit for the start of an object in the mark mask,
+   extra bits for the object content may be set as well. Those extra
+   bits tell the dirty-object sweeper which words in a previously
+   marked page should be swept and which should be skipped, so the
+   extra bits are only needed for impure objects in certain kinds of
+   spaces. Only every alternate word needs to be marked that way, so
+   half of the mark bits are usually irrelevant; the exception is that
+   flonums can be between normal object-start positions, so those mark
+   bits can matter, at least if we're preserving `eq?` on flonums (but
+   the bits are not relevant to dirty-object sweeping, since flonums
+   don't have pointer fields).

-   It's ok to sweep an object multiple times (but to be be avoided if
-   possible).
+   It's ok to sweep an object multiple times, but that's to be
+   avoided if possible.

   Pending Ephemerons and Guardians
   --------------------------------
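
As a side note on the mark-bitmap description above, here is a small self-contained C sketch of per-segment mark bits in the spirit of marked_mask and the segment_bitmap_byte/segment_bitmap_bit accessors. The segment size and the 8-byte object alignment are assumed values chosen for the example, not the ones Chez Scheme actually uses.

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    #define SEGMENT_SIZE (1 << 14)              /* assumed 16 KB segments */
    #define ALIGN_SHIFT  3                      /* assumed 8-byte object alignment */
    #define BITMAP_BYTES (SEGMENT_SIZE >> (ALIGN_SHIFT + 3))

    typedef struct segment {
      uint8_t marked_mask[BITMAP_BYTES];        /* one bit per aligned address in the segment */
      uintptr_t base;                           /* first address covered by this segment */
    } segment;

    /* The low bits of the aligned offset select a byte and a bit within it,
       in the style of segment_bitmap_byte/segment_bitmap_bit. */
    static int marked(segment *si, uintptr_t p) {
      uintptr_t off = (p - si->base) >> ALIGN_SHIFT;
      return (si->marked_mask[off >> 3] >> (off & 7)) & 1;
    }

    static void set_mark(segment *si, uintptr_t p) {
      uintptr_t off = (p - si->base) >> ALIGN_SHIFT;
      si->marked_mask[off >> 3] |= (uint8_t)(1 << (off & 7));
    }

    int main(void) {
      segment si;
      memset(si.marked_mask, 0, sizeof si.marked_mask);
      si.base = 0x100000;
      set_mark(&si, 0x100040);                  /* mark the object starting at offset 0x40 */
      printf("%d %d\n", marked(&si, 0x100040), marked(&si, 0x100048));  /* prints: 1 0 */
      return 0;
    }

A segment that ends up with any such bits set is the "non-NULL marked_mask" case described above and is kept in its target generation rather than being evacuated.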
@@ -124,7 +127,8 @@
static IGEN copy PROTO((ptr pp, seginfo *si, ptr *dest));
static IGEN mark_object PROTO((ptr pp, seginfo *si));
static void sweep PROTO((ptr tc, ptr p, IGEN from_g));
-static void sweep_in_old PROTO((ptr tc, ptr p, IGEN from_g));
+static void sweep_in_old PROTO((ptr p));
+static void sweep_object_in_old PROTO((ptr p));
static IBOOL object_directly_refers_to_self PROTO((ptr p));
static ptr copy_stack PROTO((ptr old, iptr *length, iptr clength));
static void resweep_weak_pairs PROTO((seginfo *oldweakspacesegments));
@@ -136,7 +140,7 @@ static uptr size_object PROTO((ptr p));
static iptr sweep_typed_object PROTO((ptr tc, ptr p, IGEN from_g));
static void sweep_symbol PROTO((ptr p, IGEN from_g));
static void sweep_port PROTO((ptr p, IGEN from_g));
-static void sweep_thread PROTO((ptr p, IGEN from_g));
+static void sweep_thread PROTO((ptr p));
static void sweep_continuation PROTO((ptr p, IGEN from_g));
static void sweep_record PROTO((ptr x, IGEN from_g));
static IGEN sweep_dirty_record PROTO((ptr x, IGEN youngest));
@@ -152,8 +156,8 @@ static void add_trigger_guardians_to_recheck PROTO((ptr ls));
static void add_ephemeron_to_pending PROTO((ptr p));
static void add_trigger_ephemerons_to_pending PROTO((ptr p));
static void check_triggers PROTO((seginfo *si));
-static void check_ephemeron PROTO((ptr pe, IGEN from_g));
-static void check_pending_ephemerons PROTO((IGEN from_g));
+static void check_ephemeron PROTO((ptr pe));
+static void check_pending_ephemerons PROTO(());
static int check_dirty_ephemeron PROTO((ptr pe, int youngest));
static void finish_pending_ephemerons PROTO((seginfo *si));
static void init_fully_marked_mask(IGEN g);
@@ -186,10 +190,6 @@ static void check_pending_measure_ephemerons();
# endif
#endif

-#ifndef NO_DIRTY_NEWSPACE_POINTERS
-static void record_new_dirty_card PROTO((ptr *ppp, IGEN to_g));
-#endif /* !NO_DIRTY_NEWSPACE_POINTERS */
-
/* #define DEBUG */

/* initialized and used each gc cycle. any others should be defined in globals.h */
@@ -217,7 +217,8 @@ static IGEN MAX_CG, MIN_TG, MAX_TG;
#if defined(MIN_TG) && defined(MAX_TG) && (MIN_TG == MAX_TG)
# define TARGET_GENERATION(si) MIN_TG
# define compute_target_generation(g) MIN_TG
+# define CONSTANT_TARGET_GENERATION
#else
# define TARGET_GENERATION(si) si->generation
FORCEINLINE IGEN compute_target_generation(IGEN g) {
@@ -297,10 +298,19 @@ uptr list_length(ptr ls) {
#define init_mask(dest, tg, init) { \
  find_room_voidp(space_data, tg, ptr_align(segment_bitmap_bytes), dest); \
  memset(dest, init, segment_bitmap_bytes); \
+  S_G.bitmask_overhead[tg] += ptr_align(segment_bitmap_bytes); \
}

#define marked(si, p) (si->marked_mask && (si->marked_mask[segment_bitmap_byte(p)] & segment_bitmap_bit(p)))

+#ifdef NO_NEWSPACE_MARKS
+# define new_marked(si, p) 0
+# define CAN_MARK_AND(x) 0
+#else
+# define new_marked(si, p) marked(si, p)
+# define CAN_MARK_AND(x) x
+#endif
+
static void init_fully_marked_mask(IGEN g) {
  init_mask(fully_marked_mask[g], g, 0xFF);
}
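
A hedged illustration of what the new new_marked/CAN_MARK_AND macros buy: a specialized collector such as gc-011.c defines NO_NEWSPACE_MARKS before including gc.c, so every mark-related branch folds away at compile time and only the copying path remains. The single-file toy below mimics that pattern; marked() and relocate() are stand-ins invented for the example, not the collector's functions.

    #include <stdio.h>

    /* Toy mark-bit test; the real collector consults a per-segment bitmap. */
    static int marked(int p) { return p & 1; }

    /* Conditional definitions in the style of the diff above, reduced to one argument. */
    #ifdef NO_NEWSPACE_MARKS
    # define new_marked(p) 0         /* never treat an object as already marked */
    # define CAN_MARK_AND(x) 0       /* every mark-or-copy decision folds to "copy" */
    #else
    # define new_marked(p) marked(p)
    # define CAN_MARK_AND(x) (x)
    #endif

    /* Decide what to do with an object on an old-space segment whose
       use_marks flag is assumed true (the 1 passed to CAN_MARK_AND). */
    static const char *relocate(int p) {
      if (new_marked(p)) return "leave in place (already marked)";
      if (CAN_MARK_AND(1)) return "mark in place";
      return "copy to the target generation";
    }

    int main(void) {
      /* Default build: prints "leave in place (already marked)".
         Built with -DNO_NEWSPACE_MARKS, both mark branches vanish and
         it prints "copy to the target generation". */
      printf("%s\n", relocate(3));
      return 0;
    }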
@@ -357,7 +367,7 @@ static int flonum_is_forwarded_p(ptr p, seginfo *si) {
#define relocate_pure_help_help(ppp, pp, si) do { \
    if (FORWARDEDP(pp, si)) \
      *ppp = GET_FWDADDRESS(pp); \
-    else if (!marked(si, pp)) \
+    else if (!new_marked(si, pp)) \
      mark_or_copy_pure(ppp, pp, si); \
  } while (0)
@@ -365,13 +375,13 @@ static int flonum_is_forwarded_p(ptr p, seginfo *si) {
    if (FWDMARKER(pp) == forward_marker) \
      pp = GET_FWDADDRESS(pp); \
    else if (si->old_space) { \
-      if (!marked(si, pp)) \
+      if (!new_marked(si, pp)) \
        mark_or_copy_pure(&pp, pp, si); \
    } ELSE_MEASURE_NONOLDSPACE(pp) \
  } while (0)

#define mark_or_copy_pure(dest, p, si) do { \
-    if (si->use_marks) \
+    if (CAN_MARK_AND(si->use_marks)) \
      (void)mark_object(p, si); \
    else \
      (void)copy(p, si, dest); \
@@ -398,7 +408,7 @@ static int flonum_is_forwarded_p(ptr p, seginfo *si) {
      if (SI->old_space) \
        relocate_impure_help_help(ppp, pp, from_g, SI); \
      ELSE_MEASURE_NONOLDSPACE(pp) \
    } \
  } while (0)

#define relocate_impure_help_help(ppp, pp, from_g, si) do { \
@@ -406,44 +416,20 @@ static int flonum_is_forwarded_p(ptr p, seginfo *si) {
    if (FORWARDEDP(pp, si)) { \
      *ppp = GET_FWDADDRESS(pp); \
      __to_g = TARGET_GENERATION(si); \
-      if (__to_g < from_g) record_new_dirty_card(ppp, __to_g); \
-    } else if (!marked(si, pp)) { \
+      if (__to_g < from_g) S_record_new_dirty_card(ppp, __to_g); \
+    } else if (!new_marked(si, pp)) { \
      mark_or_copy_impure(__to_g, ppp, pp, from_g, si); \
-      if (__to_g < from_g) record_new_dirty_card(ppp, __to_g); \
+      if (__to_g < from_g) S_record_new_dirty_card(ppp, __to_g); \
    } \
  } while (0)

#define mark_or_copy_impure(to_g, dest, p, from_g, si) do { \
-    if (si->use_marks) \
+    if (CAN_MARK_AND(si->use_marks)) \
      to_g = mark_object(p, si); \
    else \
      to_g = copy(p, si, dest); \
  } while (0)

-typedef struct _dirtycardinfo {
-  uptr card;
-  IGEN youngest;
-  struct _dirtycardinfo *next;
-} dirtycardinfo;
-
-static dirtycardinfo *new_dirty_cards;
-
-static void record_new_dirty_card(ptr *ppp, IGEN to_g) {
-  uptr card = (uptr)ppp >> card_offset_bits;
-  dirtycardinfo *ndc = new_dirty_cards;
-  if (ndc != NULL && ndc->card == card) {
-    if (to_g < ndc->youngest) ndc->youngest = to_g;
-  } else {
-    dirtycardinfo *next = ndc;
-    find_room(space_new, 0, typemod, ptr_align(sizeof(dirtycardinfo)), ndc);
-    ndc->card = card;
-    ndc->youngest = to_g;
-    ndc->next = next;
-    new_dirty_cards = ndc;
-  }
-}
-
#endif /* !NO_DIRTY_NEWSPACE_POINTERS */

#define relocate_dirty(PPP, YOUNGEST) do { \
@@ -454,8 +440,10 @@
    } else if (FORWARDEDP(_pp, _si)) { \
      *_ppp = GET_FWDADDRESS(_pp); \
      _pg = TARGET_GENERATION(_si); \
-    } else if (marked(_si, _pp)) { \
+    } else if (new_marked(_si, _pp)) { \
      _pg = TARGET_GENERATION(_si); \
+    } else if (CAN_MARK_AND(_si->use_marks)) { \
+      _pg = mark_object(_pp, _si); \
    } else { \
      _pg = copy(_pp, _si, _ppp); \
    } \
@@ -467,6 +455,10 @@
# define is_counting_root(si, p) (si->counting_mask && (si->counting_mask[segment_bitmap_byte(p)] & segment_bitmap_bit(p)))
#endif

+static void relocate_indirect(ptr p) {
+  relocate_pure(&p);
+}
+
FORCEINLINE void check_triggers(seginfo *si) {
  /* Registering ephemerons and guardians to recheck at the
     granularity of a segment means that the worst-case complexity of
@@ -501,7 +493,7 @@ FORCEINLINE void check_triggers(seginfo *si) {
   set to a forwarding marker and pointer. To handle that problem,
   sweep_in_old() is allowed to copy the object, since the object
   is going to get copied anyway. */
-static void sweep_in_old(ptr tc, ptr p, IGEN from_g) {
+static void sweep_in_old(ptr p) {
  /* Detect all the cases when we need to give up on in-place
     sweeping: */
  if (object_directly_refers_to_self(p)) {
@@ -510,8 +502,10 @@
  }

  /* We've determined that `p` won't refer immediately back to itself,
-     so it's ok to use sweep(). */
-  sweep(tc, p, from_g);
+     so it's ok to sweep(), but only update `p` for pure relocations;
+     impure ones must happen later, after `p` is potentially copied,
+     so the card updates will be right. */
+  sweep_object_in_old(p);
}

static void sweep_dirty_object_if_space_new(ptr p) {
@@ -532,7 +526,8 @@ static ptr copy_stack(ptr old, iptr *length, iptr clength) {
  newg = TARGET_GENERATION(si);
  n = *length;
+#ifndef NO_NEWSPACE_MARKS
  if (si->use_marks) {
    if (!marked(si, old)) {
      mark_typemod_data_object(old, n, si);
@@ -545,6 +540,7 @@ static ptr copy_stack(ptr old, iptr *length, iptr clength) {
    return old;
  }
+#endif
  /* reduce headroom created for excessively large frames (typically resulting from apply with long lists) */
  if (n != clength && n > default_stack_size && n > (m = clength + one_shot_headroom)) {
@@ -557,13 +553,17 @@ static ptr copy_stack(ptr old, iptr *length, iptr clength) {
  S_G.bytesof[newg][countof_stack] += n;
#endif /* ENABLE_OBJECT_COUNTS */

-  find_room(space_data, newg, typemod, n, new);
-  n = ptr_align(clength);
-  /* warning: stack may have been left non-double-aligned by split_and_resize */
-  memcpy_aligned(TO_VOIDP(new), TO_VOIDP(old), n);
+  if (n == 0) {
+    return (ptr)0;
+  } else {
+    find_room(space_data, newg, typemod, n, new);
+    n = ptr_align(clength);
+    /* warning: stack may have been left non-double-aligned by split_and_resize */
+    memcpy_aligned(TO_VOIDP(new), TO_VOIDP(old), n);
    /* also returning possibly updated value in *length */
    return new;
+  }
}

#define NONSTATICINHEAP(si, x) (!IMMEDIATE(x) && (si = MaybeSegInfo(ptr_get_segment(x))) != NULL && si->generation != static_generation)
@@ -574,7 +574,7 @@ static ptr copy_stack(ptr old, iptr *length, iptr clength) {
    obj = GUARDIANOBJ(ls); \
    next = GUARDIANNEXT(ls); \
    if (FILTER(si, obj)) { \
-      if (!si->old_space || marked(si, obj)) { \
+      if (!si->old_space || new_marked(si, obj)) { \
        INITGUARDIANNEXT(ls) = pend_hold_ls; \
        pend_hold_ls = ls; \
      } else if (FORWARDEDP(obj, si)) { \
@@ -585,7 +585,7 @@ static ptr copy_stack(ptr old, iptr *length, iptr clength) {
      seginfo *t_si; \
      tconc = GUARDIANTCONC(ls); \
      t_si = SegInfo(ptr_get_segment(tconc)); \
-      if (!t_si->old_space || marked(t_si, tconc)) { \
+      if (!t_si->old_space || new_marked(t_si, tconc)) { \
        INITGUARDIANNEXT(ls) = final_ls; \
        final_ls = ls; \
      } else if (FWDMARKER(tconc) == forward_marker) { \
@@ -627,8 +627,9 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
  tlcs_to_rehash = Snil;
  conts_to_promote = Snil;
#ifndef NO_DIRTY_NEWSPACE_POINTERS
-  new_dirty_cards = NULL;
+  S_G.new_dirty_cards = NULL;
#endif /* !NO_DIRTY_NEWSPACE_POINTERS */
+  S_G.must_mark_gen0 = 0;

  for (ls = S_threads; ls != Snil; ls = Scdr(ls)) {
    ptr tc = (ptr)THREADTC(Scar(ls));
@@ -655,6 +656,7 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
        S_G.next_loc[g][s] = FIX(0);
        S_G.bytes_left[g][s] = 0;
        S_G.bytes_of_space[g][s] = 0;
+        S_G.bitmask_overhead[g] = 0;
      }
    }
@@ -682,6 +684,14 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
  for (g = 0; g <= MAX_CG; g += 1) {
    IBOOL maybe_mark = ((g >= S_G.min_mark_gen) && (g >= MIN_TG));
    for (s = 0; s <= max_real_space; s += 1) {
+      seginfo *saved;
+
+      if (s == space_weakpair) {
+        saved = oldspacesegments;
+        oldspacesegments = oldweakspacesegments;
+      } else
+        saved = NULL;
+
      for (si = S_G.occupied_segments[g][s]; si != NULL; si = nextsi) {
        nextsi = si->next;
        si->next = oldspacesegments;
@@ -704,12 +714,20 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
        si->min_dirty_byte = 0; /* prevent registering as dirty while GCing */
      }
      S_G.occupied_segments[g][s] = NULL;
+      if (s == space_weakpair) {
+        oldweakspacesegments = oldspacesegments;
+        oldspacesegments = saved;
+      }
    }
-    if (s == space_weakpair) {
-      /* prefix of oldweakspacesegments is for weak pairs */
-      oldweakspacesegments = oldspacesegments;
-    }
-  }
+  }
+  if (oldweakspacesegments) {
+    /* make oldweakspacesegments a prefix of weakspacesegments */
+    seginfo *p;
+    for (p = oldweakspacesegments; p->next; p = p->next);
+    p->next = oldspacesegments;
+    oldspacesegments = oldweakspacesegments;
+  }

#ifdef ENABLE_OBJECT_COUNTS
  /* clear object counts & bytes for copied generations; bump timestamp */
@@ -894,14 +912,13 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
  /* sweep non-oldspace threads, since any thread may have an active stack */
  for (ls = S_threads; ls != Snil; ls = Scdr(ls)) {
-    ptr thread; seginfo *thread_si;
+    ptr thread;

    /* someone may have their paws on the list */
    if (FWDMARKER(ls) == forward_marker) ls = FWDADDRESS(ls);

    thread = Scar(ls);
-    thread_si = SegInfo(ptr_get_segment(thread));
-    if (!thread_si->old_space) sweep_thread(thread, thread_si->generation);
+    if (!OLDSPACE(thread)) sweep_thread(thread);
  }
  relocate_pure(&S_threads);
@@ -931,7 +948,7 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
          /* coordinate with alloc.c */
          (SYMVAL(sym) != sunbound || SYMPLIST(sym) != Snil || SYMSPLIST(sym) != Snil)) {
        seginfo *sym_si = SegInfo(ptr_get_segment(sym));
-        if (!marked(sym_si, sym))
+        if (!new_marked(sym_si, sym))
          mark_or_copy_pure(&sym, sym, sym_si);
      }
    }
@@ -1025,7 +1042,7 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
           won't discover any new tconcs at that point. */
        ptr obj = GUARDIANOBJ(ls);
        seginfo *o_si = SegInfo(ptr_get_segment(obj));
-        if (FORWARDEDP(obj, o_si) || marked(o_si, obj)) {
+        if (FORWARDEDP(obj, o_si) || new_marked(o_si, obj)) {
          /* Object is reachable, so we might as well move
             this one to the hold list --- via pend_hold_ls, which
             leads to a copy to move to hold_ls */
@@ -1034,8 +1051,10 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
        } else {
          seginfo *si;
          if (!IMMEDIATE(rep) && (si = MaybeSegInfo(ptr_get_segment(rep))) != NULL && si->old_space) {
-            PUSH_BACKREFERENCE(rep)
-            sweep_in_old(tc, rep, si->generation);
+            /* mark things reachable from `rep`, but not `rep` itself, unless
+               `rep` is immediately reachable from itself */
+            PUSH_BACKREFERENCE(ls)
+            sweep_in_old(rep);
            POP_BACKREFERENCE()
          }
          INITGUARDIANNEXT(ls) = maybe_final_ordered_ls;
@@ -1043,20 +1062,17 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
        }
      } else {
        /* if tconc was old it's been forwarded */
-        IGEN tg;
        tconc = GUARDIANTCONC(ls);
        WITH_TOP_BACKREFERENCE(tconc, relocate_pure(&rep));
-        tg = GENERATION(tconc);
        old_end = Scdr(tconc);
-        /* allocate new_end in tg, in case `tconc` is on a marked segment */
-        new_end = S_cons_in(space_impure, tg, FIX(0), FIX(0));
+        new_end = S_cons_in(space_impure, 0, FIX(0), FIX(0));
#ifdef ENABLE_OBJECT_COUNTS
-        S_G.countof[tg][countof_pair] += 1;
+        S_G.countof[0][countof_pair] += 1;
#endif /* ENABLE_OBJECT_COUNTS */
+        /* These assignments may trigger card marking or additions to `new_dirty_cards`: */
        SETCAR(old_end,rep);
        SETCDR(old_end,new_end);
        SETCDR(tconc,new_end);
@@ -1067,20 +1083,25 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
    ls = pend_hold_ls; pend_hold_ls = Snil;
    for ( ; ls != Snil; ls = next) {
      ptr p;
-      seginfo *g_si, *t_si;
+      seginfo *t_si;
+#ifdef CONSTANT_TARGET_GENERATION
+      g = MAX_TG;
+#else
+      seginfo *g_si;
+      g_si = SegInfo(ptr_get_segment(ls));
+      g = TARGET_GENERATION(g_si);
+#endif

      next = GUARDIANNEXT(ls);

      /* discard static pend_hold_ls entries */
-      g_si = SegInfo(ptr_get_segment(ls));
-      g = TARGET_GENERATION(g_si);
      if (g == static_generation) continue;

      tconc = GUARDIANTCONC(ls);
      t_si = SegInfo(ptr_get_segment(tconc));
-      if (t_si->old_space && !marked(t_si, tconc)) {
+      if (t_si->old_space && !new_marked(t_si, tconc)) {
        if (FWDMARKER(tconc) == forward_marker)
          tconc = FWDADDRESS(tconc);
        else {
@@ -1120,7 +1141,7 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
      ptr obj = GUARDIANOBJ(ls);
      seginfo *o_si = SegInfo(ptr_get_segment(obj));
      next = GUARDIANNEXT(ls);
-      if (FORWARDEDP(obj, o_si) || marked(o_si, obj)) {
+      if (FORWARDEDP(obj, o_si) || new_marked(o_si, obj)) {
        /* Will defintely move to hold_ls, but the entry
           must be copied to move from pend_hold_ls to
           hold_ls: */
@@ -1162,8 +1183,10 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
        INITGUARDIANNEXT(ls) = final_ls;
        final_ls = ls;
      } else {
+#ifndef NO_NEWSPACE_MARKS
        seginfo *t_si = SegInfo(ptr_get_segment(tconc));
-        if (marked(t_si, tconc)) {
+#endif
+        if (new_marked(t_si, tconc)) {
          INITGUARDIANNEXT(ls) = final_ls;
          final_ls = ls;
        } else {
@@ -1199,7 +1222,7 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
      bnext = TO_VOIDP((uptr)TO_PTR(b->next) - 1);
      sym = b->sym;
      si = SegInfo(ptr_get_segment(sym));
-      if (marked(si, sym) || (FWDMARKER(sym) == forward_marker && ((sym = FWDADDRESS(sym)) || 1))) {
+      if (new_marked(si, sym) || (FWDMARKER(sym) == forward_marker && ((sym = FWDADDRESS(sym)) || 1))) {
        IGEN g = si->generation;
        find_room_voidp(space_data, g, ptr_align(sizeof(bucket)), b);
#ifdef ENABLE_OBJECT_COUNTS
@@ -1235,7 +1258,7 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
      count++;
      p = Scar(ls);
      si = SegInfo(ptr_get_segment(p));
-      if (!si->old_space || marked(si, p)) {
+      if (!si->old_space || new_marked(si, p)) {
        newg = TARGET_GENERATION(si);
        S_G.rtds_with_counts[newg] = S_cons_in(space_impure, newg, p, S_G.rtds_with_counts[newg]);
#ifdef ENABLE_OBJECT_COUNTS
@@ -1307,7 +1330,7 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
#endif
    } else {
      chunkinfo *chunk = si->chunk;
-      if (si->generation != static_generation) S_G.number_of_nonstatic_segments -= 1;
+      S_G.number_of_nonstatic_segments -= 1;
      S_G.number_of_empty_segments += 1;
      si->space = space_empty;
      si->next = chunk->unused_segs;
@@ -1335,11 +1358,10 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
  S_flush_instruction_cache(tc);

#ifndef NO_DIRTY_NEWSPACE_POINTERS
  /* mark dirty those newspace cards to which we've added wrong-way pointers */
  { dirtycardinfo *ndc;
-    for (ndc = new_dirty_cards; ndc != NULL; ndc = ndc->next)
+    for (ndc = S_G.new_dirty_cards; ndc != NULL; ndc = ndc->next)
      S_mark_card_dirty(ndc->card, ndc->youngest);
  }
#endif /* !NO_DIRTY_NEWSPACE_POINTERS */
@@ -1389,7 +1411,7 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
  }

  /* Promote opportunistic 1-shot continuations, because we can no
-     longer cached one and we can no longer reliably fuse the stack
+     longer cache one and we can no longer reliably fuse the stack
     back. */
  while (conts_to_promote != Snil) {
    S_promote_to_multishot(CONTLINK(Scar(conts_to_promote)));
@@ -1419,13 +1441,14 @@ ptr GCENTRY(ptr tc, ptr count_roots_ls) {
    nlp = &S_G.next_loc[from_g][s]; \
    if (*slp == 0) *slp = S_G.first_loc[from_g][s]; \
    pp = TO_VOIDP(*slp); \
-    while (pp != (nl = (ptr *)*nlp)) \
-      do \
+    while (pp != (nl = (ptr *)*nlp)) { \
+      do { \
        if ((p = *pp) == forward_marker) \
          pp = TO_VOIDP(*(pp + 1)); \
        else \
          body \
-      while (pp != nl); \
+      } while (pp != nl); \
+    } \
    *slp = TO_PTR(pp); \
  }
@@ -1472,7 +1495,7 @@ static void resweep_weak_pairs(seginfo *oldweakspacesegments) {
static void forward_or_bwp(pp, p) ptr *pp; ptr p; {
  seginfo *si;
 /* adapted from relocate */
-  if (!IMMEDIATE(p) && (si = MaybeSegInfo(ptr_get_segment(p))) != NULL && si->old_space && !marked(si, p)) {
+  if (!IMMEDIATE(p) && (si = MaybeSegInfo(ptr_get_segment(p))) != NULL && si->old_space && !new_marked(si, p)) {
    if (FORWARDEDP(p, si)) {
      *pp = GET_FWDADDRESS(p);
    } else {
@@ -1580,8 +1603,7 @@ static void sweep_generation(ptr tc) {
       segment-specific trigger or gets triggered for recheck, but
       it doesn't change the worst-case complexity. */
    if (!change)
-      for (from_g = MIN_TG; from_g <= MAX_TG; from_g += 1)
-        check_pending_ephemerons(from_g);
+      check_pending_ephemerons();
  } while (change);
}
@@ -1618,7 +1640,7 @@ static iptr sweep_typed_object(ptr tc, ptr p, IGEN from_g) {
    sweep_record(p, from_g);
    return size_record_inst(UNFIX(RECORDDESCSIZE(RECORDINSTTYPE(p))));
  } else if (TYPEP(tf, mask_thread, type_thread)) {
-    sweep_thread(p, from_g);
+    sweep_thread(p);
    return size_thread;
  } else {
    /* We get here only if backreference mode pushed other typed objects into
@@ -1716,21 +1738,31 @@ static void sweep_dirty() {
          if (pp <= nl && nl < ppend) ppend = nl;

          if (dirty_si->dirty_bytes[d] <= MAX_CG) {
-            /* assume we won't find any wrong-way pointers */
+            /* start out with assumption that we won't find any wrong-way pointers */
            youngest = 0xff;

            if ((s == space_impure) || (s == space_immobile_impure)
                || (s == space_impure_typed_object) || (s == space_count_impure)
                || (s == space_closure)) {
-              while (pp < ppend && *pp != forward_marker) {
-                /* handle two pointers at a time */
-                if (!dirty_si->marked_mask || marked(dirty_si, TO_PTR(pp))) {
+              if (dirty_si->marked_mask) {
+                while (pp < ppend) {
+                  /* handle two pointers at a time */
+                  if (marked(dirty_si, TO_PTR(pp))) {
+                    relocate_dirty(pp,youngest);
+                    pp += 1;
+                    relocate_dirty(pp,youngest);
+                    pp += 1;
+                  } else
+                    pp += 2;
+                }
+              } else {
+                while (pp < ppend && *pp != forward_marker) {
+                  /* handle two pointers at a time */
                  relocate_dirty(pp,youngest);
                  pp += 1;
                  relocate_dirty(pp,youngest);
                  pp += 1;
-                } else
-                  pp += 2;
+                }
              }
            } else if (s == space_symbol) {
              /* old symbols cannot overlap segment boundaries
@@ -1743,7 +1775,8 @@ static void sweep_dirty() {
                          (size_symbol / sizeof(ptr))) *
                         (size_symbol / sizeof(ptr));

-              while (pp < ppend && *pp != forward_marker) {
+              /* might overshoot card by part of a symbol. no harm. */
+              while (pp < ppend && (dirty_si->marked_mask || (*pp != forward_marker))) {
                ptr p = TYPE(TO_PTR(pp), type_symbol);

                if (!dirty_si->marked_mask || marked(dirty_si, p))
@@ -1762,7 +1795,8 @@ static void sweep_dirty() {
                          (size_port / sizeof(ptr))) *
                         (size_port / sizeof(ptr));

-              while (pp < ppend && *pp != forward_marker) {
+              /* might overshoot card by part of a port. no harm. */
+              while (pp < ppend && (dirty_si->marked_mask || (*pp != forward_marker))) {
                ptr p = TYPE(TO_PTR(pp), type_typed_object);

                if (!dirty_si->marked_mask || marked(dirty_si, p))
@@ -1849,9 +1883,6 @@ static void sweep_dirty() {
                    /* skip unmarked words */
                    p = (ptr)((uptr)p + byte_alignment);
                  } else {
-                    /* quit on end of segment */
-                    if (FWDMARKER(p) == forward_marker) break;
-
                    youngest = sweep_dirty_record(p, youngest);
                    p = (ptr)((iptr)p +
                           size_record_inst(UNFIX(RECORDDESCSIZE(
@@ -1905,7 +1936,7 @@ static void sweep_dirty() {
                }
              }
            } else if (s == space_weakpair) {
-              while (pp < ppend && *pp != forward_marker) {
+              while (pp < ppend && (dirty_si->marked_mask || (*pp != forward_marker))) {
                /* skip car field and handle cdr field */
                if (!dirty_si->marked_mask || marked(dirty_si, TO_PTR(pp))) {
                  pp += 1;
@@ -1915,7 +1946,7 @@ static void sweep_dirty() {
                pp += 2;
              }
            } else if (s == space_ephemeron) {
-              while (pp < ppend && *pp != forward_marker) {
+              while (pp < ppend && (dirty_si->marked_mask || (*pp != forward_marker))) {
                ptr p = TYPE(TO_PTR(pp), type_pair);

                if (!dirty_si->marked_mask || marked(dirty_si, p))
                  youngest = check_dirty_ephemeron(p, youngest);
@@ -1976,25 +2007,27 @@ static void resweep_dirty_weak_pairs() {
        if (dirty_si->dirty_bytes[d] <= MAX_CG) {
          youngest = ls->youngest[d];
          while (pp < ppend) {
-            p = *pp;
-            seginfo *si;
+            if (!dirty_si->marked_mask || marked(dirty_si, TO_PTR(pp))) {
+              p = *pp;
+              seginfo *si;

              /* handle car field */
              if (!IMMEDIATE(p) && (si = MaybeSegInfo(ptr_get_segment(p))) != NULL) {
                if (si->old_space) {
-                  if (marked(si, p)) {
+                  if (new_marked(si, p)) {
                    youngest = TARGET_GENERATION(si);
                  } else if (FORWARDEDP(p, si)) {
                    IGEN newpg;
                    *pp = FWDADDRESS(p);
                    newpg = TARGET_GENERATION(si);
                    if (newpg < youngest) youngest = newpg;
+                  } else {
+                    *pp = Sbwp_object;
+                  }
                } else {
-                  *pp = Sbwp_object;
+                  IGEN pg = si->generation;
+                  if (pg < youngest) youngest = pg;
                }
-              } else {
-                IGEN pg = si->generation;
-                if (pg < youngest) youngest = pg;
              }
            }
@@ -2074,19 +2107,30 @@ static void add_trigger_ephemerons_to_pending(ptr pe) {
  ephemeron_add(&pending_ephemerons, pe);
}

-static void check_ephemeron(ptr pe, IGEN from_g) {
+static void check_ephemeron(ptr pe) {
  ptr p;
  seginfo *si;
+  IGEN from_g;
  PUSH_BACKREFERENCE(pe);

  EPHEMERONNEXT(pe) = 0;
  EPHEMERONPREVREF(pe) = 0;

+  from_g = GENERATION(pe);
+
  p = Scar(pe);
  if (!IMMEDIATE(p) && (si = MaybeSegInfo(ptr_get_segment(p))) != NULL && si->old_space) {
-    if (marked(si, p)) {
+    if (new_marked(si, p)) {
+#ifndef NO_DIRTY_NEWSPACE_POINTERS
+      IGEN tg = TARGET_GENERATION(si);
+      if (tg < from_g) S_record_new_dirty_card(&INITCAR(pe), tg);
+#endif
      relocate_impure(&INITCDR(pe), from_g);
    } else if (FORWARDEDP(p, si)) {
+#ifndef NO_DIRTY_NEWSPACE_POINTERS
+      IGEN tg = TARGET_GENERATION(si);
+      if (tg < from_g) S_record_new_dirty_card(&INITCAR(pe), tg);
+#endif
      INITCAR(pe) = FWDADDRESS(p);
      relocate_impure(&INITCDR(pe), from_g);
    } else {
@@ -2101,14 +2145,14 @@ static void check_ephemeron(ptr pe) {
  POP_BACKREFERENCE();
}

-static void check_pending_ephemerons(IGEN from_g) {
+static void check_pending_ephemerons() {
  ptr pe, next_pe;

  pe = pending_ephemerons;
  pending_ephemerons = 0;

  while (pe != 0) {
    next_pe = EPHEMERONNEXT(pe);
-    check_ephemeron(pe, from_g);
+    check_ephemeron(pe);
    pe = next_pe;
  }
}
@@ -2126,7 +2170,7 @@ static IGEN check_dirty_ephemeron(ptr pe, IGEN youngest) {
  p = Scar(pe);
  if (!IMMEDIATE(p) && (si = MaybeSegInfo(ptr_get_segment(p))) != NULL) {
    if (si->old_space) {
-      if (marked(si, p)) {
+      if (new_marked(si, p)) {
        relocate_dirty(&INITCDR(pe), youngest);
      } else if (FORWARDEDP(p, si)) {
        INITCAR(pe) = GET_FWDADDRESS(p);
@@ -2225,8 +2269,8 @@ void copy_and_clear_list_bits(seginfo *oldspacesegments) {

  for (si = oldspacesegments; si != NULL; si = si->next) {
    if (si->list_bits) {
-      if ((si->generation == 0) && !si->marked_mask) {
-        /* drop generation-0 bits, because probably the relevant pairs
+      if ((si->generation == 1) && !si->marked_mask) {
+        /* drop (former) generation-0 bits, because probably the relevant pairs
           were short-lived, and it's ok to recompute them if needed */
      } else {
        if (si->marked_mask) {
@@ -2245,6 +2289,7 @@ void copy_and_clear_list_bits(seginfo *oldspacesegments) {
          find_room_voidp(space_data, bits_si->generation, ptr_align(segment_bitmap_bytes), copied_bits);
          memcpy_aligned(copied_bits, si->list_bits, segment_bitmap_bytes);
          si->list_bits = copied_bits;
+          S_G.bitmask_overhead[bits_si->generation] += ptr_align(segment_bitmap_bytes);
        }
      }


@@ -192,8 +192,11 @@ void S_immobilize_object(x) ptr x; {
  /* Try a little to to support cancellation of segment-level
   * immobilzation --- but we don't try too hard */
-  if (si->must_mark < MUST_MARK_INFINITY)
+  if (si->must_mark < MUST_MARK_INFINITY) {
    si->must_mark++;
+    if (si->generation == 0)
+      S_G.must_mark_gen0 = 1;
+  }

  /* Note: for `space_new`, `must_mark` doesn't really mean all
     objects must be marked; only those in the locked list must be
@@ -297,8 +300,11 @@ void Slock_object(x) ptr x; {
  tc_mutex_acquire()
  S_pants_down += 1;
  /* immobilize */
-  if (si->must_mark < MUST_MARK_INFINITY)
+  if (si->must_mark < MUST_MARK_INFINITY) {
    si->must_mark++;
+    if (si->generation == 0)
+      S_G.must_mark_gen0 = 1;
+  }
  /* add x to locked list. remove from unlocked list */
  S_G.locked_objects[g] = S_cons_in((g == 0 ? space_new : space_impure), g, x, S_G.locked_objects[g]);
  if (S_G.enable_object_counts) {
@@ -512,6 +518,7 @@ static void segment_tell(seg) uptr seg; {
  if ((si = MaybeSegInfo(seg)) == NULL) {
    printf(" out of heap bounds\n");
  } else {
+    printf(" si=%p", si);
    printf(" generation=%d", si->generation);
    s = si->space;
    s1 = si->space;
@@ -671,7 +678,10 @@ void S_check_heap(aftergc, mcg) IBOOL aftergc; IGEN mcg; {
              if (psi != NULL) {
                if ((psi->space == space_empty)
                    || psi->old_space
-                    || (psi->marked_mask && !(psi->marked_mask[segment_bitmap_byte(p)] & segment_bitmap_bit(p)))) {
+                    || (psi->marked_mask && !(psi->marked_mask[segment_bitmap_byte(p)] & segment_bitmap_bit(p))
+                        /* corner case: a continuation in space_count_pure can refer to code via CLOSENTRY
+                           where the entry point doesn't have a mark bit: */
+                        && !((s == space_count_pure) && (psi->space == space_code)))) {
                  S_checkheap_errors += 1;
                  printf("!!! dangling reference at "PHtx" to "PHtx"%s\n", (ptrdiff_t)pp1, (ptrdiff_t)p, (aftergc ? " after gc" : ""));
                  printf("from: "); segment_tell(seg);
@@ -870,7 +880,7 @@ static void check_dirty() {
            S_checkheap_errors += 1;
            printf("!!! (check_dirty): dirty byte = %d for segment "PHtx" in %d -> %d dirty list\n", mingval, (ptrdiff_t)(si->number), from_g, to_g);
          }
-          if (s != space_new && s != space_impure && s != space_symbol && s != space_port
+          if (s != space_new && s != space_impure && s != space_count_impure && s != space_symbol && s != space_port
              && s != space_impure_record && s != space_impure_typed_object && s != space_immobile_impure
              && s != space_weakpair && s != space_ephemeron) {
            S_checkheap_errors += 1;
@@ -989,7 +999,9 @@ ptr S_do_gc(IGEN max_cg, IGEN min_tg, IGEN max_tg, ptr count_roots) {
            RTDCOUNTSIT(counts, new_g) = RTDCOUNTSIT(counts, old_g); RTDCOUNTSIT(counts, old_g) = 0;
          }
        }
+#ifndef WIN32
        S_child_processes[new_g] = S_child_processes[old_g];
+#endif

        /* change old_g dirty bytes in static generation to new_g; splice list of old_g
           seginfos onto front of new_g seginfos */
@@ -1061,9 +1073,11 @@ ptr S_gc(ptr tc, IGEN max_cg, IGEN min_tg, IGEN max_tg, ptr count_roots) {
      || S_G.enable_object_counts || S_G.enable_object_backreferences
      || (count_roots != Sfalse))
    return S_gc_oce(tc, max_cg, min_tg, max_tg, count_roots);
-  else if (max_cg == 0 && min_tg == 1 && max_tg == 1 && S_G.locked_objects[0] == Snil) {
+  else if (max_cg == 0 && min_tg == 1 && max_tg == 1
+           && !S_G.must_mark_gen0 && S_G.locked_objects[0] == Snil
+           && (S_G.min_mark_gen > 0)) {
    S_gc_011(tc);
-    return Sfalse;
+    return Svoid;
  } else
    return S_gc_ocd(tc, max_cg, min_tg, max_tg, Sfalse);
}


@@ -102,6 +102,7 @@ EXTERN struct S_G_struct {
  iptr bytes_left[static_generation+1][max_real_space+1];
  uptr bytes_of_space[static_generation+1][max_real_space+1];
  uptr bytes_of_generation[static_generation+1];
+  uptr bitmask_overhead[static_generation+1];
  uptr g0_bytes_after_last_gc;
  uptr collect_trip_bytes;
  ptr nonprocedure_code;
@@ -142,6 +143,8 @@ EXTERN struct S_G_struct {
  ptr gcbackreference[static_generation+1];
  IGEN prcgeneration;
  uptr bytes_finalized;
+  dirtycardinfo *new_dirty_cards;
+  IBOOL must_mark_gen0;

  /* intern.c */
  iptr oblist_length;


@@ -841,6 +841,10 @@ void S_pb_interp(ptr tc, void *bytecode) {
      regs[Cretval] = ((pb_uptr_int32_int32_uptr_uptr_t)proc)(regs[Carg1], regs[Carg2], regs[Carg3],
                                                              regs[Carg4]);
      break;
+    case pb_call_uptr_int32_int32_int32_uptr:
+      regs[Cretval] = ((pb_uptr_int32_int32_int32_uptr_t)proc)(regs[Carg1], regs[Carg2], regs[Carg3],
+                                                               regs[Carg4]);
+      break;
    case pb_call_uptr_int32_voids_uptr_uptr:
      regs[Cretval] = ((pb_uptr_int32_voids_uptr_uptr_t)proc)(regs[Carg1], TO_VOIDP(regs[Carg2]), regs[Carg3],
                                                              regs[Carg4]);


@@ -49,6 +49,7 @@ void S_promote_to_multishot(k) ptr k; {
static void split(k, s) ptr k; ptr *s; {
  iptr m, n;
  seginfo *si;
+  ISPC spc;

  tc_mutex_acquire()

  /* set m to size of lower piece, n to size of upper piece */
@@ -56,8 +57,11 @@ static void split(k, s) ptr k; ptr *s; {
  n = CONTCLENGTH(k) - m;
  si = SegInfo(ptr_get_segment(k));

+  spc = si->space;
+  if (spc != space_new) spc = space_continuation; /* to avoid space_count_pure */
+
  /* insert a new continuation between k and link(k) */
-  CONTLINK(k) = S_mkcontinuation(si->space,
+  CONTLINK(k) = S_mkcontinuation(spc,
                                 si->generation,
                                 CLOSENTRY(k),
                                 CONTSTACK(k),


@@ -298,6 +298,12 @@ typedef struct _bucket_pointer_list {
 #define DIRTYSET(lhs,rhs) S_dirty_set(lhs, rhs);
 
+typedef struct _dirtycardinfo {
+  uptr card;
+  IGEN youngest;
+  struct _dirtycardinfo *next;
+} dirtycardinfo;
+
 /* derived accessors/constructors */
 #define FWDMARKER(p) FORWARDMARKER((uptr)UNTYPE_ANY(p))
 #define FWDADDRESS(p) FORWARDADDRESS((uptr)UNTYPE_ANY(p))
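Together with the `new_dirty_cards` field added to `S_G` above, the `dirtycardinfo` record gives the collector a place to queue cards whose dirty state must be re-registered once promotion finishes. A minimal, self-contained sketch of how such a list might be built; the `record_new_dirty_card` helper and the local list head are hypothetical stand-ins for whatever consumes `S_G.new_dirty_cards`:

    #include <stdlib.h>
    #include <stdint.h>

    typedef uintptr_t uptr;   /* stand-ins for the real typedefs */
    typedef int IGEN;

    typedef struct _dirtycardinfo {
      uptr card;                     /* card being remembered */
      IGEN youngest;                 /* youngest generation the card points into */
      struct _dirtycardinfo *next;
    } dirtycardinfo;

    static dirtycardinfo *new_dirty_cards; /* stands in for S_G.new_dirty_cards */

    /* Hypothetical helper: remember a card and the youngest generation it
       references, deferring the actual dirty-byte update until later. */
    static void record_new_dirty_card(uptr card, IGEN youngest) {
      dirtycardinfo *ndc = malloc(sizeof(dirtycardinfo));
      if (ndc == NULL) return;       /* sketch only; real code would handle this */
      ndc->card = card;
      ndc->youngest = youngest;
      ndc->next = new_dirty_cards;
      new_dirty_cards = ndc;
    }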


@@ -62,7 +62,7 @@ InstallLZ4Target=
 # no changes should be needed below this point                               #
 ###############################################################################
 
-Version=csv9.5.3.36
+Version=csv9.5.3.37
 Include=boot/$m
 PetiteBoot=boot/$m/petite.boot
 SchemeBoot=boot/$m/scheme.boot


@@ -4804,6 +4804,24 @@
            (eq? #!bwp (ephemeron-key e))
            (eq? #!bwp (ephemeron-value e)))))))))
 
+  ;; ----------------------------------------
+  ;; Check interaction of mutation and incremental generation promotion
+  (with-interrupts-disabled
+   (let ([key "key"])
+     (let ([e (ephemeron-cons key #f)])
+       (collect 0 1 1)
+       (let ([key2 (gensym key)])
+         ;; e is gen 1, key2 is gen 0:
+         (set-car! e key2)
+         (collect 1 1 2)
+         ;; Now, e is gen 2, key2 is gen 1
+         (and (eq? (car e) key2)
+              (begin
+                (collect 1 2 2)
+                ;; Check that the GC updates the reference to `key2` in `e`:
+                (eq? (car e) key2)))))))
+
  ;; ----------------------------------------
  ;; Check fasl:
  (let ([s (gensym)])


@@ -5645,7 +5645,7 @@ evaluating module init
   (let ([b0-0 (bytes-allocated 0)]
         [b1-0 (bytes-allocated 1)]
         [bm-0 (bytes-allocated (collect-maximum-generation))])
-    (let* ([v (make-vector 2000)] [n (compute-size v)])
+    (let* ([v (make-vector 20000)] [n (compute-size v)])
       (let ([b0-1 (bytes-allocated 0)]
             [b1-1 (bytes-allocated 1)]
             [bm-1 (bytes-allocated (collect-maximum-generation))])


@@ -3989,8 +3989,9 @@ misc.mo:Expected error in mat compute-size-increments: "compute-size-increments:
 misc.mo:Expected error in mat collect+compute-size-increments: "collect: invalid counting-roots list not-a-list".
 misc.mo:Expected error in mat collect+compute-size-increments: "collect: invalid counting-roots list 0".
 misc.mo:Expected error in mat collect+compute-size-increments: "collect: invalid generation not-a-generation".
-misc.mo:Expected error in mat collect+compute-size-increments: "collect: invalid target generation not-a-generation for generation 0".
-misc.mo:Expected error in mat collect+compute-size-increments: "collect: invalid target generation 0 for generation 1".
+misc.mo:Expected error in mat collect+compute-size-increments: "collect: invalid minimum target generation not-a-generation for generation 0 and maximum target generation 0".
+misc.mo:Expected error in mat collect+compute-size-increments: "collect: invalid maximum target generation not-a-generation for generation 0".
+misc.mo:Expected error in mat collect+compute-size-increments: "collect: invalid maximum target generation 0 for generation 1".
 misc.mo:Expected error in mat compute-composition: "compute-composition: invalid generation -1".
 misc.mo:Expected error in mat compute-composition: "compute-composition: invalid generation "static"".
 misc.mo:Expected error in mat make-object-finder: "make-object-finder: 17 is not a procedure".


@@ -357,7 +357,7 @@
   ;; ---------------------------------------------------------------------
   ;; Version and machine types:
 
-  (define-constant scheme-version #x09050324)
+  (define-constant scheme-version #x09050325)
 
   (define-syntax define-machine-types
     (lambda (x)
@@ -2082,11 +2082,11 @@
 (define-constant unscaled-shot-1-shot-flag -1)
 (define-constant scaled-shot-1-shot-flag
   (* (constant unscaled-shot-1-shot-flag) (constant ptr-bytes)))
-;; opportunistic--1-shot-flag is in the continuation length field for
+;; opportunistic-1-shot-flag is in the continuation length field for
 ;; a one-shot continuation that is only treated a 1-shot when
 ;; it's contiguous with the current stack when called, in which case
 ;; the continuation can be just merged back with the current stack
-(define-constant opportunistic-1-shot-flag 0)
+(define-constant opportunistic-1-shot-flag (* -2 (constant ptr-bytes)))
 
 ;;; underflow limit determines how much we're willing to copy on
 ;;; stack underflow/continuation invocation
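Moving `opportunistic-1-shot-flag` from 0 to `(* -2 (constant ptr-bytes))` presumably keeps the sentinel from colliding with a genuine (zero) stack length and places it next to the other negative sentinel, `scaled-shot-1-shot-flag`. A self-contained sketch of how code might distinguish the two sentinels from an ordinary length; the flag values are recomputed locally from the constants above, and the helper names are hypothetical:

    #include <stdint.h>
    typedef intptr_t iptr;   /* stand-in for the real iptr typedef */

    /* Recomputed locally: unscaled flags -1 and -2, scaled by pointer size. */
    #define SCALED_SHOT_1_SHOT_FLAG    ((iptr)(-1 * (iptr)sizeof(void*)))
    #define OPPORTUNISTIC_1_SHOT_FLAG  ((iptr)(-2 * (iptr)sizeof(void*)))

    /* A continuation with this length has already been shot. */
    static int is_shot(iptr stack_length) {
      return stack_length == SCALED_SHOT_1_SHOT_FLAG;
    }

    /* A continuation with this length is treated as one-shot only when it
       turns out to be contiguous with the current stack at call time. */
    static int is_opportunistic_one_shot(iptr stack_length) {
      return stack_length == OPPORTUNISTIC_1_SHOT_FLAG;
    }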
@@ -3238,6 +3238,7 @@
       [uptr int32 int32 uptr uptr]
       [uptr int32 void* uptr uptr]
       [uptr uptr uptr uptr uptr]
+      [uptr int32 int32 int32 uptr]
       [uptr uptr void* uptr uptr]
       [uptr uptr uptr uptr uptr int32]
       [uptr uptr uptr uptr uptr uptr]


@@ -18,6 +18,7 @@
 ;; Currently supported traversal modes:
 ;;  - copy
 ;;  - sweep
+;;  - sweep-in-old ; like sweep, but don't update impure
 ;;  - mark
 ;;  - self-test : check immediate pointers only for self references
 ;;  - size : immediate size, so does not recur
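The new `sweep-in-old` mode drives the generated `sweep_object_in_old` function (see the added `generate` call near the end of this file's changes): pure fields go through `relocate_pure`, while impure fields are routed through `relocate_indirect`, which, judging from the mode's description above ("don't update impure"), processes the referenced object without rewriting the field in place. A rough, self-contained sketch of the shape of the emitted C; the object layout and macro bodies below are stand-ins, not the real GC macros:

    typedef void *ptr;

    /* Stub stand-ins that only mark the call sites: the real relocate_pure
       forwards *(pp) when it points into oldspace, and the real
       relocate_indirect presumably ensures the target is swept without
       updating the field itself. */
    #define relocate_pure(pp)     ((void)(pp))
    #define relocate_indirect(p)  ((void)(p))

    typedef struct example_obj {    /* hypothetical two-field object */
      ptr pure_field;
      ptr impure_field;
    } example_obj;

    static void sweep_example_in_old(example_obj *x) {
      relocate_pure(&x->pure_field);       /* pure field: update in place */
      relocate_indirect(x->impure_field);  /* impure field: leave to dirty handling */
    }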
@@ -201,9 +202,11 @@
        [else space-continuation]))
     (vfasl-fail "closure")
     (size size-continuation)
-    (mark one-bit counting-root)
     (case-mode
      [self-test]
+     [mark
+      (copy-stack-length continuation-stack-length continuation-stack-clength)
+      (mark one-bit counting-root)]
      [else
       (copy-clos-code code)
       (copy-stack-length continuation-stack-length continuation-stack-clength)
@@ -214,14 +217,15 @@
        [(== (continuation-stack-length _) scaled-shot-1-shot-flag)]
        [else
         (case-mode
-         [sweep
-          (when (OLDSPACE (continuation-stack _))
+         [(sweep)
+          (define stk : ptr (continuation-stack _))
+          (when (&& (!= stk (cast ptr 0)) (OLDSPACE stk))
            (set! (continuation-stack _)
                  (copy_stack (continuation-stack _)
                              (& (continuation-stack-length _))
                              (continuation-stack-clength _))))]
         [else])
-        (count countof-stack (continuation-stack-length _) 1 [sweep measure])
+        (count countof-stack (continuation-stack-length _) 1 [measure])
        (trace-pure continuation-link)
        (trace-return continuation-return-address (continuation-return-address _))
        (case-mode
@@ -607,13 +611,11 @@
          [(&& (!= cdr_p _)
               (&& (== (TYPEBITS cdr_p) type_pair)
                   (&& (!= (set! qsi (MaybeSegInfo (ptr_get_segment cdr_p))) NULL)
-                      (&& (-> qsi old_space)
-                          (&& (== (-> qsi space) (-> si space))
-                              (&& (!= (FWDMARKER cdr_p) forward_marker)
-                                  (&& (! (-> qsi use_marks))
-                                      ;; Checking `marked_mask`, too, in
-                                      ;; case the pair is locked
-                                      (! (-> qsi marked_mask)))))))))
+                      (&& (== qsi si)
+                          (&& (!= (FWDMARKER cdr_p) forward_marker)
+                              ;; Checking `marked_mask`, in
+                              ;; case the pair is locked
+                              (! (-> qsi marked_mask)))))))
           (check_triggers qsi)
           (size size-pair 2)
           (define new_cdr_p : ptr (cast ptr (+ (cast uptr _copy_) size_pair)))
@@ -672,7 +674,7 @@
   (case-mode
    [(copy vfasl-copy)
     (SETCLOSCODE _copy_ code)]
-   [(sweep)
+   [(sweep sweep-in-old)
     (unless-code-relocated
      (SETCLOSCODE _copy_ code))]
    [(vfasl-sweep)
@@ -684,13 +686,13 @@
 (define-trace-macro (copy-stack-length continuation-stack-length continuation-stack-clength)
   (case-mode
-   [copy
+   [(copy mark)
     ;; Don't promote general one-shots, but promote opportunistic one-shots
     (cond
       [(== (continuation-stack-length _) opportunistic-1-shot-flag)
        (set! (continuation-stack-length _copy_) (continuation-stack-clength _))
        ;; May need to recur at end to promote link:
-       (set! conts_to_promote (S_cons_in space_new 0 new_p conts_to_promote))]
+       (set! conts_to_promote (S_cons_in space_new 0 _copy_ conts_to_promote))]
       [else
        (copy continuation-stack-length)])]
    [else
@@ -700,7 +702,7 @@
   (case-mode
    [(copy measure)
     (trace ref)]
-   [sweep
+   [(sweep sweep-in-old)
     (trace ref) ; can't trace `val` directly, because we need an impure relocate
     (define val : ptr (ref _))]
    [vfasl-copy
@@ -709,7 +711,7 @@
 (define-trace-macro (trace-symcode symbol-pvalue val)
   (case-mode
-   [sweep
+   [(sweep sweep-in-old)
     (define code : ptr (cond
                          [(Sprocedurep val) (CLOSCODE val)]
                          [else (SYMCODE _)]))
@@ -780,7 +782,7 @@
    [on]
    [off
     (case-mode
-     [(sweep self-test)
+     [(sweep sweep-in-old self-test)
      ;; Bignum pointer mask may need forwarding
      (trace-pure (record-type-pm rtd))
      (set! num (record-type-pm rtd))]
@@ -895,6 +897,9 @@
                              (cast iptr (port-buffer _))))
        (trace port-buffer)
        (set! (port-last _) (cast ptr (+ (cast iptr (port-buffer _)) n))))]
+     [sweep-in-old
+      (when (& (cast uptr _tf_) flag)
+        (trace port-buffer))]
      [else
       (trace-nonself port-buffer)]))
@@ -906,7 +911,7 @@
   (define tc : ptr (cast ptr (offset _)))
   (when (!= tc (cast ptr 0))
     (case-mode
-     [sweep
+     [(sweep)
       (let* ([old_stack : ptr (tc-scheme-stack tc)])
         (when (OLDSPACE old_stack)
           (let* ([clength : iptr (- (cast uptr (SFP tc)) (cast uptr old_stack))])
@@ -914,7 +919,6 @@
             (set! (tc-scheme-stack tc) (copy_stack old_stack
                                                    (& (tc-scheme-stack-size tc))
                                                    (+ clength (sizeof ptr))))
-            (count countof-stack (tc-scheme-stack-size tc) 1 sweep)
             (set! (tc-sfp tc) (cast ptr (+ (cast uptr (tc-scheme-stack tc)) clength)))
             (set! (tc-esp tc) (cast ptr (- (+ (cast uptr (tc-scheme-stack tc))
                                               (tc-scheme-stack-size tc))
@@ -1027,11 +1031,14 @@
    (define co : iptr (+ (ENTRYOFFSET xcp) (- (cast uptr xcp) (cast uptr (TO_PTR (ENTRYOFFSETADDR xcp))))))
    (define c_p : ptr (cast ptr (- (cast uptr xcp) co)))
    (case-mode
-    [sweep
+    [(sweep sweep-in-old)
      (define x_si : seginfo* (SegInfo (ptr_get_segment c_p)))
      (when (-> x_si old_space)
        (relocate_code c_p x_si)
-       (set! field (cast ptr (+ (cast uptr c_p) co))))]
+       (case-mode
+        [sweep-in-old]
+        [else
+         (set! field (cast ptr (+ (cast uptr c_p) co)))]))]
     [else
      (trace-pure (just c_p))]))
@@ -1042,7 +1049,7 @@
    [else
     (define t : ptr (code-reloc _))
     (case-mode
-     [(sweep vfasl-sweep)
+     [(sweep sweep-in-old vfasl-sweep)
       (define m : iptr (reloc-table-size t))
       (define oldco : ptr (reloc-table-code t))]
      [else
@@ -1154,7 +1161,7 @@
 (define-trace-macro (and-purity-sensitive-mode e)
   (case-mode
-   [sweep e]
+   [(sweep sweep-in-old) e]
    [else 0]))
 
 (define-trace-macro (when-vfasl e)
@@ -1342,6 +1349,7 @@
                  [(sweep) (if (lookup 'as-dirty? config #f)
                               "IGEN"
                               "void")]
+                 [(sweep-in-old) "void"]
                  [else "void"])
                name
                (case (lookup 'mode config)
@@ -1359,9 +1367,7 @@
                  [(sweep)
                   (cond
                     [(lookup 'as-dirty? config #f) ", IGEN youngest"]
-                    [(and (lookup 'from-g-only-counting? config #f)
-                          (not (lookup 'counts? config #f)))
-                     ", IGEN UNUSED(from_g)"]
+                    [(lookup 'no-from-g? config #f) ""]
                     [else ", IGEN from_g"])]
                  [else ""]))
         (let ([body
@@ -1529,7 +1535,7 @@
      (code (case (and (not (lookup 'as-dirty? config #f))
                       (not (lookup 'rtd-relocated? config #f))
                       (lookup 'mode config))
-             [(copy sweep mark)
+             [(copy sweep sweep-in-old mark)
              (code
               "/* Relocate to make sure we aren't using an oldspace descriptor"
               " that has been overwritten by a forwarding marker, but don't loop"
@@ -1638,7 +1644,7 @@
         (statements (cons `(copy-bytes ,offset (* ptr_bytes ,len))
                           (cdr l))
                     config)]
-       [(sweep measure vfasl-sweep)
+       [(sweep measure sweep-in-old vfasl-sweep)
        (code
         (loop-over-pointers
          (field-expression offset config "p" #t)
@@ -2057,6 +2063,7 @@
   (define mode (lookup 'mode config))
   (cond
     [(or (eq? mode 'sweep)
+        (eq? mode 'sweep-in-old)
         (eq? mode 'vfasl-sweep)
         (and early? (or (eq? mode 'copy)
                         (eq? mode 'mark))))
@@ -2075,6 +2082,10 @@
      (case mode
        [(vfasl-sweep)
         (format "vfasl_relocate(vfi, &~a);" e)]
+       [(sweep-in-old)
+        (if (eq? purity 'pure)
+            (format "relocate_pure(&~a);" e)
+            (format "relocate_indirect(~a);" e))]
        [else
         (if (lookup 'as-dirty? config #f)
             (begin
@@ -2286,6 +2297,7 @@
    (if (memq 'no-clear flags)
        (format "~a   /* no clearing needed */" inset)
        (format "~a   memset(~a->marked_mask, 0, segment_bitmap_bytes);" inset si))
+   (format "~a   S_G.bitmask_overhead[~a->generation] += ptr_align(segment_bitmap_bytes);" inset si)
    (format "~a }" inset)))
 
 (define (just-mark-bit-space? sp)
@@ -2464,6 +2476,9 @@
                          `((mode sweep)
                            (maybe-backreferences? ,count?)
                            (counts? ,count?))))
+  (print-code (generate "sweep_object_in_old"
+                        `((mode sweep-in-old)
+                          (maybe-backreferences? ,count?))))
   (print-code (generate "sweep_dirty_object"
                         `((mode sweep)
                           (maybe-backreferences? ,count?)
@@ -2486,7 +2501,7 @@
                                (as-dirty? #t)))
   (sweep1 'symbol)
   (sweep1 'symbol "sweep_dirty_symbol" '((as-dirty? #t)))
-  (sweep1 'thread "sweep_thread" '((from-g-only-counting? #t)))
+  (sweep1 'thread "sweep_thread" '((no-from-g? #t)))
   (sweep1 'port)
   (sweep1 'port "sweep_dirty_port" '((as-dirty? #t)))
   (sweep1 'closure "sweep_continuation" '((code-relocated? #t)


@@ -1224,7 +1224,7 @@
     (chmod [sig [(pathname sub-ufixnum) -> (void)]] [flags])
     (clear-input-port [sig [() (input-port) -> (void)]] [flags true])
     (clear-output-port [sig [() (output-port) -> (void)]] [flags true])
-    (collect [sig [() (sub-ufixnum) (sub-ufixnum ptr) (sub-ufixnum ptr ptr) (sub-ufixnum ptr ptr ptr) -> (void)]] [flags true])
+    (collect [sig [() (sub-ufixnum) (sub-ufixnum ptr) (sub-ufixnum ptr ptr) (sub-ufixnum ptr ptr ptr) -> (void/list)]] [flags true])
    (collect-rendezvous [sig [() -> (void)]] [flags])
    (collections [sig [() -> (uint)]] [flags unrestricted alloc])
    (compile [sig [(sub-ptr) (sub-ptr environment) -> (ptr ...)]] [flags])