path: root/boehm-gc/stubborn.c
/* 
 * Copyright 1988, 1989 Hans-J. Boehm, Alan J. Demers
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose,  provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
/* Boehm, July 31, 1995 5:02 pm PDT */


#include "gc_priv.h"

# ifdef STUBBORN_ALLOC
/* Stubborn object (hard to change, nearly immutable) allocation. */
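/* Illustrative client usage (a sketch only, not part of this file's	*/
/* interface; the node type and the variables "head" and "other"	*/
/* below are hypothetical):						*/
#if 0
    struct node { struct node * next; int val; };
    struct node * n;

    n = (struct node *)GC_malloc_stubborn(sizeof(struct node));
    n -> next = head;			/* initialize while still changeable */
    n -> val = 42;
    GC_end_stubborn_change((GC_PTR)n);	/* no pointer fields may change now   */
    /* ... later, should n need to be updated after all:		      */
    GC_change_stubborn((GC_PTR)n);
    n -> next = other;
    GC_end_stubborn_change((GC_PTR)n);
#endif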

extern ptr_t GC_clear_stack();	/* in misc.c, behaves like identity */

#define GENERAL_MALLOC(lb,k) \
    (GC_PTR)GC_clear_stack(GC_generic_malloc((word)lb, k))

/* Data structure representing immutable objects that 	*/
/* are still being initialized.				*/
/* This is a bit baroque in order to avoid acquiring	*/
/* the lock twice for a typical allocation.		*/

GC_PTR * GC_changing_list_start;

# ifdef THREADS
  VOLATILE GC_PTR * VOLATILE GC_changing_list_current;
# else
  GC_PTR * GC_changing_list_current;
# endif
	/* Points at last added element.  Also (ab)used for		*/
	/* synchronization.  Updates and reads are assumed atomic.	*/

GC_PTR * GC_changing_list_limit;
	/* Points at the last word of the buffer, which is always 0	*/
	/* All entries in (GC_changing_list_current,			*/
	/* GC_changing_list_limit] are 0				*/
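	/* For example, immediately after GC_stubborn_init with	*/
	/* INIT_SIZE == 10, GC_changing_list_current equals		*/
	/* GC_changing_list_start and all ten words, including the	*/
	/* one at GC_changing_list_limit, contain 0.			*/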


void GC_stubborn_init()
{
#   define INIT_SIZE 10

    GC_changing_list_start = (GC_PTR *)
    			GC_generic_malloc_inner(
    				(word)(INIT_SIZE * sizeof(GC_PTR)),
    				PTRFREE);
    if (GC_changing_list_start == 0) {
        GC_err_printf0("Insufficient space to start up\n");
        ABORT("GC_stubborn_init: out of space");
    }
    BZERO(GC_changing_list_start,
    	  INIT_SIZE * sizeof(GC_PTR));
    GC_changing_list_current = GC_changing_list_start;
    GC_changing_list_limit = GC_changing_list_start + INIT_SIZE - 1;
    * GC_changing_list_limit = 0;
}

/* Compact and possibly grow the changing list.  The old copy is	*/
/* left alone.	Lock must be held.					*/
/* When called, GC_changing_list_current == GC_changing_list_limit,	*/
/* which is one past the most recently added element.			*/
/* When we finish, GC_changing_list_current again points one past the	*/
/* last element.							*/
/* Invariant while this is running: GC_changing_list_current    	*/
/* points at a word containing 0.					*/
/* Returns FALSE on failure.						*/
GC_bool GC_compact_changing_list()
{
    register GC_PTR *p, *q;
    register word count = 0;
    word old_size = (char **)GC_changing_list_limit
    		    - (char **)GC_changing_list_start+1;
    		    /* The casts are needed as a workaround for an Amiga bug */
    register word new_size = old_size;
    GC_PTR * new_list;
    
    for (p = GC_changing_list_start; p < GC_changing_list_limit; p++) {
        if (*p != 0) count++;
    }
    if (2 * count > old_size) new_size = 2 * count;
    new_list = (GC_PTR *)
    		GC_generic_malloc_inner(
    				new_size * sizeof(GC_PTR), PTRFREE);
    		/* PTRFREE is a lie.  But we don't want the collector to  */
    		/* consider these.  We do want the list itself to be  	  */
    		/* collectable.						  */
    if (new_list == 0) return(FALSE);
    BZERO(new_list, new_size * sizeof(GC_PTR));
    q = new_list;
    for (p = GC_changing_list_start; p < GC_changing_list_limit; p++) {
        if (*p != 0) *q++ = *p;
    }
    GC_changing_list_start = new_list;
    GC_changing_list_limit = new_list + new_size - 1;
    GC_changing_list_current = q;
    return(TRUE);
}

/* Add p to changing list.  Clear p on failure.	*/
# define ADD_CHANGING(p) \
	{	\
	    register struct hblk * h = HBLKPTR(p);	\
	    register word index = PHT_HASH(h);	\
	    \
	    set_pht_entry_from_index(GC_changed_pages, index);	\
	}	\
	if (*GC_changing_list_current != 0 \
	    && ++GC_changing_list_current == GC_changing_list_limit) { \
	    if (!GC_compact_changing_list()) (p) = 0; \
	} \
	*GC_changing_list_current = p;
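/* Note: in this file ADD_CHANGING is expanded only while the allocator	*/
/* lock is held (LOCK() or a successful FASTLOCK()).  The argument p is	*/
/* evaluated more than once and may be assigned 0 on failure, so it		*/
/* must be a simple lvalue.							*/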

void GC_change_stubborn(p)
GC_PTR p;
{
    DCL_LOCK_STATE;
    
    DISABLE_SIGNALS();
    LOCK();
    ADD_CHANGING(p);
    UNLOCK();
    ENABLE_SIGNALS();
}
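/* Mark p as no longer changeable.  The hoped-for common case is	*/
/* handled without taking the lock: if p is still the most recently	*/
/* added entry and no compaction has intervened, its slot is simply	*/
/* cleared.  Otherwise we take the lock and search the whole		*/
/* changing list.							*/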

void GC_end_stubborn_change(p)
GC_PTR p;
{
#   ifdef THREADS
      register VOLATILE GC_PTR * my_current = GC_changing_list_current;
#   else
      register GC_PTR * my_current = GC_changing_list_current;
#   endif
    register GC_bool tried_quick;
    DCL_LOCK_STATE;
    
    if (*my_current == p) {
        /* Hopefully the normal case.					*/
        /* Compaction could not have been running when we started.	*/
        *my_current = 0;
#	ifdef THREADS
          if (my_current == GC_changing_list_current) {
            /* Compaction can't have run in the interim. 	*/
            /* We got away with the quick and dirty approach.   */
            return;
          }
          tried_quick = TRUE;
#	else
	  return;
#	endif
    } else {
        tried_quick = FALSE;
    }
    DISABLE_SIGNALS();
    LOCK();
    my_current = GC_changing_list_current;
    for (; my_current >= GC_changing_list_start; my_current--) {
        if (*my_current == p) {
            *my_current = 0;
            UNLOCK();
            ENABLE_SIGNALS();
            return;
        }
    }
    if (!tried_quick) {
        GC_err_printf1("Bad arg to GC_end_stubborn_change: 0x%lx\n",
        	       (unsigned long)p);
        ABORT("Bad arg to GC_end_stubborn_change");
    }
    UNLOCK();
    ENABLE_SIGNALS();
}

/* Allocate lb bytes of composite (pointerful) data	*/
/* No pointer fields may be changed after a call to	*/
/* GC_end_stubborn_change(p) where p is the value	*/
/* returned by GC_malloc_stubborn.			*/
# ifdef __STDC__
    GC_PTR GC_malloc_stubborn(size_t lb)
# else
    GC_PTR GC_malloc_stubborn(lb)
    size_t lb;
# endif
{
register ptr_t op;
register ptr_t *opp;
register word lw;
ptr_t result;
DCL_LOCK_STATE;

    if( SMALL_OBJ(lb) ) {
#       ifdef MERGE_SIZES
	  lw = GC_size_map[lb];
#	else
	  lw = ALIGNED_WORDS(lb);
#       endif
	opp = &(GC_sobjfreelist[lw]);
	FASTLOCK();
        if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
            FASTUNLOCK();
            result = GC_generic_malloc((word)lb, STUBBORN);
            goto record;
        }
        *opp = obj_link(op);
        obj_link(op) = 0;
        GC_words_allocd += lw;
        result = (GC_PTR) op;
        ADD_CHANGING(result);
        FASTUNLOCK();
        return((GC_PTR)result);
   } else {
       result = (GC_PTR)
          	GC_generic_malloc((word)lb, STUBBORN);
   }
record:
   DISABLE_SIGNALS();
   LOCK();
   ADD_CHANGING(result);
   UNLOCK();
   ENABLE_SIGNALS();
   return((GC_PTR)GC_clear_stack(result));
}


/* Functions analogous to GC_read_dirty and GC_page_was_dirty.	*/
/* Report pages on which stubborn objects were changed.		*/
void GC_read_changed()
{
    register GC_PTR * p = GC_changing_list_start;
    register GC_PTR q;
    register struct hblk * h;
    register word index;
    
    if (p == 0) /* initializing */ return;
    BCOPY(GC_changed_pages, GC_prev_changed_pages,
          (sizeof GC_changed_pages));
    BZERO(GC_changed_pages, (sizeof GC_changed_pages));
    for (; p <= GC_changing_list_current; p++) {
        if ((q = *p) != 0) {
            h = HBLKPTR(q);
            index = PHT_HASH(h);
            set_pht_entry_from_index(GC_changed_pages, index);
        }
    }
}

GC_bool GC_page_was_changed(h)
struct hblk * h;
{
    register word index = PHT_HASH(h);
    
    return(get_pht_entry_from_index(GC_prev_changed_pages, index));
}

/* Remove unreachable entries from changed list. Should only be	*/
/* called with mark bits consistent and lock held.		*/
void GC_clean_changing_list()
{
    register GC_PTR * p = GC_changing_list_start;
    register GC_PTR q;
    register ptr_t r;
    register unsigned long count = 0;
    register unsigned long dropped_count = 0;
    
    if (p == 0) /* initializing */ return;
    for (; p <= GC_changing_list_current; p++) {
        if ((q = *p) != 0) {
            count++;
            r = (ptr_t)GC_base(q);
            if (r == 0 || !GC_is_marked(r)) {
                *p = 0;
                dropped_count++;
	    }
        }
    }
#   ifdef PRINTSTATS
      if (count > 0) {
        GC_printf2("%lu entries in changing list: reclaimed %lu\n",
                  (unsigned long)count, (unsigned long)dropped_count);
      }
#   endif
}

#else /* !STUBBORN_ALLOC */

# ifdef __STDC__
    GC_PTR GC_malloc_stubborn(size_t lb)
# else
    GC_PTR GC_malloc_stubborn(lb)
    size_t lb;
# endif
{
    return(GC_malloc(lb));
}

/*ARGSUSED*/
void GC_end_stubborn_change(p)
GC_PTR p;
{
}

/*ARGSUSED*/
void GC_change_stubborn(p)
GC_PTR p;
{
}


#endif