/*
auto_zone.h
Automatic Garbage Collection.
Copyright (c) 2002-2010 Apple Inc. All rights reserved.
*/
#ifndef __AUTO_ZONE__
#define __AUTO_ZONE__
#include <stdint.h>
#include <stdio.h>
#include <sys/types.h>
#include <malloc/malloc.h>
__BEGIN_DECLS
typedef malloc_zone_t auto_zone_t;
// an auto zone carries a little more state but can be cast into a malloc_zone_t
extern auto_zone_t *auto_zone_create(const char *name);
// create a garbage collected zone. Can (theoretically) be done more than once.
// memory can be allocated with malloc_zone_malloc(result, size)
// by default, this memory must be freed with malloc_zone_free(result, ptr) as well (or with the generic free())
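// Usage sketch (illustrative only; the zone name and the 64-byte size are arbitrary
// assumptions, not part of the API contract):
#if 0
static void example_create_and_allocate(void) {
    auto_zone_t *zone = auto_zone_create("example collected zone");
    void *block = malloc_zone_malloc(zone, 64);     // allocate 64 bytes in the collected zone
    malloc_zone_free(zone, block);                  // explicit free is still permitted by default
}
#endif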
extern struct malloc_introspection_t auto_zone_introspection();
// access the zone introspection functions independently of any particular auto zone instance.
// this is used by tools to introspect a zone in another process.
// the introspection functions returned are required to do version checking on the zone.
#define AUTO_RETAINED_BLOCK_TYPE 0x100 /* zone enumerator returns only blocks with nonzero retain count */
/********* External (Global) Use counting ************/
extern void auto_zone_retain(auto_zone_t *zone, void *ptr);
extern unsigned int auto_zone_release(auto_zone_t *zone, void *ptr);
extern unsigned int auto_zone_retain_count(auto_zone_t *zone, const void *ptr);
// All pointers in the auto zone have an explicit retain count.
// Objects will not be collected while the retain count is non-zero.
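// Usage sketch (illustrative; `zone` and `obj` stand for a zone and a block allocated from it):
#if 0
static void example_external_use_counting(auto_zone_t *zone, void *obj) {
    auto_zone_retain(zone, obj);                            // pin obj: it will not be collected
    unsigned int rc = auto_zone_retain_count(zone, obj);    // observe the explicit retain count
    (void)rc;
    auto_zone_release(zone, obj);                           // once the count reaches zero, obj is collectable again
}
#endif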
/********* Object information ************/
extern const void *auto_zone_base_pointer(auto_zone_t *zone, const void *ptr);
// return the base of an interior pointer (or NULL).
extern boolean_t auto_zone_is_valid_pointer(auto_zone_t *zone, const void *ptr);
// is this a pointer to the base of an allocated block?
extern size_t auto_zone_size(auto_zone_t *zone, const void *ptr);
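// Usage sketch (illustrative; `interior` is assumed to point somewhere inside a block
// allocated from `zone`):
#if 0
static void example_block_info(auto_zone_t *zone, const void *interior) {
    const void *base = auto_zone_base_pointer(zone, interior);     // base of the enclosing block, or NULL
    if (base != NULL && auto_zone_is_valid_pointer(zone, base)) {
        size_t block_size = auto_zone_size(zone, base);            // size of the enclosing allocation
        (void)block_size;
    }
}
#endif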
/********* Write-barrier ************/
extern boolean_t auto_zone_set_write_barrier(auto_zone_t *zone, const void *dest, const void *new_value);
// must be used when an object field/slot in the auto zone is set to another object in the auto zone
// returns true if the dest was a valid target whose write-barrier was set
boolean_t auto_zone_atomicCompareAndSwap(auto_zone_t *zone, void *existingValue, void *newValue, void *volatile *location, boolean_t isGlobal, boolean_t issueBarrier);
// Atomically update a location with a new GC value. These use OSAtomicCompareAndSwapPtr{Barrier} with appropriate write-barrier interlocking logic.
boolean_t auto_zone_atomicCompareAndSwapPtr(auto_zone_t *zone, void *existingValue, void *newValue, void *volatile *location, boolean_t issueBarrier);
// Atomically update a location with a new GC value. These use OSAtomicCompareAndSwapPtr{Barrier} with appropriate write-barrier interlocking logic.
// This version checks location, and if it points into global storage, registers a root.
extern void *auto_zone_write_barrier_memmove(auto_zone_t *zone, void *dst, const void *src, size_t size);
// copy content from an arbitrary source area to an arbitrary destination area,
// marking the write barrier if necessary
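// Usage sketch (illustrative; `example_node_t` is hypothetical, and the fall-back plain
// store for destinations outside the collected heap is an assumption):
#if 0
typedef struct example_node { struct example_node *child; } example_node_t;
static void example_store_with_barrier(auto_zone_t *zone, example_node_t *parent, example_node_t *child) {
    if (!auto_zone_set_write_barrier(zone, &parent->child, child))
        parent->child = child;      // destination was not a collected block; a plain store suffices
}
#endif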
/********* Statistics ************/
typedef uint64_t auto_date_t;
typedef struct {
    auto_date_t total_duration;
    auto_date_t scan_duration;
    auto_date_t enlivening_duration;
    auto_date_t finalize_duration;
    auto_date_t reclaim_duration;
} auto_collection_durations_t;
typedef struct {
    /* Memory usage */
    malloc_statistics_t malloc_statistics;
    /* GC stats */
    // version 0
    uint32_t version;       // set to 1 before calling
    /* Where a field is an array, index 0 is for full collections, index 1 for generational */
    size_t num_collections[2];
    boolean_t last_collection_was_generational;
    size_t bytes_in_use_after_last_collection[2];
    size_t bytes_allocated_after_last_collection[2];
    size_t bytes_freed_during_last_collection[2];
    // durations
    auto_collection_durations_t total[2];       // running total of each field
    auto_collection_durations_t last[2];        // most recent result
    auto_collection_durations_t maximum[2];     // per-item maximum of each field; thus total != scan + finalize ...
    // version 1 additions
    size_t thread_collections_total;
    size_t thread_blocks_recovered_total;
    size_t thread_bytes_recovered_total;
} auto_statistics_t;
extern void auto_zone_statistics(auto_zone_t *zone, auto_statistics_t *stats); // set version to 0
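// Usage sketch (illustrative; the struct comment above says to set version to 1 while the
// function comment says 0, so the version handling here is an assumption):
#if 0
static void example_print_statistics(auto_zone_t *zone) {
    auto_statistics_t stats = { .version = 0 };     // request the base (version 0) fields
    auto_zone_statistics(zone, &stats);
    printf("full collections: %lu, generational: %lu\n",
           (unsigned long)stats.num_collections[0],
           (unsigned long)stats.num_collections[1]);
}
#endif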
/********* Garbage Collection ************/
enum {
    AUTO_COLLECT_RATIO_COLLECTION = (0 << 0),           // run generational or full depending on applying AUTO_COLLECTION_RATIO
    AUTO_COLLECT_GENERATIONAL_COLLECTION = (1 << 0),    // collect young objects. Internal only.
    AUTO_COLLECT_FULL_COLLECTION = (2 << 0),            // collect entire heap. Internal only.
    AUTO_COLLECT_EXHAUSTIVE_COLLECTION = (3 << 0),      // run full collections until object count stabilizes.
    AUTO_COLLECT_SYNCHRONOUS = (1 << 2),                // block caller until scanning is finished.
    AUTO_COLLECT_IF_NEEDED = (1 << 3),                  // only collect if AUTO_COLLECTION_THRESHOLD exceeded.
};
typedef uint32_t auto_collection_mode_t;
enum {
    AUTO_LOG_COLLECTIONS = (1 << 1),    // log whenever a collection occurs
    AUTO_LOG_REGIONS = (1 << 4),        // log whenever a new region is allocated
    AUTO_LOG_UNUSUAL = (1 << 5),        // log unusual circumstances
    AUTO_LOG_WEAK = (1 << 6),           // log weak reference manipulation
    AUTO_LOG_ALL = (~0u),
    AUTO_LOG_NONE = 0
};
typedef uint32_t auto_log_mask_t;
enum {
    AUTO_HEAP_HOLES_SHRINKING = 1,      // total size of holes is approaching zero
    AUTO_HEAP_HOLES_EXHAUSTED = 2,      // all holes exhausted, will use hitherto unused memory in "subzone"
    AUTO_HEAP_SUBZONE_EXHAUSTED = 3,    // will add subzone
    AUTO_HEAP_REGION_EXHAUSTED = 4,     // no more subzones available, need to add region
    AUTO_HEAP_ARENA_EXHAUSTED = 5,      // arena exhausted. (64-bit only)
};
typedef uint32_t auto_heap_growth_info_t;
typedef struct auto_zone_cursor *auto_zone_cursor_t;
typedef void (*auto_zone_foreach_object_t) (auto_zone_cursor_t cursor, void (*op) (void *ptr, void *data), void* data);
typedef struct {
    uint32_t version;       // reserved - 0 for now
    void (*batch_invalidate) (auto_zone_t *zone, auto_zone_foreach_object_t foreach, auto_zone_cursor_t cursor, size_t cursor_size);
        // After unreached objects are found, the collector calls this routine with an internal context.
        // Typically, one enters a try block and calls back into the collector with a function pointer to be used to
        // invalidate each object. This amortizes the cost of the try block and also allows the collector to use
        // efficient contexts.
    void (*resurrect) (auto_zone_t *zone, void *ptr);
        // Objects on the garbage list may be assigned into live objects in an attempted resurrection. This is not allowed.
        // This function, if supplied, is called for these objects to turn them into zombies. The zombies may well hold
        // pointers to other objects on the garbage list. No attempt is made to preserve these objects beyond this collection.
    const unsigned char* (*layout_for_address)(auto_zone_t *zone, void *ptr);
        // The collector assumes that the first word of every "object" is a class pointer.
        // For each class pointer discovered, this function is called to return a layout, or NULL
        // if the object should be scanned conservatively.
        // The layout format is nibble pairs {skipcount, scancount} XXX
    const unsigned char* (*weak_layout_for_address)(auto_zone_t *zone, void *ptr);
        // called once for each allocation encountered whose weak layout is not yet known.
        // the callee returns a weak layout for the allocation, or NULL if the allocation has no weak references.
    char* (*name_for_address) (auto_zone_t *zone, vm_address_t base, vm_address_t offset);
        // if supplied, is used during logging for errors such as resurrections
    auto_log_mask_t log;
        // set to auto_log_mask_t bits as desired
    boolean_t disable_generational;
        // if true, requests to do generational GC are ignored.
    boolean_t malloc_stack_logging;
        // if true, logs allocations for malloc stack logging. Automatically set if MallocStackLogging{NoCompact} is set
    void (*scan_external_callout)(void *context, void (*scanner)(void *context, void *start, void *end));
        // an external function that is passed a memory scanner entry point.
        // if set, this function is called during scanning so that the scanner the collector supplies
        // can be applied to all external memory that might hold references.
        // Useful, for example, for green thread systems.
    void (*will_grow)(auto_zone_t *zone, auto_heap_growth_info_t);
        // the collector calls this when it is about to grow the heap; advise whether memory was returned to the collector or not.
        // if memory was returned, return 0 and the allocation will be attempted again; otherwise the heap will be grown.
    size_t collection_threshold;
        // if_needed threshold: the collector will initiate a collection after this number of bytes has been allocated.
    size_t full_vs_gen_frequency;
        // after full_vs_gen_frequency generational collections, a full collection will occur if the if_needed threshold is exceeded
} auto_collection_control_t;
extern auto_collection_control_t *auto_collection_parameters(auto_zone_t *zone);
// FIXME: API is to get the control struct and slam it
// sets a parameter that decides when callback gets called
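// Usage sketch (illustrative; the log mask and threshold values are arbitrary examples,
// not recommendations):
#if 0
static void example_tune_collector(auto_zone_t *zone) {
    auto_collection_control_t *control = auto_collection_parameters(zone);
    control->log = AUTO_LOG_COLLECTIONS | AUTO_LOG_UNUSUAL;    // log collections and unusual events
    control->collection_threshold = 1024 * 1024;               // consider an if_needed collection after ~1MB allocated
}
#endif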
extern void auto_collector_disable(auto_zone_t *zone);
extern void auto_collector_reenable(auto_zone_t *zone);
// these two functions turn off/on the collector
// default is on
// use with great care.
extern boolean_t auto_zone_is_enabled(auto_zone_t *zone);
extern boolean_t auto_zone_is_collecting(auto_zone_t *zone);
extern void auto_collect(auto_zone_t *zone, auto_collection_mode_t mode, void *collection_context);
// request a collection. By default, the collection will occur only on the main thread.
extern void auto_collect_multithreaded(auto_zone_t *zone);
// start a dedicated thread to do collections. The invalidate callback will subsequently be called from this new thread.
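// Usage sketch (illustrative; passing NULL for collection_context is an assumption):
#if 0
static void example_request_collections(auto_zone_t *zone) {
    auto_collect_multithreaded(zone);       // run collections (and invalidation) on a dedicated thread
    auto_collect(zone, AUTO_COLLECT_EXHAUSTIVE_COLLECTION | AUTO_COLLECT_SYNCHRONOUS, NULL);
                                            // exhaustive collection, blocking until scanning finishes
}
#endif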
/********* Object layout for compaction ************/
// For compaction of the zone, we need to know for sure where the pointers are.
// Each object is assumed to have a class pointer as word 0 (the "isa").
// This layout information is also used for collection (for "tracing" pointers).
// Exact layout knowledge is also important for ignoring weak references.
enum {
    AUTO_TYPE_UNKNOWN = -1,                                 // this is an error value
    AUTO_UNSCANNED = 1,
    AUTO_OBJECT = 2,
    AUTO_MEMORY_SCANNED = 0,                                // holds conservatively scanned pointers
    AUTO_MEMORY_UNSCANNED = AUTO_UNSCANNED,                 // holds unscanned memory (bits)
    AUTO_OBJECT_SCANNED = AUTO_OBJECT,                      // first word is 'isa', may have 'exact' layout info elsewhere
    AUTO_OBJECT_UNSCANNED = AUTO_OBJECT | AUTO_UNSCANNED,   // first word is 'isa', good for bits or auto_zone_retain'ed items
};
typedef int auto_memory_type_t;
extern auto_memory_type_t auto_zone_get_layout_type(auto_zone_t *zone, void *ptr);
extern void* auto_zone_allocate_object(auto_zone_t *zone, size_t size, auto_memory_type_t type, boolean_t initial_refcount_to_one, boolean_t clear);
// Create a copy of an AUTO_MEMORY object, preserving the "scanned" attribute.
// If ptr is not auto memory, an unscanned memory copy is created instead.
void *auto_zone_create_copy(auto_zone_t *zone, void *ptr);
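// Usage sketch for auto_zone_allocate_object (illustrative; the size, type, and flag
// values are arbitrary examples):
#if 0
static void *example_allocate_scanned(auto_zone_t *zone) {
    return auto_zone_allocate_object(zone,
                                     4 * sizeof(void *),     // room for four pointers
                                     AUTO_MEMORY_SCANNED,    // conservatively scanned for references
                                     0,                      // initial_refcount_to_one: not pinned
                                     1);                     // clear: return zero-filled memory
}
#endif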
extern void auto_zone_register_thread(auto_zone_t *zone);
extern void auto_zone_unregister_thread(auto_zone_t *zone);
extern void auto_zone_assert_thread_registered(auto_zone_t *zone);
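// Usage sketch (illustrative; a pthread-style entry point is assumed, with the zone
// passed as the thread argument):
#if 0
static void *example_worker_thread(void *arg) {
    auto_zone_t *zone = (auto_zone_t *)arg;
    auto_zone_register_thread(zone);        // register before touching collected memory
    /* ... allocate and mutate collected memory here ... */
    auto_zone_unregister_thread(zone);      // unregister before the thread exits
    return NULL;
}
#endif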
extern void auto_zone_register_datasegment(auto_zone_t *zone, void *address, size_t size);
extern void auto_zone_unregister_datasegment(auto_zone_t *zone, void *address, size_t size);
// Weak references
// The collector maintains a weak reference system.
// Essentially, locations in which references are stored are registered along with the reference itself.
// The location should not be within scanned GC memory.
// After a collection, before finalization, all registered locations are examined and any containing references to
// newly discovered garbage will be "zeroed" and the registration cancelled.
//
// Reading values from locations must be done through the weak read function, because there is a race between such
// reads and the collector having just determined that the value read is in fact otherwise garbage.
//
// The address of a callback block may optionally be supplied. If supplied, and the location is zeroed, the callback
// block is queued to be called later with the arguments supplied in the callback block. The same callback block
// can and should be used as an aggregation point: a table of weak locations could supply each registration with the
// same pointer to a callback block that will call back into that table if items are zeroed. The callbacks are made
// before finalization. Note that only thread-safe operations may be performed by this callback.
//
// It is important to cancel all registrations before deallocating the memory containing locations or callback blocks.
// Cancellation is done by calling the registration function with a NULL "reference" parameter for that location.
typedef struct auto_weak_callback_block {
    struct auto_weak_callback_block *next;      // must be set to zero before first use
    void (*callback_function)(void *arg1, void *arg2);
    void *arg1;
    void *arg2;
} auto_weak_callback_block_t;
extern void auto_assign_weak_reference(auto_zone_t *zone, const void *value, const void **location, auto_weak_callback_block_t *block);
// Read a weak-reference, informing the collector that it is now strongly referenced.
extern void* auto_read_weak_reference(auto_zone_t *zone, void **referrer);
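// Usage sketch (illustrative; `example_weak_slot`, `example_on_zeroed`, and `target` are
// hypothetical, and the cancellation call follows the NULL-reference rule described above):
#if 0
static void *example_weak_slot;             // lives outside scanned GC memory, as required above
static void example_on_zeroed(void *arg1, void *arg2) { /* slot was zeroed; react here (thread-safe work only) */ }
static auto_weak_callback_block_t example_callback = { NULL, example_on_zeroed, &example_weak_slot, NULL };
static void example_weak_reference(auto_zone_t *zone, void *target) {
    auto_assign_weak_reference(zone, target, (const void **)&example_weak_slot, &example_callback);
    void *strong = auto_read_weak_reference(zone, &example_weak_slot);      // safe read; NULL once target is garbage
    (void)strong;
    auto_assign_weak_reference(zone, NULL, (const void **)&example_weak_slot, NULL);    // cancel the registration
}
#endif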
extern void auto_zone_add_root(auto_zone_t *zone, void *address_of_root_ptr, void *value);
extern void auto_zone_remove_root(auto_zone_t *zone, void *address_of_root_ptr);
extern void auto_zone_root_write_barrier(auto_zone_t *zone, void *address_of_possible_root_ptr, void *value);
// Associative references.
// This informs the collector that an object A wishes to associate one or more secondary objects with object A's lifetime.
// This can be used to implement GC-safe associations that will neither cause uncollectable cycles, nor suffer the limitations
// of weak references.
extern void auto_zone_set_associative_ref(auto_zone_t *zone, void *object, void *key, void *value);
extern void *auto_zone_get_associative_ref(auto_zone_t *zone, void *object, void *key);
extern void auto_zone_erase_associative_refs(auto_zone_t *zone, void *object);
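// Usage sketch (illustrative; `object` and `value` are hypothetical collected blocks, and
// using the address of a static as the key is an assumption):
#if 0
static char example_assoc_key;              // any stable address can serve as a key
static void example_associations(auto_zone_t *zone, void *object, void *value) {
    auto_zone_set_associative_ref(zone, object, &example_assoc_key, value);     // value now lives at least as long as object
    void *found = auto_zone_get_associative_ref(zone, object, &example_assoc_key);
    (void)found;
    auto_zone_erase_associative_refs(zone, object);                             // drop all of object's associations
}
#endif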
/***** SPI ******/
extern void auto_zone_start_monitor(boolean_t force);
extern void auto_zone_set_class_list(int (*get_class_list)(void **buffer, int count));
extern boolean_t auto_zone_is_finalized(auto_zone_t *zone, const void *ptr);
extern void auto_zone_stats(void); // write stats to stdout
extern void auto_zone_write_stats(FILE *f); // write stats to the given stream
extern char *auto_zone_stats_string(); // return a char * containing the stats string, which should be free()'d
extern void auto_zone_set_nofinalize(auto_zone_t *zone, void *ptr);
extern void auto_zone_set_unscanned(auto_zone_t *zone, void *ptr);
extern void auto_zone_clear_stack(auto_zone_t *zone, unsigned long options);
// Reference count logging support for ObjectAlloc et al.
enum {
    AUTO_RETAIN_EVENT = 14,
    AUTO_RELEASE_EVENT = 15
};
extern void (*__auto_reference_logger)(uint32_t eventtype, void *ptr, uintptr_t data);
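// Usage sketch (illustrative; `example_log_reference_event` is a hypothetical logger):
#if 0
static void example_log_reference_event(uint32_t eventtype, void *ptr, uintptr_t data) {
    fprintf(stderr, "%s %p (%lu)\n",
            eventtype == AUTO_RETAIN_EVENT ? "retain" : "release", ptr, (unsigned long)data);
}
static void example_install_reference_logger(void) {
    __auto_reference_logger = example_log_reference_event;
}
#endif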
// Reference tracing
// referrer_base[referrer_offset] -> referent
typedef struct {
    vm_address_t referent;
    vm_address_t referrer_base;
    intptr_t referrer_offset;
} auto_reference_t;
typedef void (*auto_reference_recorder_t)(auto_zone_t *zone, void *ctx,
                                          auto_reference_t reference);
extern void auto_enumerate_references(auto_zone_t *zone, void *referent,
                                      auto_reference_recorder_t callback,
                                      void *stack_bottom, void *ctx);
void **auto_weak_find_first_referrer(auto_zone_t *zone, void **location, unsigned long count);
/************ DEPRECATED ***********/
extern auto_zone_t *auto_zone(void);
// returns a pointer to the first garbage collected zone created.
extern unsigned auto_zone_touched_size(auto_zone_t *zone);
// a conservative upper bound on the memory touched by the allocator itself.
extern double auto_zone_utilization(auto_zone_t *zone);
// a conservative measure of the utilization of allocator-touched memory.
/************* EXPERIMENTAL *********/
#ifdef __BLOCKS__
typedef void (^auto_zone_stack_dump)(const void *base, unsigned long byte_size);
typedef void (^auto_zone_register_dump)(const void *base, unsigned long byte_size);
typedef void (^auto_zone_node_dump)(const void *address, unsigned long size, unsigned int layout, unsigned long refcount);
typedef void (^auto_zone_root_dump)(const void **address);
typedef void (^auto_zone_weak_dump)(const void **address, const void *item);
// Instruments.app utility; causes significant disruption.
// This is SPI for Apple's use only. Can and likely will change without regard to 3rd party use.
void auto_zone_dump(auto_zone_t *zone,
                    auto_zone_stack_dump stack_dump,
                    auto_zone_register_dump register_dump,
                    auto_zone_node_dump thread_local_node_dump,     // unsupported
                    auto_zone_root_dump root_dump,
                    auto_zone_node_dump global_node_dump,
                    auto_zone_weak_dump weak_dump);
#endif
enum {
    auto_is_not_auto = 0,
    auto_is_auto = (1 << 1),    // always on for a start of a node
    auto_is_local = (1 << 2),   // is/was node local
};
typedef int auto_probe_results_t;
// Instruments.app utility; causes significant disruption.
// This is SPI for Apple's use only. Can and likely will change without regard to 3rd party use.
auto_probe_results_t auto_zone_probe_unlocked(auto_zone_t *zone, void *address);
#ifdef __BLOCKS__
void auto_zone_scan_exact(auto_zone_t *zone, void *address, void (^callback)(void *base, unsigned long byte_offset, void *candidate));
#endif
__END_DECLS
#endif /* __AUTO_ZONE__ */