Ruby 3.1.4p223 (2023-03-30 revision HEAD)
transient_heap.c
1 /**********************************************************************
2 
3  transient_heap.c - implement transient_heap.
4 
5  Copyright (C) 2018 Koichi Sasada
6 
7 **********************************************************************/
8 
9 #include "debug_counter.h"
10 #include "gc.h"
11 #include "internal.h"
12 #include "internal/gc.h"
13 #include "internal/hash.h"
14 #include "internal/sanitizers.h"
15 #include "internal/static_assert.h"
16 #include "internal/struct.h"
17 #include "internal/variable.h"
18 #include "ruby/debug.h"
19 #include "ruby/ruby.h"
20 #include "ruby_assert.h"
21 #include "transient_heap.h"
22 #include "vm_debug.h"
23 #include "vm_sync.h"
24 
25 #if USE_TRANSIENT_HEAP /* USE_TRANSIENT_HEAP */
26 /*
27  * 1: enable assertions
28  * 2: enable verification of all transient heaps
29  */
30 #ifndef TRANSIENT_HEAP_CHECK_MODE
31 #define TRANSIENT_HEAP_CHECK_MODE 0
32 #endif
33 #define TH_ASSERT(expr) RUBY_ASSERT_MESG_WHEN(TRANSIENT_HEAP_CHECK_MODE > 0, expr, #expr)
34 
35 /*
36  * 1: show events
37  * 2: show dump at events
38  * 3: show all operations
39  */
40 #define TRANSIENT_HEAP_DEBUG 0
41 
42 /* For debugging: provide an unlimited supply of blocks.
43  * This mode keeps allocating fresh blocks instead of reusing them,
44  * and prohibits access to freed blocks so that invalid accesses are caught.
45  */
46 #define TRANSIENT_HEAP_DEBUG_INFINITE_BLOCK 0
47 
48 #if TRANSIENT_HEAP_DEBUG_INFINITE_BLOCK
49 #include <sys/mman.h>
50 #include <errno.h>
51 #endif
52 
53 /* For debugging: prohibit promotion to malloc'ed space.
54  */
55 #define TRANSIENT_HEAP_DEBUG_DONT_PROMOTE 0
56 
57 /* size configuration */
58 #define TRANSIENT_HEAP_PROMOTED_DEFAULT_SIZE 1024
59 
60  /* sizes: K = 1024, M = 1024 * 1024 */
61 #define TRANSIENT_HEAP_BLOCK_SIZE (1024 * 32) /* 32 KB; block offsets fit in int16_t */
62 #ifndef TRANSIENT_HEAP_TOTAL_SIZE
63 #define TRANSIENT_HEAP_TOTAL_SIZE (1024 * 1024 * 32) /* 32 MB */
64 #endif
65 #define TRANSIENT_HEAP_ALLOC_MAX (1024 * 2 ) /* 2 KB */
66 #define TRANSIENT_HEAP_BLOCK_NUM (TRANSIENT_HEAP_TOTAL_SIZE / TRANSIENT_HEAP_BLOCK_SIZE)
67 #define TRANSIENT_HEAP_USABLE_SIZE (TRANSIENT_HEAP_BLOCK_SIZE - sizeof(struct transient_heap_block_header))
68 
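/* A quick sanity check on the arithmetic above (a sketch, assuming a typical
 * 64-bit build where sizeof(struct transient_heap_block_header) == 16):
 *
 *     TRANSIENT_HEAP_BLOCK_NUM   == (32 * 1024 * 1024) / (32 * 1024)  == 1024 blocks
 *     TRANSIENT_HEAP_USABLE_SIZE == 32768 - 16                        == 32752 bytes
 *     TRANSIENT_HEAP_ALLOC_MAX   == 2048 bytes (header + payload), so one block
 *                                   holds at most ~15 maximum-size allocations
 */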
69 #define TRANSIENT_HEAP_ALLOC_MAGIC 0xfeab
70 #define TRANSIENT_HEAP_ALLOC_ALIGN RUBY_ALIGNOF(void *)
71 
72 #define TRANSIENT_HEAP_ALLOC_MARKING_LAST -1
73 #define TRANSIENT_HEAP_ALLOC_MARKING_FREE -2
74 
75 enum transient_heap_status {
76  transient_heap_none,
77  transient_heap_marking,
78  transient_heap_escaping
79 };
80 
81 struct transient_heap_block {
82  struct transient_heap_block_header {
83  int16_t index;
84  int16_t last_marked_index;
85  int16_t objects;
86  struct transient_heap_block *next_block;
87  } info;
88  char buff[TRANSIENT_HEAP_USABLE_SIZE];
89 };
90 
91 struct transient_heap {
92  struct transient_heap_block *using_blocks;
93  struct transient_heap_block *marked_blocks;
94  struct transient_heap_block *free_blocks;
95  int total_objects;
96  int total_marked_objects;
97  int total_blocks;
98  enum transient_heap_status status;
99 
100  VALUE *promoted_objects;
101  int promoted_objects_size;
102  int promoted_objects_index;
103 
104  struct transient_heap_block *arena;
105  int arena_index; /* increment only */
106 };
107 
108 struct transient_alloc_header {
109  uint16_t magic;
110  uint16_t size;
111  int16_t next_marked_index;
112  int16_t dummy;
113  VALUE obj;
114 };
115 
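/* Putting the three structures together (a summary of the definitions above,
 * not additional behavior): the arena is one aligned allocation carved into
 * TRANSIENT_HEAP_BLOCK_NUM blocks; each block is its info header followed by
 * buff[]; each allocation inside buff[] is a transient_alloc_header
 * immediately followed by the payload handed back to the caller:
 *
 *     arena : [ block 0 ][ block 1 ] ... [ block N-1 ]
 *     block : [ info ][ buff: [hdr|payload][hdr|payload] ... ]
 */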
116 static struct transient_heap global_transient_heap;
117 
118 static void transient_heap_promote_add(struct transient_heap* theap, VALUE obj);
119 static const void *transient_heap_ptr(VALUE obj, int error);
120 static int transient_header_managed_ptr_p(struct transient_heap* theap, const void *ptr);
121 
122 #define ROUND_UP(v, a) (((size_t)(v) + (a) - 1) & ~((a) - 1))
123 
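/* ROUND_UP rounds v up to the next multiple of a power-of-two alignment a,
 * for example:
 *
 *     ROUND_UP(13, 8)  == 16
 *     ROUND_UP(16, 8)  == 16
 *     ROUND_UP(24, 16) == 32
 */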
124 static void
125 transient_heap_block_dump(struct transient_heap* theap, struct transient_heap_block *block)
126 {
127  int i=0, n=0;
128 
129  while (i<block->info.index) {
130  void *ptr = &block->buff[i];
131  struct transient_alloc_header *header = ptr;
132  fprintf(stderr, "%4d %8d %p size:%4d next:%4d %s\n", n, i, ptr, header->size, header->next_marked_index, rb_obj_info(header->obj));
133  i += header->size;
134  n++;
135  }
136 }
137 
138 static void
139 transient_heap_blocks_dump(struct transient_heap* theap, struct transient_heap_block *block, const char *type_str)
140 {
141  while (block) {
142  fprintf(stderr, "- transient_heap_dump: %s:%p index:%d objects:%d last_marked_index:%d next:%p\n",
143  type_str, (void *)block, block->info.index, block->info.objects, block->info.last_marked_index, (void *)block->info.next_block);
144 
145  transient_heap_block_dump(theap, block);
146  block = block->info.next_block;
147  }
148 }
149 
150 static void
151 transient_heap_dump(struct transient_heap* theap)
152 {
153  fprintf(stderr, "transient_heap_dump objects:%d marked_objects:%d blocks:%d\n", theap->total_objects, theap->total_marked_objects, theap->total_blocks);
154  transient_heap_blocks_dump(theap, theap->using_blocks, "using_blocks");
155  transient_heap_blocks_dump(theap, theap->marked_blocks, "marked_blocks");
156  transient_heap_blocks_dump(theap, theap->free_blocks, "free_blocks");
157 }
158 
159 /* Debug: dump all transient_heap blocks */
160 void
161 rb_transient_heap_dump(void)
162 {
163  transient_heap_dump(&global_transient_heap);
164 }
165 
166 #if TRANSIENT_HEAP_CHECK_MODE >= 2
167 ATTRIBUTE_NO_ADDRESS_SAFETY_ANALYSIS(static void transient_heap_ptr_check(struct transient_heap *theap, VALUE obj));
168 static void
169 transient_heap_ptr_check(struct transient_heap *theap, VALUE obj)
170 {
171  if (obj != Qundef) {
172  const void *ptr = transient_heap_ptr(obj, FALSE);
173  TH_ASSERT(ptr == NULL || transient_header_managed_ptr_p(theap, ptr));
174  }
175 }
176 
177 ATTRIBUTE_NO_ADDRESS_SAFETY_ANALYSIS(static int transient_heap_block_verify(struct transient_heap *theap, struct transient_heap_block *block));
178 static int
179 transient_heap_block_verify(struct transient_heap *theap, struct transient_heap_block *block)
180 {
181  int i=0, n=0;
182  struct transient_alloc_header *header;
183 
184  while (i<block->info.index) {
185  header = (void *)&block->buff[i];
186  TH_ASSERT(header->magic == TRANSIENT_HEAP_ALLOC_MAGIC);
187  transient_heap_ptr_check(theap, header->obj);
188  n ++;
189  i += header->size;
190  }
191  TH_ASSERT(block->info.objects == n);
192 
193  return n;
194 }
195 
196 static int
197 transient_heap_blocks_verify(struct transient_heap *theap, struct transient_heap_block *blocks, int *block_num_ptr)
198 {
199  int n = 0;
200  struct transient_heap_block *block = blocks;
201  while (block) {
202  n += transient_heap_block_verify(theap, block);
203  *block_num_ptr += 1;
204  block = block->info.next_block;
205  }
206 
207  return n;
208 }
209 #endif
210 
211 static void
212 transient_heap_verify(struct transient_heap *theap)
213 {
214 #if TRANSIENT_HEAP_CHECK_MODE >= 2
215  int n=0, block_num=0;
216 
217  n += transient_heap_blocks_verify(theap, theap->using_blocks, &block_num);
218  n += transient_heap_blocks_verify(theap, theap->marked_blocks, &block_num);
219 
220  TH_ASSERT(n == theap->total_objects);
221  TH_ASSERT(n >= theap->total_marked_objects);
222  TH_ASSERT(block_num == theap->total_blocks);
223 #endif
224 }
225 
226 /* Debug: check assertions for all transient_heap blocks */
227 void
228 rb_transient_heap_verify(void)
229 {
230  transient_heap_verify(&global_transient_heap);
231 }
232 
233 static struct transient_heap*
234 transient_heap_get(void)
235 {
236  struct transient_heap* theap = &global_transient_heap;
237  transient_heap_verify(theap);
238  return theap;
239 }
240 
241 static void
242 reset_block(struct transient_heap_block *block)
243 {
244  __msan_allocated_memory(block, sizeof block);
245  block->info.index = 0;
246  block->info.objects = 0;
247  block->info.last_marked_index = TRANSIENT_HEAP_ALLOC_MARKING_LAST;
248  block->info.next_block = NULL;
249  __asan_poison_memory_region(&block->buff, sizeof block->buff);
250 }
251 
252 static void
253 connect_to_free_blocks(struct transient_heap *theap, struct transient_heap_block *block)
254 {
255  block->info.next_block = theap->free_blocks;
256  theap->free_blocks = block;
257 }
258 
259 static void
260 connect_to_using_blocks(struct transient_heap *theap, struct transient_heap_block *block)
261 {
262  block->info.next_block = theap->using_blocks;
263  theap->using_blocks = block;
264 }
265 
266 #if 0
267 static void
268 connect_to_marked_blocks(struct transient_heap *theap, struct transient_heap_block *block)
269 {
270  block->info.next_block = theap->marked_blocks;
271  theap->marked_blocks = block;
272 }
273 #endif
274 
275 static void
276 append_to_marked_blocks(struct transient_heap *theap, struct transient_heap_block *append_blocks)
277 {
278  if (theap->marked_blocks) {
279  struct transient_heap_block *block = theap->marked_blocks, *last_block = NULL;
280  while (block) {
281  last_block = block;
282  block = block->info.next_block;
283  }
284 
285  TH_ASSERT(last_block->info.next_block == NULL);
286  last_block->info.next_block = append_blocks;
287  }
288  else {
289  theap->marked_blocks = append_blocks;
290  }
291 }
292 
293 static struct transient_heap_block *
294 transient_heap_block_alloc(struct transient_heap* theap)
295 {
296  struct transient_heap_block *block;
297 #if TRANSIENT_HEAP_DEBUG_INFINITE_BLOCK
298  block = mmap(NULL, TRANSIENT_HEAP_BLOCK_SIZE, PROT_READ | PROT_WRITE,
299  MAP_PRIVATE | MAP_ANONYMOUS,
300  -1, 0);
301  if (block == MAP_FAILED) rb_bug("transient_heap_block_alloc: err:%d\n", errno);
302 #else
303  if (theap->arena == NULL) {
304  theap->arena = rb_aligned_malloc(TRANSIENT_HEAP_BLOCK_SIZE, TRANSIENT_HEAP_TOTAL_SIZE);
305  if (theap->arena == NULL) {
306  rb_bug("transient_heap_block_alloc: failed\n");
307  }
308  }
309 
310  TH_ASSERT(theap->arena_index < TRANSIENT_HEAP_BLOCK_NUM);
311  block = &theap->arena[theap->arena_index++];
312  TH_ASSERT(((intptr_t)block & (TRANSIENT_HEAP_BLOCK_SIZE - 1)) == 0);
313 #endif
314  reset_block(block);
315 
316  TH_ASSERT(((intptr_t)block->buff & (TRANSIENT_HEAP_ALLOC_ALIGN-1)) == 0);
317  if (0) fprintf(stderr, "transient_heap_block_alloc: %4d %p\n", theap->total_blocks, (void *)block);
318  return block;
319 }
320 
321 
322 static struct transient_heap_block *
323 transient_heap_allocatable_block(struct transient_heap* theap)
324 {
325  struct transient_heap_block *block;
326 
327 #if TRANSIENT_HEAP_DEBUG_INFINITE_BLOCK
328  block = transient_heap_block_alloc(theap);
329  theap->total_blocks++;
330 #else
331  /* get one block from free_blocks */
332  block = theap->free_blocks;
333  if (block) {
334  theap->free_blocks = block->info.next_block;
335  block->info.next_block = NULL;
336  theap->total_blocks++;
337  }
338 #endif
339 
340  return block;
341 }
342 
343 static struct transient_alloc_header *
344 transient_heap_allocatable_header(struct transient_heap* theap, size_t size)
345 {
346  struct transient_heap_block *block = theap->using_blocks;
347 
348  while (block) {
349  TH_ASSERT(block->info.index <= (int16_t)TRANSIENT_HEAP_USABLE_SIZE);
350 
351  if (TRANSIENT_HEAP_USABLE_SIZE - block->info.index >= size) {
352  struct transient_alloc_header *header = (void *)&block->buff[block->info.index];
353  block->info.index += size;
354  block->info.objects++;
355  return header;
356  }
357  else {
358  block = transient_heap_allocatable_block(theap);
359  if (block) connect_to_using_blocks(theap, block);
360  }
361  }
362 
363  return NULL;
364 }
365 
366 void *
367 rb_transient_heap_alloc(VALUE obj, size_t req_size)
368 {
369  // only on single main ractor
370  if (ruby_single_main_ractor == NULL) return NULL;
371 
372  void *ret;
373  struct transient_heap* theap = transient_heap_get();
374  size_t size = ROUND_UP(req_size + sizeof(struct transient_alloc_header), TRANSIENT_HEAP_ALLOC_ALIGN);
375 
376  TH_ASSERT(RB_TYPE_P(obj, T_ARRAY) ||
377  RB_TYPE_P(obj, T_OBJECT) ||
378  RB_TYPE_P(obj, T_STRUCT) ||
379  RB_TYPE_P(obj, T_HASH)); /* supported types */
380 
381  if (size > TRANSIENT_HEAP_ALLOC_MAX) {
382  if (TRANSIENT_HEAP_DEBUG >= 3) fprintf(stderr, "rb_transient_heap_alloc: [too big: %ld] %s\n", (long)size, rb_obj_info(obj));
383  ret = NULL;
384  }
385 #if TRANSIENT_HEAP_DEBUG_DONT_PROMOTE == 0
386  else if (RB_OBJ_PROMOTED_RAW(obj)) {
387  if (TRANSIENT_HEAP_DEBUG >= 3) fprintf(stderr, "rb_transient_heap_alloc: [promoted object] %s\n", rb_obj_info(obj));
388  ret = NULL;
389  }
390 #else
391  else if (RBASIC_CLASS(obj) == 0) {
392  if (TRANSIENT_HEAP_DEBUG >= 3) fprintf(stderr, "rb_transient_heap_alloc: [hidden object] %s\n", rb_obj_info(obj));
393  ret = NULL;
394  }
395 #endif
396  else {
397  struct transient_alloc_header *header = transient_heap_allocatable_header(theap, size);
398  if (header) {
399  void *ptr;
400 
401  /* the header is poisoned to catch buffer overflows, so
402  * unpoison it before writing */
403  asan_unpoison_memory_region(header, sizeof *header, true);
404 
405  header->size = size;
406  header->magic = TRANSIENT_HEAP_ALLOC_MAGIC;
407  header->next_marked_index = TRANSIENT_HEAP_ALLOC_MARKING_FREE;
408  header->obj = obj; /* TODO: can we eliminate it? */
409 
410  /* the header is now set up; poison it again */
411  asan_poison_memory_region(header, sizeof *header);
412  ptr = header + 1;
413 
414  theap->total_objects++; /* statistics */
415 
416 #if TRANSIENT_HEAP_DEBUG_DONT_PROMOTE
417  if (RB_OBJ_PROMOTED_RAW(obj)) {
418  transient_heap_promote_add(theap, obj);
419  }
420 #endif
421  if (TRANSIENT_HEAP_DEBUG >= 3) fprintf(stderr, "rb_transient_heap_alloc: header:%p ptr:%p size:%d obj:%s\n", (void *)header, ptr, (int)size, rb_obj_info(obj));
422 
423  RB_DEBUG_COUNTER_INC(theap_alloc);
424 
425  /* ptr is set up; OK to unpoison. */
426  asan_unpoison_memory_region(ptr, size - sizeof *header, true);
427  ret = ptr;
428  }
429  else {
430  if (TRANSIENT_HEAP_DEBUG >= 3) fprintf(stderr, "rb_transient_heap_alloc: [not enough space: %ld] %s\n", (long)size, rb_obj_info(obj));
431  RB_DEBUG_COUNTER_INC(theap_alloc_fail);
432  ret = NULL;
433  }
434  }
435 
436  return ret;
437 }
438 
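/* A minimal sketch of the intended calling pattern, loosely modeled on the
 * array buffer allocator in array.c; the flag handling below is illustrative
 * (an assumption), not code from this file:
 *
 *     VALUE *buf = rb_transient_heap_alloc(ary, sizeof(VALUE) * capa);
 *     if (buf != NULL) {
 *         FL_SET_RAW(ary, RARRAY_TRANSIENT_FLAG);    // buffer lives in the transient heap
 *     }
 *     else {
 *         FL_UNSET_RAW(ary, RARRAY_TRANSIENT_FLAG);  // fall back to malloc'ed memory
 *         buf = ALLOC_N(VALUE, capa);
 *     }
 */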
439 void
440 Init_TransientHeap(void)
441 {
442  int i, block_num;
443  struct transient_heap* theap = transient_heap_get();
444 
445 #if TRANSIENT_HEAP_DEBUG_INFINITE_BLOCK
446  block_num = 0;
447 #else
448  TH_ASSERT(TRANSIENT_HEAP_BLOCK_SIZE * TRANSIENT_HEAP_BLOCK_NUM == TRANSIENT_HEAP_TOTAL_SIZE);
449  block_num = TRANSIENT_HEAP_BLOCK_NUM;
450 #endif
451  for (i=0; i<block_num; i++) {
452  connect_to_free_blocks(theap, transient_heap_block_alloc(theap));
453  }
454  theap->using_blocks = transient_heap_allocatable_block(theap);
455 
456  theap->promoted_objects_size = TRANSIENT_HEAP_PROMOTED_DEFAULT_SIZE;
457  theap->promoted_objects_index = 0;
458  /* use raw malloc (not ALLOC_N) so this table stays outside the GC heap */
459  theap->promoted_objects = malloc(sizeof(VALUE) * theap->promoted_objects_size);
460  STATIC_ASSERT(
461  integer_overflow,
462  sizeof(VALUE) <= SIZE_MAX / TRANSIENT_HEAP_PROMOTED_DEFAULT_SIZE);
463  if (theap->promoted_objects == NULL) rb_bug("Init_TransientHeap: malloc failed.");
464 }
465 
466 static struct transient_heap_block *
467 blocks_alloc_header_to_block(struct transient_heap *theap, struct transient_heap_block *blocks, struct transient_alloc_header *header)
468 {
469  struct transient_heap_block *block = blocks;
470 
471  while (block) {
472  if (block->buff <= (char *)header && (char *)header < block->buff + TRANSIENT_HEAP_USABLE_SIZE) {
473  return block;
474  }
475  block = block->info.next_block;
476  }
477 
478  return NULL;
479 }
480 
481 static struct transient_heap_block *
482 alloc_header_to_block_verbose(struct transient_heap *theap, struct transient_alloc_header *header)
483 {
484  struct transient_heap_block *block;
485 
486  if ((block = blocks_alloc_header_to_block(theap, theap->marked_blocks, header)) != NULL) {
487  if (TRANSIENT_HEAP_DEBUG >= 3) fprintf(stderr, "alloc_header_to_block: found in marked_blocks\n");
488  return block;
489  }
490  else if ((block = blocks_alloc_header_to_block(theap, theap->using_blocks, header)) != NULL) {
491  if (TRANSIENT_HEAP_DEBUG >= 3) fprintf(stderr, "alloc_header_to_block: found in using_blocks\n");
492  return block;
493  }
494  else {
495  return NULL;
496  }
497 }
498 
499 static struct transient_alloc_header *
500 ptr_to_alloc_header(const void *ptr)
501 {
502  struct transient_alloc_header *header = (void *)ptr;
503  header -= 1;
504  return header;
505 }
506 
507 static int
508 transient_header_managed_ptr_p(struct transient_heap* theap, const void *ptr)
509 {
510  if (alloc_header_to_block_verbose(theap, ptr_to_alloc_header(ptr))) {
511  return TRUE;
512  }
513  else {
514  return FALSE;
515  }
516 }
517 
518 
519 int
520 rb_transient_heap_managed_ptr_p(const void *ptr)
521 {
522  return transient_header_managed_ptr_p(transient_heap_get(), ptr);
523 }
524 
525 static struct transient_heap_block *
526 alloc_header_to_block(struct transient_heap *theap, struct transient_alloc_header *header)
527 {
528  struct transient_heap_block *block;
529 #if TRANSIENT_HEAP_DEBUG_INFINITE_BLOCK
530  block = alloc_header_to_block_verbose(theap, header);
531  if (block == NULL) {
532  transient_heap_dump(theap);
533  rb_bug("alloc_header_to_block: not found in mark_blocks (%p)\n", header);
534  }
535 #else
536  block = (void *)((intptr_t)header & ~(TRANSIENT_HEAP_BLOCK_SIZE-1));
537  TH_ASSERT(block == alloc_header_to_block_verbose(theap, header));
538 #endif
539  return block;
540 }
541 
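/* Because every block in the arena is TRANSIENT_HEAP_BLOCK_SIZE-aligned, the
 * owning block of a header can be recovered by masking off the low bits of
 * its address (illustrative addresses only):
 *
 *     header == 0x7f0000012345
 *     block  == (void *)((intptr_t)header & ~(TRANSIENT_HEAP_BLOCK_SIZE - 1))
 *            == 0x7f0000010000
 */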
542 void
543 rb_transient_heap_mark(VALUE obj, const void *ptr)
544 {
545  ASSERT_vm_locking();
546 
547  struct transient_alloc_header *header = ptr_to_alloc_header(ptr);
548  asan_unpoison_memory_region(header, sizeof *header, false);
549  if (header->magic != TRANSIENT_HEAP_ALLOC_MAGIC) rb_bug("rb_transient_heap_mark: wrong header, %s (%p)", rb_obj_info(obj), ptr);
550  if (TRANSIENT_HEAP_DEBUG >= 3) fprintf(stderr, "rb_transient_heap_mark: %s (%p)\n", rb_obj_info(obj), ptr);
551 
552 #if TRANSIENT_HEAP_CHECK_MODE > 0
553  {
554  struct transient_heap* theap = transient_heap_get();
555  TH_ASSERT(theap->status == transient_heap_marking);
556  TH_ASSERT(transient_header_managed_ptr_p(theap, ptr));
557 
558  if (header->magic != TRANSIENT_HEAP_ALLOC_MAGIC) {
559  transient_heap_dump(theap);
560  rb_bug("rb_transient_heap_mark: magic is broken");
561  }
562  else if (header->obj != obj) {
563  // transient_heap_dump(theap);
564  rb_bug("rb_transient_heap_mark: mismatch (%s is stored, but %s is given)\n",
565  rb_obj_info(header->obj), rb_obj_info(obj));
566  }
567  }
568 #endif
569 
570  if (header->next_marked_index != TRANSIENT_HEAP_ALLOC_MARKING_FREE) {
571  /* already marked */
572  return;
573  }
574  else {
575  struct transient_heap* theap = transient_heap_get();
576  struct transient_heap_block *block = alloc_header_to_block(theap, header);
577  __asan_unpoison_memory_region(&block->info, sizeof block->info);
578  header->next_marked_index = block->info.last_marked_index;
579  block->info.last_marked_index = (int)((char *)header - block->buff);
580  theap->total_marked_objects++;
581 
582  transient_heap_verify(theap);
583  }
584 }
585 
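/* Marked allocations within a block form an intrusive, offset-based singly
 * linked list: info.last_marked_index is the buff[] offset of the most
 * recently marked header, and each header's next_marked_index points to the
 * previously marked one until TRANSIENT_HEAP_ALLOC_MARKING_LAST terminates
 * the chain. Walking it looks like this (same pattern as
 * transient_heap_block_evacuate below):
 *
 *     int idx = block->info.last_marked_index;
 *     while (idx >= 0) {
 *         struct transient_alloc_header *h = alloc_header(block, idx);
 *         // visit h->obj ...
 *         idx = h->next_marked_index;
 *     }
 */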
586 ATTRIBUTE_NO_ADDRESS_SAFETY_ANALYSIS(static const void *transient_heap_ptr(VALUE obj, int error));
587 static const void *
588 transient_heap_ptr(VALUE obj, int error)
589 {
590  const void *ptr = NULL;
591 
592  switch (BUILTIN_TYPE(obj)) {
593  case T_ARRAY:
594  if (RARRAY_TRANSIENT_P(obj)) {
595  TH_ASSERT(!FL_TEST_RAW(obj, RARRAY_EMBED_FLAG));
596  ptr = RARRAY(obj)->as.heap.ptr;
597  }
598  break;
599  case T_OBJECT:
600  if (ROBJ_TRANSIENT_P(obj)) {
601  ptr = ROBJECT_IVPTR(obj);
602  }
603  break;
604  case T_STRUCT:
605  if (RSTRUCT_TRANSIENT_P(obj)) {
606  ptr = rb_struct_const_heap_ptr(obj);
607  }
608  break;
609  case T_HASH:
610  if (RHASH_TRANSIENT_P(obj)) {
611  TH_ASSERT(RHASH_AR_TABLE_P(obj));
612  ptr = (VALUE *)(RHASH(obj)->as.ar);
613  }
614  else {
615  ptr = NULL;
616  }
617  break;
618  default:
619  if (error) {
620  rb_bug("transient_heap_ptr: unknown obj %s\n", rb_obj_info(obj));
621  }
622  }
623 
624  return ptr;
625 }
626 
627 static void
628 transient_heap_promote_add(struct transient_heap* theap, VALUE obj)
629 {
630  if (TRANSIENT_HEAP_DEBUG >= 3) fprintf(stderr, "rb_transient_heap_promote: %s\n", rb_obj_info(obj));
631 
632  if (TRANSIENT_HEAP_DEBUG_DONT_PROMOTE) {
633  /* duplicate check */
634  int i;
635  for (i=0; i<theap->promoted_objects_index; i++) {
636  if (theap->promoted_objects[i] == obj) return;
637  }
638  }
639 
640  if (theap->promoted_objects_size <= theap->promoted_objects_index) {
641  theap->promoted_objects_size *= 2;
642  if (TRANSIENT_HEAP_DEBUG >= 1) fprintf(stderr, "rb_transient_heap_promote: expand table to %d\n", theap->promoted_objects_size);
643  if (UNLIKELY((size_t)theap->promoted_objects_size > SIZE_MAX / sizeof(VALUE))) {
644  /* realloc failure due to integer overflow */
645  theap->promoted_objects = NULL;
646  }
647  else {
648  theap->promoted_objects = realloc(theap->promoted_objects, theap->promoted_objects_size * sizeof(VALUE));
649  }
650  if (theap->promoted_objects == NULL) rb_bug("rb_transient_heap_promote: realloc failed");
651  }
652  theap->promoted_objects[theap->promoted_objects_index++] = obj;
653 }
654 
655 void
656 rb_transient_heap_promote(VALUE obj)
657 {
658  ASSERT_vm_locking();
659 
660  if (transient_heap_ptr(obj, FALSE)) {
661  struct transient_heap* theap = transient_heap_get();
662  transient_heap_promote_add(theap, obj);
663  }
664  else {
665  /* ignore */
666  }
667 }
668 
669 static struct transient_alloc_header *
670 alloc_header(struct transient_heap_block* block, int index)
671 {
672  return (void *)&block->buff[index];
673 }
674 
675 static void
676 transient_heap_reset(void)
677 {
678  ASSERT_vm_locking();
679 
680  struct transient_heap* theap = transient_heap_get();
681  struct transient_heap_block* block;
682 
683  if (TRANSIENT_HEAP_DEBUG >= 1) fprintf(stderr, "!! transient_heap_reset\n");
684 
685  block = theap->marked_blocks;
686  while (block) {
687  struct transient_heap_block *next_block = block->info.next_block;
688  theap->total_objects -= block->info.objects;
689 #if TRANSIENT_HEAP_DEBUG_INFINITE_BLOCK
690  if (madvise(block, TRANSIENT_HEAP_BLOCK_SIZE, MADV_DONTNEED) != 0) {
691  rb_bug("madvise err:%d", errno);
692  }
693  if (mprotect(block, TRANSIENT_HEAP_BLOCK_SIZE, PROT_NONE) != 0) {
694  rb_bug("mprotect err:%d", errno);
695  }
696 #else
697  reset_block(block);
698  connect_to_free_blocks(theap, block);
699 #endif
700  theap->total_blocks--;
701  block = next_block;
702  }
703 
704  if (TRANSIENT_HEAP_DEBUG >= 1) fprintf(stderr, "!! transient_heap_reset block_num:%d\n", theap->total_blocks);
705 
706  theap->marked_blocks = NULL;
707  theap->total_marked_objects = 0;
708 }
709 
710 static void
711 transient_heap_block_evacuate(struct transient_heap* theap, struct transient_heap_block* block)
712 {
713  int marked_index = block->info.last_marked_index;
714  block->info.last_marked_index = TRANSIENT_HEAP_ALLOC_MARKING_LAST;
715 
716  while (marked_index >= 0) {
717  struct transient_alloc_header *header = alloc_header(block, marked_index);
718  asan_unpoison_memory_region(header, sizeof *header, true);
719  VALUE obj = header->obj;
720  TH_ASSERT(header->magic == TRANSIENT_HEAP_ALLOC_MAGIC);
721  if (header->magic != TRANSIENT_HEAP_ALLOC_MAGIC) rb_bug("transient_heap_block_evacuate: wrong header %p %s\n", (void *)header, rb_obj_info(obj));
722 
723  if (TRANSIENT_HEAP_DEBUG >= 3) fprintf(stderr, " * transient_heap_block_evacuate %p %s\n", (void *)header, rb_obj_info(obj));
724 
725  if (obj != Qnil) {
726  RB_DEBUG_COUNTER_INC(theap_evacuate);
727 
728  switch (BUILTIN_TYPE(obj)) {
729  case T_ARRAY:
730  rb_ary_transient_heap_evacuate(obj, !TRANSIENT_HEAP_DEBUG_DONT_PROMOTE);
731  break;
732  case T_OBJECT:
733  rb_obj_transient_heap_evacuate(obj, !TRANSIENT_HEAP_DEBUG_DONT_PROMOTE);
734  break;
735  case T_STRUCT:
736  rb_struct_transient_heap_evacuate(obj, !TRANSIENT_HEAP_DEBUG_DONT_PROMOTE);
737  break;
738  case T_HASH:
739  rb_hash_transient_heap_evacuate(obj, !TRANSIENT_HEAP_DEBUG_DONT_PROMOTE);
740  break;
741  default:
742  rb_bug("unsupported: %s\n", rb_obj_info(obj));
743  }
744  header->obj = Qundef; /* for debug */
745  }
746  marked_index = header->next_marked_index;
747  asan_poison_memory_region(header, sizeof *header);
748  }
749 }
750 
751 #if defined(USE_RUBY_DEBUG_LOG) && USE_RUBY_DEBUG_LOG
752 static const char *
753 transient_heap_status_cstr(enum transient_heap_status status)
754 {
755  switch (status) {
756  case transient_heap_none: return "none";
757  case transient_heap_marking: return "marking";
758  case transient_heap_escaping: return "escaping";
759  }
760  UNREACHABLE_RETURN(NULL);
761 }
762 #endif
763 
764 static void
765 transient_heap_update_status(struct transient_heap* theap, enum transient_heap_status status)
766 {
767  RUBY_DEBUG_LOG("%s -> %s",
768  transient_heap_status_cstr(theap->status),
769  transient_heap_status_cstr(status));
770 
771  TH_ASSERT(theap->status != status);
772  theap->status = status;
773 }
774 
775 static void
776 transient_heap_evacuate(void *dmy)
777 {
778  struct transient_heap* theap = transient_heap_get();
779 
780  if (theap->total_marked_objects == 0) return;
781  if (ruby_single_main_ractor == NULL) rb_bug("not single ractor mode");
782  if (theap->status == transient_heap_marking) {
783  if (TRANSIENT_HEAP_DEBUG >= 1) fprintf(stderr, "!! transient_heap_evacuate: skip while transient_heap_marking\n");
784  }
785  else {
786  VALUE gc_disabled = rb_gc_disable_no_rest();
787  {
788  struct transient_heap_block* block;
789 
790  RUBY_DEBUG_LOG("start gc_disabled:%d", RTEST(gc_disabled));
791 
792  if (TRANSIENT_HEAP_DEBUG >= 1) {
793  int i;
794  fprintf(stderr, "!! transient_heap_evacuate start total_blocks:%d\n", theap->total_blocks);
795  if (TRANSIENT_HEAP_DEBUG >= 4) {
796  for (i=0; i<theap->promoted_objects_index; i++) fprintf(stderr, "%4d %s\n", i, rb_obj_info(theap->promoted_objects[i]));
797  }
798  }
799  if (TRANSIENT_HEAP_DEBUG >= 2) transient_heap_dump(theap);
800 
801  TH_ASSERT(theap->status == transient_heap_none);
802  transient_heap_update_status(theap, transient_heap_escaping);
803 
804  /* evacuate from marked blocks */
805  block = theap->marked_blocks;
806  while (block) {
807  transient_heap_block_evacuate(theap, block);
808  block = block->info.next_block;
809  }
810 
811  /* evacuate from using blocks;
812  this only matters for incremental marking */
813  block = theap->using_blocks;
814  while (block) {
815  transient_heap_block_evacuate(theap, block);
816  block = block->info.next_block;
817  }
818 
819  /* all marked objects have escaped. */
820  transient_heap_reset();
821 
822  if (TRANSIENT_HEAP_DEBUG > 0) {
823  fprintf(stderr, "!! transient_heap_evacuate end total_blocks:%d\n", theap->total_blocks);
824  }
825 
826  transient_heap_verify(theap);
827  transient_heap_update_status(theap, transient_heap_none);
828  }
829  if (gc_disabled != Qtrue) rb_gc_enable();
830  RUBY_DEBUG_LOG("finish");
831  }
832 }
833 
834 void
835 rb_transient_heap_evacuate(void)
836 {
837  transient_heap_evacuate(NULL);
838 }
839 
840 static void
841 clear_marked_index(struct transient_heap_block* block)
842 {
843  int marked_index = block->info.last_marked_index;
844 
845  while (marked_index != TRANSIENT_HEAP_ALLOC_MARKING_LAST) {
846  struct transient_alloc_header *header = alloc_header(block, marked_index);
847  /* the header is poisoned to catch buffer overflows, so
848  * unpoison it first */
849  asan_unpoison_memory_region(header, sizeof *header, false);
850  TH_ASSERT(marked_index != TRANSIENT_HEAP_ALLOC_MARKING_FREE);
851  if (0) fprintf(stderr, "clear_marked_index - block:%p mark_index:%d\n", (void *)block, marked_index);
852 
853  marked_index = header->next_marked_index;
854  header->next_marked_index = TRANSIENT_HEAP_ALLOC_MARKING_FREE;
855  }
856 
857  block->info.last_marked_index = TRANSIENT_HEAP_ALLOC_MARKING_LAST;
858 }
859 
860 static void
861 blocks_clear_marked_index(struct transient_heap_block* block)
862 {
863  while (block) {
864  clear_marked_index(block);
865  block = block->info.next_block;
866  }
867 }
868 
869 static void
870 transient_heap_block_update_refs(struct transient_heap* theap, struct transient_heap_block* block)
871 {
872  int marked_index = block->info.last_marked_index;
873 
874  while (marked_index >= 0) {
875  struct transient_alloc_header *header = alloc_header(block, marked_index);
876 
877  asan_unpoison_memory_region(header, sizeof *header, false);
878 
879  header->obj = rb_gc_location(header->obj);
880 
881  marked_index = header->next_marked_index;
882  asan_poison_memory_region(header, sizeof *header);
883  }
884 }
885 
886 static void
887 transient_heap_blocks_update_refs(struct transient_heap* theap, struct transient_heap_block *block, const char *type_str)
888 {
889  while (block) {
890  transient_heap_block_update_refs(theap, block);
891  block = block->info.next_block;
892  }
893 }
894 
895 void
896 rb_transient_heap_update_references(void)
897 {
898  ASSERT_vm_locking();
899 
900  struct transient_heap* theap = transient_heap_get();
901  int i;
902 
903  transient_heap_blocks_update_refs(theap, theap->using_blocks, "using_blocks");
904  transient_heap_blocks_update_refs(theap, theap->marked_blocks, "marked_blocks");
905 
906  for (i=0; i<theap->promoted_objects_index; i++) {
907  VALUE obj = theap->promoted_objects[i];
908  theap->promoted_objects[i] = rb_gc_location(obj);
909  }
910 }
911 
912 void
913 rb_transient_heap_start_marking(int full_marking)
914 {
915  ASSERT_vm_locking();
916  RUBY_DEBUG_LOG("full?:%d", full_marking);
917 
918  struct transient_heap* theap = transient_heap_get();
919 
920  if (TRANSIENT_HEAP_DEBUG >= 1) fprintf(stderr, "!! rb_transient_heap_start_marking objects:%d blocks:%d promoted:%d full_marking:%d\n",
921  theap->total_objects, theap->total_blocks, theap->promoted_objects_index, full_marking);
922  if (TRANSIENT_HEAP_DEBUG >= 2) transient_heap_dump(theap);
923 
924  blocks_clear_marked_index(theap->marked_blocks);
925  blocks_clear_marked_index(theap->using_blocks);
926 
927  if (theap->using_blocks) {
928  if (theap->using_blocks->info.objects > 0) {
929  append_to_marked_blocks(theap, theap->using_blocks);
930  theap->using_blocks = NULL;
931  }
932  else {
933  append_to_marked_blocks(theap, theap->using_blocks->info.next_block);
934  theap->using_blocks->info.next_block = NULL;
935  }
936  }
937 
938  if (theap->using_blocks == NULL) {
939  theap->using_blocks = transient_heap_allocatable_block(theap);
940  }
941 
942  TH_ASSERT(theap->status == transient_heap_none);
943  transient_heap_update_status(theap, transient_heap_marking);
944  theap->total_marked_objects = 0;
945 
946  if (full_marking) {
947  theap->promoted_objects_index = 0;
948  }
949  else { /* mark promoted objects */
950  int i;
951  for (i=0; i<theap->promoted_objects_index; i++) {
952  VALUE obj = theap->promoted_objects[i];
953  const void *ptr = transient_heap_ptr(obj, TRUE);
954  if (ptr) {
955  rb_transient_heap_mark(obj, ptr);
956  }
957  }
958  }
959 
960  transient_heap_verify(theap);
961 }
962 
963 void
964 rb_transient_heap_finish_marking(void)
965 {
966  ASSERT_vm_locking();
967  RUBY_DEBUG_LOG("");
968 
969  struct transient_heap* theap = transient_heap_get();
970 
971  RUBY_DEBUG_LOG("objects:%d, marked:%d",
972  theap->total_objects,
973  theap->total_marked_objects);
974  if (TRANSIENT_HEAP_DEBUG >= 2) transient_heap_dump(theap);
975 
976  TH_ASSERT(theap->total_objects >= theap->total_marked_objects);
977 
978  TH_ASSERT(theap->status == transient_heap_marking);
979  transient_heap_update_status(theap, transient_heap_none);
980 
981  if (theap->total_marked_objects > 0) {
982  if (TRANSIENT_HEAP_DEBUG >= 1) fprintf(stderr, "-> rb_transient_heap_finish_marking register escape func.\n");
983  rb_postponed_job_register_one(0, transient_heap_evacuate, NULL);
984  }
985  else {
986  transient_heap_reset();
987  }
988 
989  transient_heap_verify(theap);
990 }
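/* How these hooks fit into a GC cycle (a summary of the functions above;
 * they are invoked from the GC):
 *
 *     rb_transient_heap_start_marking(full)  -- clear per-block mark lists and
 *                                               retire the current using_blocks
 *     rb_transient_heap_mark(obj, ptr)       -- thread each marked buffer onto
 *                                               its block's marked list
 *     rb_transient_heap_finish_marking()     -- if anything was marked, register
 *                                               transient_heap_evacuate() as a
 *                                               postponed job; otherwise reset
 *     transient_heap_evacuate()              -- copy surviving buffers out and
 *                                               recycle the marked blocks
 */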
991 #endif /* USE_TRANSIENT_HEAP */