@@ -34,53 +34,70 @@ enum rb_gc_vm_weak_tables {
     RB_GC_VM_WEAK_TABLE_COUNT
 };
 
+#if USE_MODULAR_GC
+# define MODULAR_GC_FN
+#else
+// This takes advantage of internal linkage winning when appearing first.
+// See C99 6.2.2p4.
+# define MODULAR_GC_FN static
+#endif
+
+#if USE_MODULAR_GC
 RUBY_SYMBOL_EXPORT_BEGIN
-unsigned int rb_gc_vm_lock(void);
-void rb_gc_vm_unlock(unsigned int lev);
-unsigned int rb_gc_cr_lock(void);
-void rb_gc_cr_unlock(unsigned int lev);
-unsigned int rb_gc_vm_lock_no_barrier(void);
-void rb_gc_vm_unlock_no_barrier(unsigned int lev);
-void rb_gc_vm_barrier(void);
-size_t rb_gc_obj_optimal_size(VALUE obj);
-void rb_gc_mark_children(void *objspace, VALUE obj);
-void rb_gc_vm_weak_table_foreach(vm_table_foreach_callback_func callback, vm_table_update_callback_func update_callback, void *data, bool weak_only, enum rb_gc_vm_weak_tables table);
-void rb_gc_update_object_references(void *objspace, VALUE obj);
-void rb_gc_update_vm_references(void *objspace);
-void rb_gc_event_hook(VALUE obj, rb_event_flag_t event);
-void *rb_gc_get_objspace(void);
+#endif
+
+// These functions cannot be defined as static because they are used by other
+// files in Ruby.
 size_t rb_size_mul_or_raise(size_t x, size_t y, VALUE exc);
-void rb_gc_run_obj_finalizer(VALUE objid, long count, VALUE (*callback)(long i, void *data), void *data);
-void rb_gc_set_pending_interrupt(void);
-void rb_gc_unset_pending_interrupt(void);
-void rb_gc_obj_free_vm_weak_references(VALUE obj);
-bool rb_gc_obj_free(void *objspace, VALUE obj);
-void rb_gc_save_machine_context(void);
-void rb_gc_mark_roots(void *objspace, const char **categoryp);
-void rb_gc_ractor_newobj_cache_foreach(void (*func)(void *cache, void *data), void *data);
-bool rb_gc_multi_ractor_p(void);
-void rb_objspace_reachable_objects_from_root(void (func)(const char *category, VALUE, void *), void *passing_data);
 void rb_objspace_reachable_objects_from(VALUE obj, void (func)(VALUE, void *), void *data);
 void rb_obj_info_dump(VALUE obj);
 const char *rb_obj_info(VALUE obj);
-bool rb_gc_shutdown_call_finalizer_p(VALUE obj);
-uint32_t rb_gc_get_shape(VALUE obj);
-void rb_gc_set_shape(VALUE obj, uint32_t shape_id);
-uint32_t rb_gc_rebuild_shape(VALUE obj, size_t heap_id);
 size_t rb_obj_memsize_of(VALUE obj);
-void rb_gc_prepare_heap_process_object(VALUE obj);
 bool ruby_free_at_exit_p(void);
-bool rb_memerror_reentered(void);
-bool rb_obj_id_p(VALUE);
+void rb_objspace_reachable_objects_from_root(void (func)(const char *category, VALUE, void *), void *passing_data);
+
+MODULAR_GC_FN unsigned int rb_gc_vm_lock(void);
+MODULAR_GC_FN void rb_gc_vm_unlock(unsigned int lev);
+MODULAR_GC_FN unsigned int rb_gc_cr_lock(void);
+MODULAR_GC_FN void rb_gc_cr_unlock(unsigned int lev);
+MODULAR_GC_FN unsigned int rb_gc_vm_lock_no_barrier(void);
+MODULAR_GC_FN void rb_gc_vm_unlock_no_barrier(unsigned int lev);
+MODULAR_GC_FN void rb_gc_vm_barrier(void);
+MODULAR_GC_FN size_t rb_gc_obj_optimal_size(VALUE obj);
+MODULAR_GC_FN void rb_gc_mark_children(void *objspace, VALUE obj);
+MODULAR_GC_FN void rb_gc_vm_weak_table_foreach(vm_table_foreach_callback_func callback, vm_table_update_callback_func update_callback, void *data, bool weak_only, enum rb_gc_vm_weak_tables table);
+MODULAR_GC_FN void rb_gc_update_object_references(void *objspace, VALUE obj);
+MODULAR_GC_FN void rb_gc_update_vm_references(void *objspace);
+MODULAR_GC_FN void rb_gc_event_hook(VALUE obj, rb_event_flag_t event);
+MODULAR_GC_FN void *rb_gc_get_objspace(void);
+MODULAR_GC_FN void rb_gc_run_obj_finalizer(VALUE objid, long count, VALUE (*callback)(long i, void *data), void *data);
+MODULAR_GC_FN void rb_gc_set_pending_interrupt(void);
+MODULAR_GC_FN void rb_gc_unset_pending_interrupt(void);
+MODULAR_GC_FN void rb_gc_obj_free_vm_weak_references(VALUE obj);
+MODULAR_GC_FN bool rb_gc_obj_free(void *objspace, VALUE obj);
+MODULAR_GC_FN void rb_gc_save_machine_context(void);
+MODULAR_GC_FN void rb_gc_mark_roots(void *objspace, const char **categoryp);
+MODULAR_GC_FN void rb_gc_ractor_newobj_cache_foreach(void (*func)(void *cache, void *data), void *data);
+MODULAR_GC_FN bool rb_gc_multi_ractor_p(void);
+MODULAR_GC_FN bool rb_gc_shutdown_call_finalizer_p(VALUE obj);
+MODULAR_GC_FN uint32_t rb_gc_get_shape(VALUE obj);
+MODULAR_GC_FN void rb_gc_set_shape(VALUE obj, uint32_t shape_id);
+MODULAR_GC_FN uint32_t rb_gc_rebuild_shape(VALUE obj, size_t heap_id);
+MODULAR_GC_FN void rb_gc_prepare_heap_process_object(VALUE obj);
+MODULAR_GC_FN bool rb_memerror_reentered(void);
+MODULAR_GC_FN bool rb_obj_id_p(VALUE);
 
 #if USE_MODULAR_GC
-bool rb_gc_event_hook_required_p(rb_event_flag_t event);
-void *rb_gc_get_ractor_newobj_cache(void);
-void rb_gc_initialize_vm_context(struct rb_gc_vm_context *context);
-void rb_gc_worker_thread_set_vm_context(struct rb_gc_vm_context *context);
-void rb_gc_worker_thread_unset_vm_context(struct rb_gc_vm_context *context);
+MODULAR_GC_FN bool rb_gc_event_hook_required_p(rb_event_flag_t event);
+MODULAR_GC_FN void *rb_gc_get_ractor_newobj_cache(void);
+MODULAR_GC_FN void rb_gc_initialize_vm_context(struct rb_gc_vm_context *context);
+MODULAR_GC_FN void rb_gc_worker_thread_set_vm_context(struct rb_gc_vm_context *context);
+MODULAR_GC_FN void rb_gc_worker_thread_unset_vm_context(struct rb_gc_vm_context *context);
 #endif
+
+#if USE_MODULAR_GC
 RUBY_SYMBOL_EXPORT_END
+#endif
 
 void rb_ractor_finish_marking(void);
 
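For reference, a minimal sketch of the C99 6.2.2p4 rule the new macro leans on, using a hypothetical function name rather than one from the Ruby sources: when a static declaration of a function appears first, a later declaration or definition of the same identifier with no storage-class specifier inherits the internal linkage (the reverse order is undefined behavior per 6.2.2p7, which is why the static declaration must come first).

/* linkage_sketch.c -- hypothetical demo, not part of the Ruby sources.
 * Mirrors the !USE_MODULAR_GC branch, where MODULAR_GC_FN is static. */
#include <stdio.h>

#define MODULAR_GC_FN static

/* First declaration, as emitted by the header: internal linkage. */
MODULAR_GC_FN unsigned int demo_vm_lock(void);

/* Definition written without static, as in the existing .c files.
 * Per C99 6.2.2p4 (and p5, which treats a missing storage-class
 * specifier on a function as extern), it inherits internal linkage
 * from the prior declaration, so the symbol stays private to this
 * translation unit. */
unsigned int
demo_vm_lock(void)
{
    return 1u;
}

int
main(void)
{
    printf("%u\n", demo_vm_lock());
    return 0;
}

When USE_MODULAR_GC is set, MODULAR_GC_FN instead expands to nothing and the same declarations sit between RUBY_SYMBOL_EXPORT_BEGIN/END, so the functions keep external linkage and remain callable from the GC module.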