@@ -433,6 +433,7 @@ void *native_functions; // opaque jl_native_code_desc_t blob used for fetching
 
 // table of struct field addresses to rewrite during saving
 static htable_t field_replace;
+static htable_t relocatable_ext_cis;
 
 // array of definitions for the predefined function pointers
 // (reverse of fptr_to_id)
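
The new relocatable_ext_cis table reuses the same pointer-keyed hash helpers that field_replace already depends on. As a reference, a minimal sketch of that API, assuming the htable_t/ptrhash_* declarations from src/support/htable.h (where HT_NOTFOUND marks a missing key); ptrhash_demo is a made-up name for illustration:

    #include <assert.h>
    #include "support/htable.h"   // htable_t, HT_NOTFOUND, htable_new/htable_free, ptrhash_*

    static htable_t relocatable_ext_cis;

    // Demo of the insert/lookup semantics the hunks below rely on.
    static void ptrhash_demo(void *ci)
    {
        htable_new(&relocatable_ext_cis, 0);        // 0 = default initial capacity
        ptrhash_put(&relocatable_ext_cis, ci, ci);  // record ci as relocatable
        assert(ptrhash_has(&relocatable_ext_cis, ci));
        assert(ptrhash_get(&relocatable_ext_cis, (void*)&relocatable_ext_cis) == HT_NOTFOUND);
        htable_free(&relocatable_ext_cis);
    }
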
@@ -656,7 +657,8 @@ static int needs_uniquing(jl_value_t *v) JL_NOTSAFEPOINT
 
 static void record_field_change(jl_value_t **addr, jl_value_t *newval) JL_NOTSAFEPOINT
 {
-    ptrhash_put(&field_replace, (void*)addr, newval);
+    if (*addr != newval)
+        ptrhash_put(&field_replace, (void*)addr, newval);
 }
 
 static jl_value_t *get_replaceable_field(jl_value_t **addr, int mutabl) JL_GC_DISABLED
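
With the guard, record_field_change becomes a no-op when the requested replacement equals the field's current value, so callers may now ask unconditionally. A hedged illustration (example_caller is hypothetical, not part of this change):

    // Hypothetical caller: if ci->inferred already holds jl_nothing, the
    // guard skips the ptrhash_put, so field_replace only records fields
    // whose serialized value will actually differ from the live one.
    static void example_caller(jl_code_instance_t *ci)
    {
        record_field_change((jl_value_t**)&ci->inferred, jl_nothing);
    }
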
@@ -797,6 +799,8 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
             // TODO: if (ci in ci->defs->cache)
             record_field_change((jl_value_t**)&ci->next, NULL);
         }
+        if (jl_atomic_load_relaxed(&ci->inferred) && !is_relocatable_ci(&relocatable_ext_cis, ci))
+            record_field_change((jl_value_t**)&ci->inferred, jl_nothing);
     }
 
     if (immediate) // must be things that can be recursively handled, and valid as type parameters
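
is_relocatable_ci is introduced by the wider change and is not defined in these hunks. A speculative sketch of its likely shape, assuming it simply combines the CodeInstance's own relocatability byte with membership in the table that jl_collect_new_roots populates (the real helper may check more conditions):

    // Speculative sketch only -- the actual definition ships elsewhere in
    // this change.
    static int is_relocatable_ci(htable_t *relocatable_ext_cis, jl_code_instance_t *ci)
    {
        if (!ci->relocatability)
            return 0;  // the inferred code was already flagged as non-relocatable
        // external CodeInstances must additionally have been vetted and recorded
        return ptrhash_has(relocatable_ext_cis, ci);
    }
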
@@ -1505,6 +1509,7 @@ static void jl_write_values(jl_serializer_state *s) JL_GC_DISABLED
                     // will check on deserialize if this cache entry is still valid
                 }
             }
+            newm->relocatability = 0;
         }
 
         newm->invoke = NULL;
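
In jl_write_values, newm addresses the object's copy inside the serializer's output buffer, so this write brands only the on-disk CodeInstance; the live object keeps its relocatability byte. A sketch of the assumed pattern (f and reloc_offset are taken to be the output stream and the object's offset within it):

    // Assumed pattern: mutate the copy in the output stream, never the
    // running session's object.
    jl_code_instance_t *newm = (jl_code_instance_t*)&f->buf[reloc_offset];
    newm->relocatability = 0;  // deserialized entries must be re-validated rather than trusted
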
@@ -2384,7 +2389,7 @@ static void jl_prepare_serialization_data(jl_array_t *mod_array, jl_array_t *new
         *edges = jl_alloc_vec_any(0);
         *method_roots_list = jl_alloc_vec_any(0);
         // Collect the new method roots
-        jl_collect_new_roots(*method_roots_list, *new_specializations, worklist_key);
+        jl_collect_new_roots(&relocatable_ext_cis, *method_roots_list, *new_specializations, worklist_key);
         jl_collect_edges(*edges, *ext_targets, *new_specializations, world);
     }
     assert(edges_map == NULL); // jl_collect_edges clears this when done
@@ -2770,6 +2775,7 @@ JL_DLLEXPORT void jl_create_system_image(void **_native_data, jl_array_t *workli
     assert((ct->reentrant_timing & 0b1110) == 0);
     ct->reentrant_timing |= 0b1000;
     if (worklist) {
+        htable_new(&relocatable_ext_cis, 0);
         jl_prepare_serialization_data(mod_array, newly_inferred, jl_worklist_key(worklist),
                                       &extext_methods, &new_specializations, &method_roots_list, &ext_targets, &edges);
         if (!emit_split) {
@@ -2786,6 +2792,8 @@ JL_DLLEXPORT void jl_create_system_image(void **_native_data, jl_array_t *workli
     jl_save_system_image_to_stream(ff, mod_array, worklist, extext_methods, new_specializations, method_roots_list, ext_targets, edges);
     if (_native_data != NULL)
         native_functions = NULL;
+    if (worklist)
+        htable_free(&relocatable_ext_cis);
     // make sure we don't run any Julia code concurrently before this point
     // Re-enable running julia code for postoutput hooks, atexit, etc.
     jl_gc_enable_finalizers(ct, 1);
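
Taken together, the last two hunks scope the table to a single incremental save. A condensed view of that pairing, abridged from the hunks above rather than new behavior:

    if (worklist) {
        htable_new(&relocatable_ext_cis, 0);  // filled in by jl_collect_new_roots during preparation
        /* ... collect roots and edges, then write the image ... */
        htable_free(&relocatable_ext_cis);    // save-local scratch state; freed once the stream is written
    }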