@@ -863,60 +863,40 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
         }
         goto done_fields; // for now
     }
-    if (jl_is_method_instance(v)) {
+    if (s->incremental && jl_is_method_instance(v)) {
         jl_method_instance_t *mi = (jl_method_instance_t*)v;
-        if (s->incremental) {
-            jl_value_t *def = mi->def.value;
-            if (needs_uniquing(v, s->query_cache)) {
-                // we only need 3 specific fields of this (the rest are not used)
-                jl_queue_for_serialization(s, mi->def.value);
-                jl_queue_for_serialization(s, mi->specTypes);
-                jl_queue_for_serialization(s, (jl_value_t*)mi->sparam_vals);
-                goto done_fields;
-            }
-            else if (jl_is_method(def) && jl_object_in_image(def)) {
-                // we only need 3 specific fields of this (the rest are restored afterward, if valid)
-                // in particular, cache is repopulated by jl_mi_cache_insert for all foreign function,
-                // so must not be present here
-                record_field_change((jl_value_t**)&mi->backedges, NULL);
-                record_field_change((jl_value_t**)&mi->cache, NULL);
-            }
-            else {
-                assert(!needs_recaching(v, s->query_cache));
-            }
-            // n.b. opaque closures cannot be inspected and relied upon like a
-            // normal method since they can get improperly introduced by generated
-            // functions, so if they appeared at all, we will probably serialize
-            // them wrong and segfault. The jl_code_for_staged function should
-            // prevent this from happening, so we do not need to detect that user
-            // error now.
+        jl_value_t *def = mi->def.value;
+        if (needs_uniquing(v, s->query_cache)) {
+            // we only need 3 specific fields of this (the rest are not used)
+            jl_queue_for_serialization(s, mi->def.value);
+            jl_queue_for_serialization(s, mi->specTypes);
+            jl_queue_for_serialization(s, (jl_value_t*)mi->sparam_vals);
+            goto done_fields;
         }
-        // don't recurse into all backedges memory (yet)
-        jl_value_t *backedges = get_replaceable_field((jl_value_t**)&mi->backedges, 1);
-        if (backedges) {
-            jl_queue_for_serialization_(s, (jl_value_t*)((jl_array_t*)backedges)->ref.mem, 0, 1);
-            size_t i = 0, n = jl_array_nrows(backedges);
-            while (i < n) {
-                jl_value_t *invokeTypes;
-                jl_code_instance_t *caller;
-                i = get_next_edge((jl_array_t*)backedges, i, &invokeTypes, &caller);
-                if (invokeTypes)
-                    jl_queue_for_serialization(s, invokeTypes);
-            }
+        else if (jl_is_method(def) && jl_object_in_image(def)) {
+            // we only need 3 specific fields of this (the rest are restored afterward, if valid)
+            // in particular, cache is repopulated by jl_mi_cache_insert for all foreign function,
+            // so must not be present here
+            record_field_change((jl_value_t**)&mi->backedges, NULL);
+            record_field_change((jl_value_t**)&mi->cache, NULL);
         }
-    }
-    if (jl_is_binding(v)) {
-        jl_binding_t *b = (jl_binding_t*)v;
-        if (s->incremental && needs_uniquing(v, s->query_cache)) {
+        else {
+            assert(!needs_recaching(v, s->query_cache));
+        }
+        // n.b. opaque closures cannot be inspected and relied upon like a
+        // normal method since they can get improperly introduced by generated
+        // functions, so if they appeared at all, we will probably serialize
+        // them wrong and segfault. The jl_code_for_staged function should
+        // prevent this from happening, so we do not need to detect that user
+        // error now.
+    }
+    if (s->incremental && jl_is_binding(v)) {
+        if (needs_uniquing(v, s->query_cache)) {
+            jl_binding_t *b = (jl_binding_t*)v;
             jl_queue_for_serialization(s, b->globalref->mod);
             jl_queue_for_serialization(s, b->globalref->name);
             goto done_fields;
         }
-        // don't recurse into backedges memory (yet)
-        jl_value_t *backedges = get_replaceable_field((jl_value_t**)&b->backedges, 1);
-        if (backedges) {
-            jl_queue_for_serialization_(s, (jl_value_t*)((jl_array_t*)backedges)->ref.mem, 0, 1);
-        }
     }
     if (s->incremental && jl_is_globalref(v)) {
         jl_globalref_t *gr = (jl_globalref_t*)v;
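
Note on the queuing loop removed above: mi->backedges is a flat list in which
each logical edge is either a bare caller CodeInstance, or an invoke-signature
type immediately followed by the caller, which is why the loop decodes pairs
via get_next_edge. A minimal illustrative decoder, assuming that layout (the
real helper lives in julia_internal.h; this sketch is not the actual code):

    // Decode one edge of the packed backedge list and return the next index.
    static size_t next_edge_sketch(jl_array_t *list, size_t i,
                                   jl_value_t **invokesig, jl_code_instance_t **caller)
    {
        jl_value_t *item = jl_array_ptr_ref(list, i);
        if (jl_is_code_instance(item)) {
            *invokesig = NULL;                    // plain edge: caller only
            *caller = (jl_code_instance_t*)item;
            return i + 1;
        }
        *invokesig = item;                        // invoke edge: signature first,
        *caller = (jl_code_instance_t*)jl_array_ptr_ref(list, i + 1);  // then caller
        return i + 2;
    }
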
@@ -934,20 +914,18 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
             assert(!jl_object_in_image((jl_value_t*)tn->wrapper));
         }
     }
-    if (jl_is_code_instance(v)) {
+    if (s->incremental && jl_is_code_instance(v)) {
         jl_code_instance_t *ci = (jl_code_instance_t*)v;
         jl_method_instance_t *mi = jl_get_ci_mi(ci);
-        if (s->incremental) {
-            // make sure we don't serialize other reachable cache entries of foreign methods
-            // Should this now be:
-            // if (ci !in ci->defs->cache)
-            //     record_field_change((jl_value_t**)&ci->next, NULL);
-            // Why are we checking that the method/module this originates from is in_image?
-            // and then disconnect this CI?
-            if (jl_object_in_image((jl_value_t*)mi->def.value)) {
-                // TODO: if (ci in ci->defs->cache)
-                record_field_change((jl_value_t**)&ci->next, NULL);
-            }
+        // make sure we don't serialize other reachable cache entries of foreign methods
+        // Should this now be:
+        // if (ci !in ci->defs->cache)
+        //     record_field_change((jl_value_t**)&ci->next, NULL);
+        // Why are we checking that the method/module this originates from is in_image?
+        // and then disconnect this CI?
+        if (jl_object_in_image((jl_value_t*)mi->def.value)) {
+            // TODO: if (ci in ci->defs->cache)
+            record_field_change((jl_value_t**)&ci->next, NULL);
        }
         jl_value_t *inferred = jl_atomic_load_relaxed(&ci->inferred);
         if (inferred && inferred != jl_nothing) { // disregard if there is nothing here to delete (e.g. builtins, unspecialized)
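
The record_field_change((jl_value_t**)&ci->next, NULL) above relies on the
cache being a singly linked list of CodeInstances threaded through the atomic
next field; serializing NULL there truncates the chain, so foreign cache
entries past this one never reach the image. A sketch of the traversal this
cuts short (illustrative, not code from this file):

    // Walk a MethodInstance's CodeInstance cache chain.
    for (jl_code_instance_t *codeinst = jl_atomic_load_relaxed(&mi->cache);
         codeinst != NULL;
         codeinst = jl_atomic_load_relaxed(&codeinst->next)) {
        // visit codeinst; a NULL `next` written into the image stops the walk here
    }
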
@@ -975,7 +953,7 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
         if (inferred == jl_nothing) {
             record_field_change((jl_value_t**)&ci->inferred, jl_nothing);
         }
-        else if (s->incremental && jl_is_string(inferred)) {
+        else if (jl_is_string(inferred)) {
             // New roots for external methods
             if (jl_object_in_image((jl_value_t*)def)) {
                 void **pfound = ptrhash_bp(&s->method_roots_index, def);
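
For context, method_roots_index is one of the serializer's htable_t side
tables, and ptrhash_bp returns the address of the bucket for a key so the
caller can test and insert in a single probe (an empty bucket holds
HT_NOTFOUND). A self-contained sketch of the idiom, with a hypothetical
intern_once helper:

    #include "support/ptrhash.h"  // htable_t, ptrhash_bp, HT_NOTFOUND

    // Insert `value` under `key` only on first sight; one hash probe total.
    static void *intern_once(htable_t *h, void *key, void *value)
    {
        void **bp = ptrhash_bp(h, key);  // bucket address, created if absent
        if (*bp == HT_NOTFOUND)
            *bp = value;                 // first occurrence: claim the bucket
        return *bp;
    }
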
@@ -2594,35 +2572,6 @@ static void jl_prune_type_cache_linear(jl_svec_t *cache)
         jl_svecset(cache, ins++, jl_nothing);
 }
 
-static void jl_prune_mi_backedges(jl_array_t *backedges)
-{
-    if (backedges == NULL)
-        return;
-    size_t i = 0, ins = 0, n = jl_array_nrows(backedges);
-    while (i < n) {
-        jl_value_t *invokeTypes;
-        jl_code_instance_t *caller;
-        i = get_next_edge(backedges, i, &invokeTypes, &caller);
-        if (ptrhash_get(&serialization_order, caller) != HT_NOTFOUND)
-            ins = set_next_edge(backedges, ins, invokeTypes, caller);
-    }
-    jl_array_del_end(backedges, n - ins);
-}
-
-static void jl_prune_binding_backedges(jl_array_t *backedges)
-{
-    if (backedges == NULL)
-        return;
-    size_t i = 0, ins = 0, n = jl_array_nrows(backedges);
-    for (i = 0; i < n; i++) {
-        jl_value_t *b = jl_array_ptr_ref(backedges, i);
-        if (ptrhash_get(&serialization_order, b) != HT_NOTFOUND)
-            jl_array_ptr_set(backedges, ins, b);
-    }
-    jl_array_del_end(backedges, n - ins);
-}
-
-
 uint_t bindingkey_hash(size_t idx, jl_value_t *data);
 
 static void jl_prune_module_bindings(jl_module_t *m) JL_GC_DISABLED
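
Both helpers deleted above implement the same in-place compaction: scan with a
read index, copy survivors forward to a write index, then jl_array_del_end
trims the tail. Note that jl_prune_binding_backedges, as deleted, never
advanced ins, so it would have emptied the array instead of compacting it. A
corrected sketch of the pattern, with a hypothetical keep predicate:

    // Keep only elements satisfying `keep`, compacting the array in place.
    static void prune_ptr_array(jl_array_t *a, int (*keep)(jl_value_t*))
    {
        size_t i, ins = 0, n = jl_array_nrows(a);
        for (i = 0; i < n; i++) {
            jl_value_t *x = jl_array_ptr_ref(a, i);
            if (keep(x))
                jl_array_ptr_set(a, ins++, x);  // advance the write cursor
        }
        jl_array_del_end(a, n - ins);  // drop the now-stale tail
    }
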
@@ -3196,11 +3145,12 @@ static void jl_save_system_image_to_stream(ios_t *f, jl_array_t *mod_array,
         jl_queue_for_serialization(&s, global_roots_keyset);
         jl_serialize_reachable(&s);
     }
-    // step 1.5: prune (garbage collect) some special weak references known caches
+    // step 1.5: prune (garbage collect) some special weak references from
+    // built-in type caches too
     for (i = 0; i < serialization_queue.len; i++) {
         jl_value_t *v = (jl_value_t*)serialization_queue.items[i];
         if (jl_options.trim) {
-            if (jl_is_method(v)) {
+            if (jl_is_method(v)){
                 jl_method_t *m = (jl_method_t*)v;
                 jl_value_t *specializations_ = jl_atomic_load_relaxed(&m->specializations);
                 if (!jl_is_svec(specializations_))
@@ -3228,16 +3178,6 @@ static void jl_save_system_image_to_stream(ios_t *f, jl_array_t *mod_array,
                 jl_gc_wb(tn, jl_atomic_load_relaxed(&tn->cache));
                 jl_prune_type_cache_linear(jl_atomic_load_relaxed(&tn->linearcache));
             }
-            else if (jl_is_method_instance(v)) {
-                jl_method_instance_t *mi = (jl_method_instance_t*)v;
-                jl_value_t *backedges = get_replaceable_field((jl_value_t**)&mi->backedges, 1);
-                jl_prune_mi_backedges((jl_array_t*)backedges);
-            }
-            else if (jl_is_binding(v)) {
-                jl_binding_t *b = (jl_binding_t*)v;
-                jl_value_t *backedges = get_replaceable_field((jl_value_t**)&b->backedges, 1);
-                jl_prune_binding_backedges((jl_array_t*)backedges);
-            }
         }
     }
 