@@ -1197,68 +1197,29 @@ static jl_method_instance_t *jl_mt_assoc_by_type(jl_methtable_t *mt, jl_datatype
     return nf;
 }
 
-struct shadowed_matches_env {
+
+struct matches_env {
     struct typemap_intersection_env match;
     jl_typemap_entry_t *newentry;
     jl_value_t *shadowed;
 };
-static int check_shadowed_visitor(jl_typemap_entry_t *oldentry, struct typemap_intersection_env *closure0)
+static int get_intersect_visitor(jl_typemap_entry_t *oldentry, struct typemap_intersection_env *closure0)
 {
-    struct shadowed_matches_env *closure = container_of(closure0, struct shadowed_matches_env, match);
+    struct matches_env *closure = container_of(closure0, struct matches_env, match);
     if (oldentry == closure->newentry)
         return 1;
     if (oldentry->max_world < ~(size_t)0 || oldentry->min_world == closure->newentry->min_world)
         // skip if no world has both active
         // also be careful not to try to scan something from the current dump-reload though
         return 1;
     jl_method_t *oldmethod = oldentry->func.method;
-    if (oldmethod->specializations == jl_emptysvec)
-        // nothing inferred ever before means nothing shadowed ever before
-        return 1;
-
-    jl_tupletype_t *type = closure->newentry->sig;
-    jl_tupletype_t *sig = oldentry->sig;
-
-    int shadowed = 0;
-    if (closure->match.issubty) { // (new)type <: (old)sig
-        // new entry is more specific
-        shadowed = 1;
-    }
-    else if (jl_subtype((jl_value_t*)sig, (jl_value_t*)type)) {
-        // old entry is more specific
-    }
-    else if (jl_type_morespecific_no_subtype((jl_value_t*)type, (jl_value_t*)sig)) {
-        // new entry is more specific
-        shadowed = 1;
-    }
-    else if (jl_type_morespecific_no_subtype((jl_value_t*)sig, (jl_value_t*)type)) {
-        // old entry is more specific
-    }
-    else {
-        // sort order is ambiguous
-        shadowed = 1;
-    }
-
-    // ok: record that this method definition is being partially replaced
-    // (either with a real definition, or an ambiguity error)
-    if (shadowed) {
-        if (closure->shadowed == NULL) {
-            closure->shadowed = (jl_value_t*)oldmethod;
-        }
-        else if (!jl_is_array(closure->shadowed)) {
-            jl_array_t *list = jl_alloc_vec_any(2);
-            jl_array_ptr_set(list, 0, closure->shadowed);
-            jl_array_ptr_set(list, 1, (jl_value_t*)oldmethod);
-            closure->shadowed = (jl_value_t*)list;
-        }
-        else {
-            jl_array_ptr_1d_push((jl_array_t*)closure->shadowed, (jl_value_t*)oldmethod);
-        }
-    }
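+    // simply collect every intersecting method; the caller decides what is actually shadowed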
+    if (closure->shadowed == NULL)
+        closure->shadowed = (jl_value_t*)jl_alloc_vec_any(0);
+    jl_array_ptr_1d_push((jl_array_t*)closure->shadowed, (jl_value_t*)oldmethod);
     return 1;
 }
 
-static jl_value_t *check_shadowed_matches(jl_typemap_t *defs, jl_typemap_entry_t *newentry)
+static jl_value_t *get_intersect_matches(jl_typemap_t *defs, jl_typemap_entry_t *newentry)
 {
     jl_tupletype_t *type = newentry->sig;
     jl_tupletype_t *ttypes = (jl_tupletype_t*)jl_unwrap_unionall((jl_value_t*)type);
@@ -1271,7 +1232,7 @@ static jl_value_t *check_shadowed_matches(jl_typemap_t *defs, jl_typemap_entry_t
         else
             va = NULL;
     }
-    struct shadowed_matches_env env = {{check_shadowed_visitor, (jl_value_t*)type, va}};
+    struct matches_env env = {{get_intersect_visitor, (jl_value_t*)type, va}};
     env.match.ti = NULL;
     env.match.env = jl_emptysvec;
     env.newentry = newentry;
@@ -1608,8 +1569,9 @@ JL_DLLEXPORT void jl_method_table_insert(jl_methtable_t *mt, jl_method_t *method
     size_t max_world = method->primary_world - 1;
     int invalidated = 0;
     jl_value_t *loctag = NULL;  // debug info for invalidation
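+    // scratch variable for type-intersection results; kept rooted by the JL_GC_PUSH4 below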
+    jl_value_t *isect = NULL;
     jl_typemap_entry_t *newentry = NULL;
-    JL_GC_PUSH3(&oldvalue, &newentry, &loctag);
+    JL_GC_PUSH4(&oldvalue, &newentry, &loctag, &isect);
     JL_LOCK(&mt->writelock);
     // first delete the existing entry (we'll disable it later)
     struct jl_typemap_assoc search = {(jl_value_t*)type, method->primary_world, NULL, 0, ~(size_t)0};
@@ -1622,19 +1584,57 @@ JL_DLLEXPORT void jl_method_table_insert(jl_methtable_t *mt, jl_method_t *method
     newentry = jl_typemap_alloc((jl_tupletype_t*)type, simpletype, jl_emptysvec,
             (jl_value_t*)method, method->primary_world, method->deleted_world);
     jl_typemap_insert(&mt->defs, (jl_value_t*)mt, newentry, 0, &method_defs);
+    int any_to_drop = 0;
     if (oldentry) {
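+        // exact overwrite of an existing signature: invalidate the backedges
+        // of all of that method's compiled specializations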
+        jl_method_t *m = oldentry->func.method;
+        method_overwrite(newentry, m);
+        jl_svec_t *specializations = jl_atomic_load_acquire(&m->specializations);
+        jl_method_instance_t **data = (jl_method_instance_t**)jl_svec_data(specializations);
+        size_t i, l = jl_svec_len(specializations);
+        for (i = 0; i < l; i++) {
+            jl_method_instance_t *mi = jl_atomic_load_relaxed(&data[i]);
+            if (mi == NULL)
+                continue;
+            if (mi->backedges)
+                invalidate_backedges(mi, max_world, "jl_method_table_insert");
+        }
+        any_to_drop = l > 0;
         oldvalue = oldentry->func.value;
-        method_overwrite(newentry, (jl_method_t*)oldvalue);
     }
     else {
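+        // brand-new signature: gather every existing method whose signature intersects it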
+        oldvalue = get_intersect_matches(mt->defs, newentry);
+
+        jl_method_t **d;
+        size_t j, n;
+        if (oldvalue == NULL) {
+            d = NULL;
+            n = 0;
+        }
+        else {
+            assert(jl_is_array(oldvalue));
+            d = (jl_method_t**)jl_array_ptr_data(oldvalue);
+            n = jl_array_len(oldvalue);
+        }
         if (mt->backedges) {
             jl_value_t **backedges = jl_array_ptr_data(mt->backedges);
             size_t i, na = jl_array_len(mt->backedges);
             size_t ins = 0;
             for (i = 1; i < na; i += 2) {
                 jl_value_t *backedgetyp = backedges[i - 1];
-                if (!jl_has_empty_intersection(backedgetyp, (jl_value_t*)type)) {
-                    // TODO: only delete if the ml_matches list (with intersection=0, include_ambiguous=1) is empty
+                isect = jl_type_intersection(backedgetyp, (jl_value_t*)type);
+                if (isect != jl_bottom_type) {
+                    // see if the intersection was actually already fully
+                    // covered by anything (method or ambiguity is okay)
+                    size_t j;
+                    for (j = 0; j < n; j++) {
+                        jl_method_t *m = d[j];
+                        if (jl_subtype(isect, m->sig))
+                            break;
+                    }
+                    if (j != n)
+                        isect = jl_bottom_type;
+                }
+                if (isect != jl_bottom_type) {
                     jl_method_instance_t *backedge = (jl_method_instance_t*)backedges[i];
                     invalidate_method_instance(backedge, max_world, 0);
                     invalidated = 1;
@@ -1651,10 +1651,67 @@ JL_DLLEXPORT void jl_method_table_insert(jl_methtable_t *mt, jl_method_t *method
             else
                 jl_array_del_end(mt->backedges, na - ins);
         }
-        oldvalue = check_shadowed_matches(mt->defs, newentry);
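+        // for each intersecting method, decide whether the new definition actually
+        // shadows (or newly ambiguates) any of its compiled specializations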
+        if (oldvalue) {
+            char *morespec = (char*)alloca(n);
+            memset(morespec, 0, n);
+            for (j = 0; j < n; j++) {
+                jl_method_t *m = d[j];
+                if (morespec[j])
+                    continue;
+                jl_svec_t *specializations = jl_atomic_load_acquire(&m->specializations);
+                jl_method_instance_t **data = (jl_method_instance_t**)jl_svec_data(specializations);
+                size_t i, l = jl_svec_len(specializations);
+                int shadowing = 0;
+                for (i = 0; i < l; i++) {
+                    jl_method_instance_t *mi = jl_atomic_load_relaxed(&data[i]);
+                    if (mi == NULL)
+                        continue;
+                    isect = jl_type_intersection(type, (jl_value_t*)mi->specTypes);
+                    if (isect != jl_bottom_type) {
+                        if (shadowing == 0) {
+                            if (jl_type_morespecific(m->sig, type))
+                                // not actually shadowing--the existing method is still better
+                                break;
+                            if (!jl_type_morespecific(type, mi->def.method->sig)) {
+                                // adding an ambiguity--see if there already was one
+                                size_t k;
+                                for (k = 0; k < n; k++) {
+                                    jl_method_t *m2 = d[k];
+                                    if (m == m2 || !jl_subtype(isect, m2->sig))
+                                        continue;
+                                    if (k > i) {
+                                        if (jl_type_morespecific(m2->sig, type)) {
+                                            // not actually shadowing this--m2 will still be better
+                                            morespec[k] = 1;
+                                            continue;
+                                        }
+                                    }
+                                    if (!jl_type_morespecific(m->sig, m2->sig) &&
+                                        !jl_type_morespecific(m2->sig, m->sig)) {
+                                        break;
+                                    }
+                                }
+                                if (k != n)
+                                    continue;
+                            }
+                            shadowing = 1;
+                        }
+                        if (mi->backedges)
+                            invalidate_backedges(mi, max_world, "jl_method_table_insert");
+                    }
+                }
+                if (shadowing == 0)
+                    morespec[j] = 1; // the method won't need to be dropped from any cache
+            }
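+            // methods that remained more specific are removed from the drop list,
+            // so only genuinely shadowed methods trigger the cache sweep below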
+            for (j = 0; j < n; j++) {
+                if (morespec[j])
+                    d[j] = NULL;
+                else
+                    any_to_drop = 1;
+            }
+        }
     }
-
-    if (oldvalue) {
+    if (any_to_drop) {
         // drop anything in mt->cache that might overlap with the new method
         struct invalidate_mt_env mt_cache_env;
         mt_cache_env.max_world = max_world;
@@ -1674,31 +1731,7 @@ JL_DLLEXPORT void jl_method_table_insert(jl_methtable_t *mt, jl_method_t *method
                 }
             }
         }
-
-        jl_value_t **d;
-        size_t j, n;
-        if (jl_is_method(oldvalue)) {
-            d = &oldvalue;
-            n = 1;
-        }
-        else {
-            assert(jl_is_array(oldvalue));
-            d = jl_array_ptr_data(oldvalue);
-            n = jl_array_len(oldvalue);
-        }
-        for (j = 0; j < n; j++) {
-            jl_value_t *m = d[j];
-            jl_svec_t *specializations = jl_atomic_load_acquire(&((jl_method_t*)m)->specializations);
-            jl_method_instance_t **data = (jl_method_instance_t**)jl_svec_data(specializations);
-            size_t i, l = jl_svec_len(specializations);
-            for (i = 0; i < l; i++) {
-                jl_method_instance_t *mi = jl_atomic_load_relaxed(&data[i]);
-                if (mi != NULL && mi->backedges && !jl_has_empty_intersection(type, (jl_value_t*)mi->specTypes)) {
-                    invalidate_backedges(mi, max_world, "jl_method_table_insert");
-                    invalidated = 1;
-                }
-            }
-        }
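+        // any entries dropped here count as an invalidation for the debug log below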
+        invalidated = 1;
     }
     if (invalidated && _jl_debug_method_invalidation) {
         jl_array_ptr_1d_push(_jl_debug_method_invalidation, (jl_value_t*)method);