@@ -28,7 +28,7 @@ use crate::{
     lower::{
         const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
     },
-    mapping::from_chalk,
+    mapping::{from_chalk, ToChalk},
     method_resolution,
     primitive::{self, UintTy},
     static_lifetime, to_chalk_trait_id,
@@ -279,21 +279,24 @@ impl<'a> InferenceContext<'a> {
                 let callee_ty = self.infer_expr(*callee, &Expectation::none());
                 let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone());
                 let mut res = None;
+                let mut derefed_callee = callee_ty.clone();
                 // manual loop to be able to access `derefs.table`
                 while let Some((callee_deref_ty, _)) = derefs.next() {
                     res = derefs.table.callable_sig(&callee_deref_ty, args.len());
                     if res.is_some() {
+                        derefed_callee = callee_deref_ty;
                         break;
                     }
                 }
-                let (param_tys, ret_ty): (Vec<Ty>, Ty) = match res {
+                let (param_tys, ret_ty) = match res {
                     Some(res) => {
                         let adjustments = auto_deref_adjust_steps(&derefs);
                         self.write_expr_adj(*callee, adjustments);
                         res
                     }
                     None => (Vec::new(), self.err_ty()),
                 };
+                let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
                 self.register_obligations_for_call(&callee_ty);

                 let expected_inputs = self.expected_inputs_for_expected_output(
@@ -302,7 +305,7 @@ impl<'a> InferenceContext<'a> {
                     param_tys.clone(),
                 );

-                self.check_call_arguments(args, &expected_inputs, &param_tys);
+                self.check_call_arguments(args, &expected_inputs, &param_tys, &indices_to_skip);
                 self.normalize_associated_types_in(ret_ty)
             }
             Expr::MethodCall { receiver, args, method_name, generic_args } => self
@@ -952,7 +955,7 @@ impl<'a> InferenceContext<'a> {
         let expected_inputs =
             self.expected_inputs_for_expected_output(expected, ret_ty.clone(), param_tys.clone());

-        self.check_call_arguments(args, &expected_inputs, &param_tys);
+        self.check_call_arguments(args, &expected_inputs, &param_tys, &[]);
         self.normalize_associated_types_in(ret_ty)
     }

@@ -983,24 +986,40 @@ impl<'a> InferenceContext<'a> {
         }
     }

-    fn check_call_arguments(&mut self, args: &[ExprId], expected_inputs: &[Ty], param_tys: &[Ty]) {
+    fn check_call_arguments(
+        &mut self,
+        args: &[ExprId],
+        expected_inputs: &[Ty],
+        param_tys: &[Ty],
+        skip_indices: &[u32],
+    ) {
         // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
         // We do this in a pretty awful way: first we type-check any arguments
         // that are not closures, then we type-check the closures. This is so
         // that we have more information about the types of arguments when we
         // type-check the functions. This isn't really the right way to do this.
         for &check_closures in &[false, true] {
+            let mut skip_indices = skip_indices.into_iter().copied().fuse().peekable();
             let param_iter = param_tys.iter().cloned().chain(repeat(self.err_ty()));
             let expected_iter = expected_inputs
                 .iter()
                 .cloned()
                 .chain(param_iter.clone().skip(expected_inputs.len()));
-            for ((&arg, param_ty), expected_ty) in args.iter().zip(param_iter).zip(expected_iter) {
+            for (idx, ((&arg, param_ty), expected_ty)) in
+                args.iter().zip(param_iter).zip(expected_iter).enumerate()
+            {
                 let is_closure = matches!(&self.body[arg], Expr::Lambda { .. });
                 if is_closure != check_closures {
                     continue;
                 }

+                while skip_indices.peek().map_or(false, |i| *i < idx as u32) {
+                    skip_indices.next();
+                }
+                if skip_indices.peek().copied() == Some(idx as u32) {
+                    continue;
+                }
+
                 // the difference between param_ty and expected here is that
                 // expected is the parameter when the expected *return* type is
                 // taken into account. So in `let _: &[i32] = identity(&[1, 2])`
@@ -1140,6 +1159,49 @@ impl<'a> InferenceContext<'a> {
         }
     }

+    /// Returns the argument indices to skip.
+    fn check_legacy_const_generics(&mut self, callee: Ty, args: &[ExprId]) -> Vec<u32> {
+        let (func, subst) = match callee.kind(Interner) {
+            TyKind::FnDef(fn_id, subst) => {
+                let callable = CallableDefId::from_chalk(self.db, *fn_id);
+                let func = match callable {
+                    CallableDefId::FunctionId(f) => f,
+                    _ => return Vec::new(),
+                };
+                (func, subst)
+            }
+            _ => return Vec::new(),
+        };
+
+        let data = self.db.function_data(func);
+        if data.legacy_const_generics_indices.is_empty() {
+            return Vec::new();
+        }
+
+        // only use legacy const generics if the param count matches with them
+        if data.params.len() + data.legacy_const_generics_indices.len() != args.len() {
+            return Vec::new();
+        }
+
+        // check legacy const parameters
+        for (subst_idx, arg_idx) in data.legacy_const_generics_indices.iter().copied().enumerate() {
+            let arg = match subst.at(Interner, subst_idx).constant(Interner) {
+                Some(c) => c,
+                None => continue, // not a const parameter?
+            };
+            if arg_idx >= args.len() as u32 {
+                continue;
+            }
+            let _ty = arg.data(Interner).ty.clone();
+            let expected = Expectation::none(); // FIXME use actual const ty, when that is lowered correctly
+            self.infer_expr(args[arg_idx as usize], &expected);
+            // FIXME: evaluate and unify with the const
+        }
+        let mut indices = data.legacy_const_generics_indices.clone();
+        indices.sort();
+        indices
+    }
+
     fn builtin_binary_op_return_ty(&mut self, op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Option<Ty> {
         let lhs_ty = self.resolve_ty_shallow(&lhs_ty);
         let rhs_ty = self.resolve_ty_shallow(&rhs_ty);
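
Note (not part of the diff): functions annotated with `#[rustc_legacy_const_generics(...)]`, as used by `core::arch`, take their const generic as an ordinary value argument at the attributed index at the call site, which is why `check_call_arguments` now receives a list of argument indices to skip. The standalone sketch below, with made-up argument values and a hypothetical helper name, mirrors the peekable skip-index iteration added above; it is an illustration, not code from this commit.

```rust
// Standalone sketch of the skip logic added to `check_call_arguments`:
// walk the call arguments and drop the positions listed in a sorted
// `skip_indices` slice (the legacy const generic argument positions).
fn non_skipped_args<'a>(args: &[&'a str], skip_indices: &[u32]) -> Vec<&'a str> {
    let mut skip = skip_indices.iter().copied().fuse().peekable();
    let mut kept = Vec::new();
    for (idx, &arg) in args.iter().enumerate() {
        // Advance past skip entries smaller than the current position.
        while skip.peek().map_or(false, |&i| i < idx as u32) {
            skip.next();
        }
        // Skip this argument if its position is listed.
        if skip.peek().copied() == Some(idx as u32) {
            continue;
        }
        kept.push(arg);
    }
    kept
}

fn main() {
    // If index 1 carries the const generic at the call site, only indices
    // 0 and 2 are matched against the function's declared parameters.
    assert_eq!(non_skipped_args(&["a", "2", "b"], &[1]), vec!["a", "b"]);
}
```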