@@ -6,11 +6,14 @@ use group::{Curve, Group, GroupEncoding, prime::PrimeCurveAffine};
6
6
use rand_core:: CryptoRng ;
7
7
use subtle:: { Choice , ConditionallySelectable , ConstantTimeEq , CtOption } ;
8
8
9
+ #[ cfg( feature = "alloc" ) ]
10
+ use alloc:: vec:: Vec ;
11
+
9
12
#[ cfg( feature = "serde" ) ]
10
13
use serdect:: serde:: { Deserialize , Serialize , de, ser} ;
11
14
use zeroize:: Zeroize ;
12
15
13
- use crate :: { CurveArithmetic , NonZeroScalar , Scalar } ;
16
+ use crate :: { BatchNormalize , CurveArithmetic , NonZeroScalar , Scalar } ;
14
17
15
18
/// Non-identity point type.
///
/// In the context of ECC, it's useful for ensuring that certain arithmetic
/// cannot result in the identity point.
// `repr(transparent)` guarantees `NonIdentity<P>` has the exact layout of `P`,
// which the `BatchNormalize` impls rely on for their pointer casts.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(transparent)]
pub struct NonIdentity<P> {
    point: P,
}
@@ -103,6 +107,72 @@ impl<P> AsRef<P> for NonIdentity<P> {
103
107
}
104
108
}
105
109
110
+ impl < const N : usize , P > BatchNormalize < [ Self ; N ] > for NonIdentity < P >
111
+ where
112
+ P : Curve + BatchNormalize < [ P ; N ] , Output = [ P :: AffineRepr ; N ] > ,
113
+ {
114
+ type Output = [ NonIdentity < P :: AffineRepr > ; N ] ;
115
+
116
+ fn batch_normalize ( points : & [ Self ; N ] ) -> [ NonIdentity < P :: AffineRepr > ; N ] {
117
+ // Ensure casting is safe.
118
+ // This always succeeds because `NonIdentity` is `repr(transparent)`.
119
+ debug_assert_eq ! ( size_of:: <P >( ) , size_of:: <NonIdentity <P >>( ) ) ;
120
+ debug_assert_eq ! ( align_of:: <P >( ) , align_of:: <NonIdentity <P >>( ) ) ;
121
+
122
+ #[ allow( unsafe_code) ]
123
+ // SAFETY: `NonIdentity` is `repr(transparent)`.
124
+ let points: & [ P ; N ] = unsafe { & * points. as_ptr ( ) . cast ( ) } ;
125
+ let affine_points = <P as BatchNormalize < _ > >:: batch_normalize ( points) ;
126
+
127
+ // Ensure `array::map()` can be optimized to a `memcpy`.
128
+ debug_assert_eq ! (
129
+ size_of:: <P :: AffineRepr >( ) ,
130
+ size_of:: <NonIdentity <P :: AffineRepr >>( )
131
+ ) ;
132
+ debug_assert_eq ! (
133
+ align_of:: <P :: AffineRepr >( ) ,
134
+ align_of:: <NonIdentity <P :: AffineRepr >>( )
135
+ ) ;
136
+
137
+ affine_points. map ( |point| NonIdentity { point } )
138
+ }
139
+ }
140
+
141
#[cfg(feature = "alloc")]
impl<P> BatchNormalize<[Self]> for NonIdentity<P>
where
    P: Curve + BatchNormalize<[P], Output = Vec<P::AffineRepr>>,
{
    type Output = Vec<NonIdentity<P::AffineRepr>>;

    /// Converts a dynamically-sized batch of non-identity projective points
    /// into their affine representations in a single pass.
    fn batch_normalize(points: &[Self]) -> Vec<NonIdentity<P::AffineRepr>> {
        // `NonIdentity<P>` is `repr(transparent)` over `P`, so the two types
        // share size and alignment; these assertions sanity-check that.
        debug_assert_eq!(size_of::<P>(), size_of::<NonIdentity<P>>());
        debug_assert_eq!(align_of::<P>(), align_of::<NonIdentity<P>>());

        #[allow(unsafe_code)]
        // SAFETY: `NonIdentity<P>` is `repr(transparent)` over `P`, so a
        // slice of one may be reinterpreted as a slice of the other.
        let inner: &[P] = unsafe { &*(points as *const [NonIdentity<P>] as *const [P]) };
        let normalized = <P as BatchNormalize<_>>::batch_normalize(inner);

        // Identical layout on the affine side lets the `into_iter` + `collect`
        // pipeline below optimize away.
        debug_assert_eq!(
            size_of::<P::AffineRepr>(),
            size_of::<NonIdentity<P::AffineRepr>>()
        );
        debug_assert_eq!(
            align_of::<P::AffineRepr>(),
            align_of::<NonIdentity<P::AffineRepr>>()
        );

        // Re-wrapping is sound: normalizing a non-identity point cannot
        // produce the identity.
        normalized
            .into_iter()
            .map(|point| NonIdentity { point })
            .collect()
    }
}
175
+
106
176
impl < P > ConditionallySelectable for NonIdentity < P >
107
177
where
108
178
P : ConditionallySelectable ,
@@ -238,6 +308,7 @@ impl<P: Group> Zeroize for NonIdentity<P> {
238
308
#[ cfg( all( test, feature = "dev" ) ) ]
239
309
mod tests {
240
310
use super :: NonIdentity ;
311
+ use crate :: BatchNormalize ;
241
312
use crate :: dev:: { AffinePoint , NonZeroScalar , ProjectivePoint , SecretKey } ;
242
313
use group:: GroupEncoding ;
243
314
use hex_literal:: hex;
@@ -303,4 +374,38 @@ mod tests {
303
374
304
375
assert_eq ! ( point. to_point( ) , pk. to_projective( ) ) ;
305
376
}
377
+
378
#[test]
fn batch_normalize() {
    // Fixed-size (array) batch normalization must agree with per-point
    // `to_affine()`.
    let projective = ProjectivePoint::from_bytes(
        &hex!("02c9afa9d845ba75166b5c215767b1d6934e50c3db36e89b127b8a622b120f6721").into(),
    )
    .unwrap();
    let nonidentity = NonIdentity::new(projective).unwrap();
    let batch = [nonidentity, nonidentity];

    let normalized = NonIdentity::batch_normalize(&batch);

    for (original, affine) in batch.into_iter().zip(normalized) {
        assert_eq!(original.to_affine(), affine);
    }
}
394
+
395
#[test]
#[cfg(feature = "alloc")]
fn batch_normalize_alloc() {
    // Slice (heap-allocated) batch normalization must agree with per-point
    // `to_affine()`.
    let projective = ProjectivePoint::from_bytes(
        &hex!("02c9afa9d845ba75166b5c215767b1d6934e50c3db36e89b127b8a622b120f6721").into(),
    )
    .unwrap();
    let nonidentity = NonIdentity::new(projective).unwrap();
    let batch = vec![nonidentity, nonidentity];

    let normalized = NonIdentity::batch_normalize(batch.as_slice());

    for (original, affine) in batch.into_iter().zip(normalized) {
        assert_eq!(original.to_affine(), affine);
    }
}
306
411
}
0 commit comments