@@ -176,7 +176,19 @@ static __always_inline void fpu_vgfmg(u8 v1, u8 v2, u8 v3)
 		     : "memory");
 }
 
-#ifdef CONFIG_CC_IS_CLANG
+#ifdef CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS
+
+static __always_inline void fpu_vl(u8 v1, const void *vxr)
+{
+	instrument_read(vxr, sizeof(__vector128));
+	asm volatile("VL	%[v1],%O[vxr],,%R[vxr]\n"
+		     :
+		     : [vxr] "Q" (*(__vector128 *)vxr),
+		       [v1] "I" (v1)
+		     : "memory");
+}
+
+#else /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
 
 static __always_inline void fpu_vl(u8 v1, const void *vxr)
 {
@@ -190,19 +202,7 @@ static __always_inline void fpu_vl(u8 v1, const void *vxr)
 		: "memory", "1");
 }
 
-#else /* CONFIG_CC_IS_CLANG */
-
-static __always_inline void fpu_vl(u8 v1, const void *vxr)
-{
-	instrument_read(vxr, sizeof(__vector128));
-	asm volatile("VL	%[v1],%O[vxr],,%R[vxr]\n"
-		     :
-		     : [vxr] "Q" (*(__vector128 *)vxr),
-		       [v1] "I" (v1)
-		     : "memory");
-}
-
-#endif /* CONFIG_CC_IS_CLANG */
+#endif /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
 
 static __always_inline void fpu_vleib(u8 v, s16 val, u8 index)
 {
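
The two fpu_vl() variants above differ only in how the memory operand is addressed: with CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS the "Q" constraint together with the %O (displacement) and %R (base register) operand modifiers lets the compiler form the address directly, while the fallback loads the address into general purpose register 1 with "la" and therefore has to clobber it. The following is an illustration only, not part of the patch, showing the same %O/%R pattern with MVC as a stand-in instruction; it assumes an s390 compiler that accepts these operand modifiers:

/*
 * Illustration only (not from this patch): decompose a "Q" memory operand
 * into its displacement (%O) and base register (%R), here for a 16 byte
 * MVC copy.
 */
static inline void copy16(void *dst, const void *src)
{
	asm volatile("MVC	%O[dst](16,%R[dst]),%[src]\n"
		     : [dst] "=Q" (*(char (*)[16])dst)
		     : [src] "Q" (*(const char (*)[16])src)
		     : "memory");
}
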
@@ -231,43 +231,43 @@ static __always_inline u64 fpu_vlgvf(u8 v, u16 index)
 	return val;
 }
 
-#ifdef CONFIG_CC_IS_CLANG
+#ifdef CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS
 
 static __always_inline void fpu_vll(u8 v1, u32 index, const void *vxr)
 {
 	unsigned int size;
 
 	size = min(index + 1, sizeof(__vector128));
 	instrument_read(vxr, size);
-	asm volatile(
-		"	la	1,%[vxr]\n"
-		"	VLL	%[v1],%[index],0,1\n"
-		:
-		: [vxr] "R" (*(u8 *)vxr),
-		  [index] "d" (index),
-		  [v1] "I" (v1)
-		: "memory", "1");
+	asm volatile("VLL	%[v1],%[index],%O[vxr],%R[vxr]\n"
+		     :
+		     : [vxr] "Q" (*(u8 *)vxr),
+		       [index] "d" (index),
+		       [v1] "I" (v1)
+		     : "memory");
 }
 
-#else /* CONFIG_CC_IS_CLANG */
+#else /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
 
 static __always_inline void fpu_vll(u8 v1, u32 index, const void *vxr)
 {
 	unsigned int size;
 
 	size = min(index + 1, sizeof(__vector128));
 	instrument_read(vxr, size);
-	asm volatile("VLL	%[v1],%[index],%O[vxr],%R[vxr]\n"
-		     :
-		     : [vxr] "Q" (*(u8 *)vxr),
-		       [index] "d" (index),
-		       [v1] "I" (v1)
-		     : "memory");
+	asm volatile(
+		"	la	1,%[vxr]\n"
+		"	VLL	%[v1],%[index],0,1\n"
+		:
+		: [vxr] "R" (*(u8 *)vxr),
+		  [index] "d" (index),
+		  [v1] "I" (v1)
+		: "memory", "1");
 }
 
-#endif /* CONFIG_CC_IS_CLANG */
+#endif /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
 
-#ifdef CONFIG_CC_IS_CLANG
+#ifdef CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS
 
 #define fpu_vlm(_v1, _v3, _vxrs)					\
 ({									\
@@ -277,17 +277,15 @@ static __always_inline void fpu_vll(u8 v1, u32 index, const void *vxr)
 	} *_v = (void *)(_vxrs);					\
 									\
 	instrument_read(_v, size);					\
-	asm volatile(							\
-		"	la	1,%[vxrs]\n"				\
-		"	VLM	%[v1],%[v3],0,1\n"			\
-		:							\
-		: [vxrs] "R" (*_v),					\
-		  [v1] "I" (_v1), [v3] "I" (_v3)			\
-		: "memory", "1");					\
+	asm volatile("VLM	%[v1],%[v3],%O[vxrs],%R[vxrs]\n"	\
+		     :							\
+		     : [vxrs] "Q" (*_v),				\
+		       [v1] "I" (_v1), [v3] "I" (_v3)			\
+		     : "memory");					\
 	(_v3) - (_v1) + 1;						\
 })
 
-#else /* CONFIG_CC_IS_CLANG */
+#else /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
 
 #define fpu_vlm(_v1, _v3, _vxrs)					\
 ({									\
@@ -297,15 +295,17 @@ static __always_inline void fpu_vll(u8 v1, u32 index, const void *vxr)
 	} *_v = (void *)(_vxrs);					\
 									\
 	instrument_read(_v, size);					\
-	asm volatile("VLM	%[v1],%[v3],%O[vxrs],%R[vxrs]\n"	\
-		     :							\
-		     : [vxrs] "Q" (*_v),				\
-		       [v1] "I" (_v1), [v3] "I" (_v3)			\
-		     : "memory");					\
+	asm volatile(							\
+		"	la	1,%[vxrs]\n"				\
+		"	VLM	%[v1],%[v3],0,1\n"			\
+		:							\
+		: [vxrs] "R" (*_v),					\
+		  [v1] "I" (_v1), [v3] "I" (_v3)			\
+		: "memory", "1");					\
 	(_v3) - (_v1) + 1;						\
 })
 
-#endif /* CONFIG_CC_IS_CLANG */
+#endif /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
 
 static __always_inline void fpu_vlr(u8 v1, u8 v2)
 {
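
fpu_vll() and the fpu_vlm() macro apply the same two-variant scheme to VLL (load with length, bytes 0..index) and VLM (load multiple vector registers v1..v3); fpu_vlm() additionally evaluates to the number of registers loaded. A hedged usage sketch, not taken from the patch, assuming the caller already owns the vector registers (for example between kernel_fpu_begin() and kernel_fpu_end()):

/* Illustration only: load vector registers 0-7 from an array and return
 * the register count that the fpu_vlm() macro evaluates to. */
static int load_v0_to_v7(const __vector128 vxrs[8])
{
	return fpu_vlm(0, 7, vxrs);	/* (_v3) - (_v1) + 1 == 8 */
}
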
@@ -355,7 +355,18 @@ static __always_inline void fpu_vsrlb(u8 v1, u8 v2, u8 v3)
 		     : "memory");
 }
 
-#ifdef CONFIG_CC_IS_CLANG
+#ifdef CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS
+
+static __always_inline void fpu_vst(u8 v1, const void *vxr)
+{
+	instrument_write(vxr, sizeof(__vector128));
+	asm volatile("VST	%[v1],%O[vxr],,%R[vxr]\n"
+		     : [vxr] "=Q" (*(__vector128 *)vxr)
+		     : [v1] "I" (v1)
+		     : "memory");
+}
+
+#else /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
 
 static __always_inline void fpu_vst(u8 v1, const void *vxr)
 {
@@ -368,20 +379,23 @@ static __always_inline void fpu_vst(u8 v1, const void *vxr)
 		: "memory", "1");
 }
 
-#else /* CONFIG_CC_IS_CLANG */
+#endif /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
 
-static __always_inline void fpu_vst(u8 v1, const void *vxr)
+#ifdef CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS
+
+static __always_inline void fpu_vstl(u8 v1, u32 index, const void *vxr)
 {
-	instrument_write(vxr, sizeof(__vector128));
-	asm volatile("VST	%[v1],%O[vxr],,%R[vxr]\n"
-		     : [vxr] "=Q" (*(__vector128 *)vxr)
-		     : [v1] "I" (v1)
+	unsigned int size;
+
+	size = min(index + 1, sizeof(__vector128));
+	instrument_write(vxr, size);
+	asm volatile("VSTL	%[v1],%[index],%O[vxr],%R[vxr]\n"
+		     : [vxr] "=Q" (*(u8 *)vxr)
+		     : [index] "d" (index), [v1] "I" (v1)
 		     : "memory");
 }
 
-#endif /* CONFIG_CC_IS_CLANG */
-
-#ifdef CONFIG_CC_IS_CLANG
+#else /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
 
 static __always_inline void fpu_vstl(u8 v1, u32 index, const void *vxr)
 {
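
fpu_vstl() stores only bytes 0..index of the vector register, which is why the instrumented size is min(index + 1, sizeof(__vector128)). A hedged sketch, not from the patch, again assuming the caller owns the vector registers:

/* Illustration only: store the low 8 bytes (index 7) of vector register 0. */
static void store_low_half_v0(u8 buf[8])
{
	fpu_vstl(0, 7, buf);	/* VSTL stores bytes 0..7, i.e. 8 bytes */
}
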
@@ -397,23 +411,9 @@ static __always_inline void fpu_vstl(u8 v1, u32 index, const void *vxr)
 		: "memory", "1");
 }
 
-#else /* CONFIG_CC_IS_CLANG */
+#endif /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
 
-static __always_inline void fpu_vstl(u8 v1, u32 index, const void *vxr)
-{
-	unsigned int size;
-
-	size = min(index + 1, sizeof(__vector128));
-	instrument_write(vxr, size);
-	asm volatile("VSTL	%[v1],%[index],%O[vxr],%R[vxr]\n"
-		     : [vxr] "=Q" (*(u8 *)vxr)
-		     : [index] "d" (index), [v1] "I" (v1)
-		     : "memory");
-}
-
-#endif /* CONFIG_CC_IS_CLANG */
-
-#ifdef CONFIG_CC_IS_CLANG
+#ifdef CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS
 
 #define fpu_vstm(_v1, _v3, _vxrs)					\
 ({									\
@@ -423,16 +423,14 @@ static __always_inline void fpu_vstl(u8 v1, u32 index, const void *vxr)
 	} *_v = (void *)(_vxrs);					\
 									\
 	instrument_write(_v, size);					\
-	asm volatile(							\
-		"	la	1,%[vxrs]\n"				\
-		"	VSTM	%[v1],%[v3],0,1\n"			\
-		: [vxrs] "=R" (*_v)					\
-		: [v1] "I" (_v1), [v3] "I" (_v3)			\
-		: "memory", "1");					\
+	asm volatile("VSTM	%[v1],%[v3],%O[vxrs],%R[vxrs]\n"	\
+		     : [vxrs] "=Q" (*_v)				\
+		     : [v1] "I" (_v1), [v3] "I" (_v3)			\
+		     : "memory");					\
 	(_v3) - (_v1) + 1;						\
 })
 
-#else /* CONFIG_CC_IS_CLANG */
+#else /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
 
 #define fpu_vstm(_v1, _v3, _vxrs)					\
 ({									\
@@ -442,14 +440,16 @@ static __always_inline void fpu_vstl(u8 v1, u32 index, const void *vxr)
 	} *_v = (void *)(_vxrs);					\
 									\
 	instrument_write(_v, size);					\
-	asm volatile("VSTM	%[v1],%[v3],%O[vxrs],%R[vxrs]\n"	\
-		     : [vxrs] "=Q" (*_v)				\
-		     : [v1] "I" (_v1), [v3] "I" (_v3)			\
-		     : "memory");					\
+	asm volatile(							\
+		"	la	1,%[vxrs]\n"				\
+		"	VSTM	%[v1],%[v3],0,1\n"			\
+		: [vxrs] "=R" (*_v)					\
+		: [v1] "I" (_v1), [v3] "I" (_v3)			\
+		: "memory", "1");					\
 	(_v3) - (_v1) + 1;						\
 })
 
-#endif /* CONFIG_CC_IS_CLANG */
+#endif /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
 
 static __always_inline void fpu_vupllf(u8 v1, u8 v2)
 {
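
The store side mirrors the load side: fpu_vst(), fpu_vstl() and fpu_vstm() are the VST/VSTL/VSTM counterparts of the load helpers above, using "=Q" output operands instead of "Q" inputs. A hedged round-trip sketch, not part of the patch, assuming the vector facility is usable in the current context:

/* Illustration only: save vector register 16 to memory and reload it. */
static void save_and_restore_v16(__vector128 *buf)
{
	fpu_vst(16, buf);	/* VST: store VR 16 to *buf */
	fpu_vl(16, buf);	/* VL: load VR 16 back from *buf */
}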