@@ -34,6 +34,7 @@ enum sbi_ext_id {
34
34
SBI_EXT_PMU = 0x504D55 ,
35
35
SBI_EXT_DBCN = 0x4442434E ,
36
36
SBI_EXT_STA = 0x535441 ,
37
+ SBI_EXT_NACL = 0x4E41434C ,
37
38
38
39
/* Experimentals extensions must lie within this range */
39
40
SBI_EXT_EXPERIMENTAL_START = 0x08000000 ,
/*
 * Passed as the shared-memory address in an SBI set-shmem call to
 * disable the shared-memory region (per the SBI spec — confirm exact
 * semantics against the SBI specification for the calling extension).
 */
#define SBI_SHMEM_DISABLE		-1
/* Function IDs of the SBI Nested Acceleration (NACL) extension. */
enum sbi_ext_nacl_fid {
	SBI_EXT_NACL_PROBE_FEATURE = 0x0,	/* query an sbi_ext_nacl_feature */
	SBI_EXT_NACL_SET_SHMEM = 0x1,		/* register the shared-memory area */
	SBI_EXT_NACL_SYNC_CSR = 0x2,
	SBI_EXT_NACL_SYNC_HFENCE = 0x3,
	SBI_EXT_NACL_SYNC_SRET = 0x4,
};
/* Feature IDs probed via SBI_EXT_NACL_PROBE_FEATURE. */
enum sbi_ext_nacl_feature {
	SBI_NACL_FEAT_SYNC_CSR = 0x0,
	SBI_NACL_FEAT_SYNC_HFENCE = 0x1,
	SBI_NACL_FEAT_SYNC_SRET = 0x2,
	SBI_NACL_FEAT_AUTOSWAP_CSR = 0x3,
};
/*
 * Layout of the NACL shared-memory area. The scratch space and the
 * per-purpose sub-regions (SRET, autoswap, HFENCE, dirty bitmap, CSR)
 * are carved out back-to-back: each *_OFFSET is the previous region's
 * offset plus its size. The CSR region holds 1024 xlen-sized slots.
 */
#define SBI_NACL_SHMEM_ADDR_SHIFT	12	/* shmem base must be 4 KiB aligned */
#define SBI_NACL_SHMEM_SCRATCH_OFFSET	0x0000
#define SBI_NACL_SHMEM_SCRATCH_SIZE	0x1000
#define SBI_NACL_SHMEM_SRET_OFFSET	0x0000
#define SBI_NACL_SHMEM_SRET_SIZE	0x0200
#define SBI_NACL_SHMEM_AUTOSWAP_OFFSET	(SBI_NACL_SHMEM_SRET_OFFSET + \
					 SBI_NACL_SHMEM_SRET_SIZE)
#define SBI_NACL_SHMEM_AUTOSWAP_SIZE	0x0080
#define SBI_NACL_SHMEM_UNUSED_OFFSET	(SBI_NACL_SHMEM_AUTOSWAP_OFFSET + \
					 SBI_NACL_SHMEM_AUTOSWAP_SIZE)
#define SBI_NACL_SHMEM_UNUSED_SIZE	0x0580
#define SBI_NACL_SHMEM_HFENCE_OFFSET	(SBI_NACL_SHMEM_UNUSED_OFFSET + \
					 SBI_NACL_SHMEM_UNUSED_SIZE)
#define SBI_NACL_SHMEM_HFENCE_SIZE	0x0780
#define SBI_NACL_SHMEM_DBITMAP_OFFSET	(SBI_NACL_SHMEM_HFENCE_OFFSET + \
					 SBI_NACL_SHMEM_HFENCE_SIZE)
#define SBI_NACL_SHMEM_DBITMAP_SIZE	0x0080
#define SBI_NACL_SHMEM_CSR_OFFSET	(SBI_NACL_SHMEM_DBITMAP_OFFSET + \
					 SBI_NACL_SHMEM_DBITMAP_SIZE)
#define SBI_NACL_SHMEM_CSR_SIZE		((__riscv_xlen / 8) * 1024)
#define SBI_NACL_SHMEM_SIZE		(SBI_NACL_SHMEM_CSR_OFFSET + \
					 SBI_NACL_SHMEM_CSR_SIZE)
/*
 * Map a 12-bit CSR number to its slot index in the shmem CSR region:
 * the top two bits (csr[11:10]) are packed next to the low byte,
 * compressing the sparse CSR space into 1024 slots.
 * NOTE: no space before '(' — this must be a function-like macro.
 */
#define SBI_NACL_SHMEM_CSR_INDEX(__csr_num) \
	((((__csr_num) & 0xc00) >> 2) | ((__csr_num) & 0xff))
/*
 * HFENCE region: an array of fixed-size entries, each four xlen words
 * (config, pnum, <reserved>, pcount). The *_ENTRY* macros take an entry
 * number and yield byte offsets of the entry and its fields.
 * NOTE: no space before '(' — these must be function-like macros.
 */
#define SBI_NACL_SHMEM_HFENCE_ENTRY_SZ		((__riscv_xlen / 8) * 4)
#define SBI_NACL_SHMEM_HFENCE_ENTRY_MAX		\
	(SBI_NACL_SHMEM_HFENCE_SIZE /		\
	 SBI_NACL_SHMEM_HFENCE_ENTRY_SZ)
#define SBI_NACL_SHMEM_HFENCE_ENTRY(__num)	\
	(SBI_NACL_SHMEM_HFENCE_OFFSET +		\
	 (__num) * SBI_NACL_SHMEM_HFENCE_ENTRY_SZ)
/* config is word 0 of the entry */
#define SBI_NACL_SHMEM_HFENCE_ENTRY_CONFIG(__num) \
	SBI_NACL_SHMEM_HFENCE_ENTRY(__num)
/* pnum is word 1 of the entry */
#define SBI_NACL_SHMEM_HFENCE_ENTRY_PNUM(__num)	\
	(SBI_NACL_SHMEM_HFENCE_ENTRY(__num) + (__riscv_xlen / 8))
/* pcount is word 3 of the entry */
#define SBI_NACL_SHMEM_HFENCE_ENTRY_PCOUNT(__num) \
	(SBI_NACL_SHMEM_HFENCE_ENTRY(__num) +	\
	 ((__riscv_xlen / 8) * 3))
/*
 * Bit-field layout of an HFENCE entry's config word, packed from the
 * most-significant bit downward:
 *   [xlen-1]        PEND   (1 bit)  - entry pending
 *   next 3 bits     RSVD1
 *   next 4 bits     TYPE   - one of SBI_NACL_SHMEM_HFENCE_TYPE_*
 *   next 1 bit      RSVD2
 *   next 7 bits     ORDER  - page order, biased by ORDER_BASE
 * Each field's SHIFT is derived from the previous field's shift minus
 * this field's width, so the layout stays consistent by construction.
 */
#define SBI_NACL_SHMEM_HFENCE_CONFIG_PEND_BITS	1
#define SBI_NACL_SHMEM_HFENCE_CONFIG_PEND_SHIFT	\
	(__riscv_xlen - SBI_NACL_SHMEM_HFENCE_CONFIG_PEND_BITS)
#define SBI_NACL_SHMEM_HFENCE_CONFIG_PEND_MASK	\
	((1UL << SBI_NACL_SHMEM_HFENCE_CONFIG_PEND_BITS) - 1)
#define SBI_NACL_SHMEM_HFENCE_CONFIG_PEND	\
	(SBI_NACL_SHMEM_HFENCE_CONFIG_PEND_MASK << \
	 SBI_NACL_SHMEM_HFENCE_CONFIG_PEND_SHIFT)

#define SBI_NACL_SHMEM_HFENCE_CONFIG_RSVD1_BITS	3
#define SBI_NACL_SHMEM_HFENCE_CONFIG_RSVD1_SHIFT \
	(SBI_NACL_SHMEM_HFENCE_CONFIG_PEND_SHIFT - \
	 SBI_NACL_SHMEM_HFENCE_CONFIG_RSVD1_BITS)

#define SBI_NACL_SHMEM_HFENCE_CONFIG_TYPE_BITS	4
#define SBI_NACL_SHMEM_HFENCE_CONFIG_TYPE_SHIFT	\
	(SBI_NACL_SHMEM_HFENCE_CONFIG_RSVD1_SHIFT - \
	 SBI_NACL_SHMEM_HFENCE_CONFIG_TYPE_BITS)
#define SBI_NACL_SHMEM_HFENCE_CONFIG_TYPE_MASK	\
	((1UL << SBI_NACL_SHMEM_HFENCE_CONFIG_TYPE_BITS) - 1)

/* Values of the TYPE field: which flavor of HFENCE to perform. */
#define SBI_NACL_SHMEM_HFENCE_TYPE_GVMA		0x0
#define SBI_NACL_SHMEM_HFENCE_TYPE_GVMA_ALL	0x1
#define SBI_NACL_SHMEM_HFENCE_TYPE_GVMA_VMID	0x2
#define SBI_NACL_SHMEM_HFENCE_TYPE_GVMA_VMID_ALL 0x3
#define SBI_NACL_SHMEM_HFENCE_TYPE_VVMA		0x4
#define SBI_NACL_SHMEM_HFENCE_TYPE_VVMA_ALL	0x5
#define SBI_NACL_SHMEM_HFENCE_TYPE_VVMA_ASID	0x6
#define SBI_NACL_SHMEM_HFENCE_TYPE_VVMA_ASID_ALL 0x7

#define SBI_NACL_SHMEM_HFENCE_CONFIG_RSVD2_BITS	1
#define SBI_NACL_SHMEM_HFENCE_CONFIG_RSVD2_SHIFT \
	(SBI_NACL_SHMEM_HFENCE_CONFIG_TYPE_SHIFT - \
	 SBI_NACL_SHMEM_HFENCE_CONFIG_RSVD2_BITS)

#define SBI_NACL_SHMEM_HFENCE_CONFIG_ORDER_BITS	7
#define SBI_NACL_SHMEM_HFENCE_CONFIG_ORDER_SHIFT \
	(SBI_NACL_SHMEM_HFENCE_CONFIG_RSVD2_SHIFT - \
	 SBI_NACL_SHMEM_HFENCE_CONFIG_ORDER_BITS)
#define SBI_NACL_SHMEM_HFENCE_CONFIG_ORDER_MASK	\
	((1UL << SBI_NACL_SHMEM_HFENCE_CONFIG_ORDER_BITS) - 1)
#define SBI_NACL_SHMEM_HFENCE_ORDER_BASE	12	/* order 0 == 4 KiB pages */
/*
 * ASID/VMID fields occupy the low bits of the config word; their widths
 * depend on xlen (9+7 bits on rv32, 16+14 bits on rv64). VMID sits
 * immediately above ASID, so VMID_SHIFT equals ASID's width.
 */
#if __riscv_xlen == 32
#define SBI_NACL_SHMEM_HFENCE_CONFIG_ASID_BITS	9
#define SBI_NACL_SHMEM_HFENCE_CONFIG_VMID_BITS	7
#else
#define SBI_NACL_SHMEM_HFENCE_CONFIG_ASID_BITS	16
#define SBI_NACL_SHMEM_HFENCE_CONFIG_VMID_BITS	14
#endif
#define SBI_NACL_SHMEM_HFENCE_CONFIG_VMID_SHIFT	\
	SBI_NACL_SHMEM_HFENCE_CONFIG_ASID_BITS
#define SBI_NACL_SHMEM_HFENCE_CONFIG_ASID_MASK	\
	((1UL << SBI_NACL_SHMEM_HFENCE_CONFIG_ASID_BITS) - 1)
#define SBI_NACL_SHMEM_HFENCE_CONFIG_VMID_MASK	\
	((1UL << SBI_NACL_SHMEM_HFENCE_CONFIG_VMID_BITS) - 1)
/*
 * Autoswap region: flag word at offset 0, followed by the hstatus slot
 * one xlen word in. BIT() comes from <linux/bits.h> elsewhere in the
 * kernel headers.
 */
#define SBI_NACL_SHMEM_AUTOSWAP_FLAG_HSTATUS	BIT(0)
#define SBI_NACL_SHMEM_AUTOSWAP_HSTATUS		((__riscv_xlen / 8) * 1)
/*
 * SRET region: byte offset of GPR x(__i); 32 xlen-sized slots (x0..x31).
 * NOTE: no space before '(' — this must be a function-like macro.
 */
#define SBI_NACL_SHMEM_SRET_X(__i)	((__riscv_xlen / 8) * (__i))
#define SBI_NACL_SHMEM_SRET_X_LAST	31
/* SBI spec version fields */
#define SBI_SPEC_VERSION_DEFAULT	0x1
#define SBI_SPEC_VERSION_MAJOR_SHIFT	24