// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

+using System.Buffers;
using System.Runtime.CompilerServices;
+using Microsoft.Extensions.Caching.Distributed;
using Microsoft.Extensions.Caching.Hybrid.Internal;
+using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Options;
using static Microsoft.Extensions.Caching.Hybrid.Internal.DefaultHybridCache;

namespace Microsoft.Extensions.Caching.Hybrid.Tests;

public class BufferReleaseTests // note that buffer ref-counting is only enabled for DEBUG builds; can only verify general behaviour without that
{
-    static IDisposable GetDefaultCache(out DefaultHybridCache cache)
+    static IDisposable GetDefaultCache(out DefaultHybridCache cache, Action<ServiceCollection>? config = null)
    {
        var services = new ServiceCollection();
+        config?.Invoke(services);
        services.AddHybridCache();
        var provider = services.BuildServiceProvider();
        cache = Assert.IsType<DefaultHybridCache>(provider.GetRequiredService<HybridCache>());
        return provider;
    }

    [Fact]
-    public async Task BufferGetsReleased()
+    public async Task BufferGetsReleased_NoL2()
    {
        using var provider = GetDefaultCache(out var cache);
#if DEBUG
@@ -39,27 +44,174 @@ public async Task BufferGetsReleased()
        Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));

        // assert that we can reserve the buffer *now* (mostly to see that it behaves differently later)
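+        // NeedsEvictionCallback indicates the item holds pooled memory that is handed back to the
+        // pool via an eviction callback once the L1 entry goes away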
+        Assert.True(cacheItem.NeedsEvictionCallback, "should be pooled memory");
        Assert.True(cacheItem.TryReserveBuffer(out _));
        cacheItem.Release(); // for the above reserve

-        var second = await cache.GetOrCreateAsync(key, _ => GetAsync(), new HybridCacheEntryOptions { Flags = HybridCacheEntryFlags.DisableUnderlyingData });
+        var second = await cache.GetOrCreateAsync(key, _ => GetAsync(), NoUnderlying);
        Assert.NotNull(second);
        Assert.NotSame(first, second);

        await cache.RemoveKeyAsync(key);
-        var third = await cache.GetOrCreateAsync(key, _ => GetAsync(), new HybridCacheEntryOptions { Flags = HybridCacheEntryFlags.DisableUnderlyingData });
+        var third = await cache.GetOrCreateAsync(key, _ => GetAsync(), NoUnderlying);
        Assert.Null(third);

-        await Task.Delay(500); // give it a moment
+        // give it a moment for the eviction callback to kick in
+        for (int i = 0; i < 10 && cacheItem.NeedsEvictionCallback; i++)
+        {
+            await Task.Delay(250);
+        }
#if DEBUG
        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
#endif
        // assert that we can *no longer* reserve this buffer, because we've already recycled it
        Assert.False(cacheItem.TryReserveBuffer(out _));
+        Assert.False(cacheItem.NeedsEvictionCallback, "should be recycled now");
+        static ValueTask<Customer> GetAsync() => new(new Customer { Id = 42, Name = "Fred" });
+    }
+
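+    // DisableUnderlyingData: never invoke the factory, so results must come from L1/L2 and a miss yields null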
+    private static readonly HybridCacheEntryOptions NoUnderlying = new HybridCacheEntryOptions { Flags = HybridCacheEntryFlags.DisableUnderlyingData };
+
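+    // Test L2 backend: a MemoryDistributedCache that also implements IBufferDistributedCache,
+    // so these tests can exercise both the legacy byte[] path and the pooled-buffer path.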
+    class TestCache : MemoryDistributedCache, IBufferDistributedCache
+    {
+        public TestCache(IOptions<MemoryDistributedCacheOptions> options) : base(options) { }

+        void IBufferDistributedCache.Set(string key, ReadOnlySequence<byte> value, DistributedCacheEntryOptions options)
+            => Set(key, value.ToArray(), options); // efficiency not important for this
+
+        ValueTask IBufferDistributedCache.SetAsync(string key, ReadOnlySequence<byte> value, DistributedCacheEntryOptions options, CancellationToken token)
+            => new(SetAsync(key, value.ToArray(), options, token)); // efficiency not important for this
+
+        bool IBufferDistributedCache.TryGet(string key, IBufferWriter<byte> destination)
+            => Write(destination, Get(key));
+
+        async ValueTask<bool> IBufferDistributedCache.TryGetAsync(string key, IBufferWriter<byte> destination, CancellationToken token)
+            => Write(destination, await GetAsync(key, token));
+
+        static bool Write(IBufferWriter<byte> destination, byte[]? buffer)
+        {
+            if (buffer is null)
+            {
+                return false;
+            }
+            destination.Write(buffer);
+            return true;
+        }
+    }
+
+    [Fact]
+    public async Task BufferDoesNotNeedRelease_LegacyL2() // byte[] API; not pooled
+    {
+        using var provider = GetDefaultCache(out var cache,
+            services => services.AddSingleton<IDistributedCache, TestCache>());
+
+        cache.DebugRemoveFeatures(CacheFeatures.BackendBuffers);
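+        // removing the BackendBuffers feature forces the cache to treat TestCache as a plain
+        // byte[] IDistributedCache, even though it also implements IBufferDistributedCache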
+        // prep the backend with our data
+        var key = Me();
+        Assert.NotNull(cache.BackendCache);
+        var serializer = cache.GetSerializer<Customer>();
+        using (var writer = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue))
+        {
+            serializer.Serialize(await GetAsync(), writer);
+            cache.BackendCache.Set(key, writer.ToArray());
+        }
+#if DEBUG
+        cache.DebugGetOutstandingBuffers(flush: true);
+        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
+#endif
+        var first = await cache.GetOrCreateAsync(key, _ => GetAsync(), NoUnderlying); // we expect this to come from L2, hence NoUnderlying
+        Assert.NotNull(first);
+#if DEBUG
+        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
+#endif
+        Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
+
+        // assert that we can reserve the buffer *now* (mostly to see that it behaves differently later)
+        Assert.False(cacheItem.NeedsEvictionCallback, "should NOT be pooled memory");
+        Assert.True(cacheItem.TryReserveBuffer(out _));
+        cacheItem.Release(); // for the above reserve
+
+        var second = await cache.GetOrCreateAsync(key, _ => GetAsync(), NoUnderlying);
+        Assert.NotNull(second);
+        Assert.NotSame(first, second);
+
+        await cache.RemoveKeyAsync(key);
+        var third = await cache.GetOrCreateAsync(key, _ => GetAsync(), NoUnderlying);
+        Assert.Null(third);
+        Assert.Null(await cache.BackendCache.GetAsync(key)); // should be gone from L2 too
+
+        // give it a moment for the eviction callback to kick in
+        for (int i = 0; i < 10 && cacheItem.NeedsEvictionCallback; i++)
+        {
+            await Task.Delay(250);
+        }
+#if DEBUG
+        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
+#endif
+        // assert that we can *still* reserve this buffer; a byte[] (non-pooled) buffer stays readable
+        Assert.True(cacheItem.TryReserveBuffer(out _)); // always readable
+        cacheItem.Release();
+
+        Assert.False(cacheItem.NeedsEvictionCallback, "should still not need recycling");
+        static ValueTask<Customer> GetAsync() => new(new Customer { Id = 42, Name = "Fred" });
+    }
+
+    [Fact]
+    public async Task BufferGetsReleased_BufferL2() // IBufferWriter<byte> API; pooled
+    {
+        using var provider = GetDefaultCache(out var cache,
+            services => services.AddSingleton<IDistributedCache, TestCache>());
+
+        // prep the backend with our data
+        var key = Me();
+        Assert.NotNull(cache.BackendCache);
+        var serializer = cache.GetSerializer<Customer>();
+        using (var writer = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue))
+        {
+            serializer.Serialize(await GetAsync(), writer);
+            cache.BackendCache.Set(key, writer.ToArray());
+        }
+#if DEBUG
+        cache.DebugGetOutstandingBuffers(flush: true);
+        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
+#endif
+        var first = await cache.GetOrCreateAsync(key, _ => GetAsync(), NoUnderlying); // we expect this to come from L2, hence NoUnderlying
+        Assert.NotNull(first);
+#if DEBUG
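+        // the read above used the IBufferDistributedCache (pooled) path, so one rented buffer is still live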
+        Assert.Equal(1, cache.DebugGetOutstandingBuffers());
+#endif
+        Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
+
+        // assert that we can reserve the buffer *now* (mostly to see that it behaves differently later)
+        Assert.True(cacheItem.NeedsEvictionCallback, "should be pooled memory");
+        Assert.True(cacheItem.TryReserveBuffer(out _));
+        cacheItem.Release(); // for the above reserve
+
+        var second = await cache.GetOrCreateAsync(key, _ => GetAsync(), NoUnderlying);
+        Assert.NotNull(second);
+        Assert.NotSame(first, second);
+
+        await cache.RemoveKeyAsync(key);
+        var third = await cache.GetOrCreateAsync(key, _ => GetAsync(), NoUnderlying);
+        Assert.Null(third);
+        Assert.Null(await cache.BackendCache.GetAsync(key)); // should be gone from L2 too
+
+        // give it a moment for the eviction callback to kick in
+        for (int i = 0; i < 10 && cacheItem.NeedsEvictionCallback; i++)
+        {
+            await Task.Delay(250);
+        }
+#if DEBUG
+        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
+#endif
+        // assert that we can *no longer* reserve this buffer, because we've already recycled it
+        Assert.False(cacheItem.TryReserveBuffer(out _)); // released now
+
+        Assert.False(cacheItem.NeedsEvictionCallback, "should be recycled by now");
        static ValueTask<Customer> GetAsync() => new(new Customer { Id = 42, Name = "Fred" });
    }

+
    public class Customer
    {
        public int Id { get; set; }