-
-
Notifications
You must be signed in to change notification settings - Fork 191
Expand file tree
/
Copy pathSourceCacheFixture.cs
More file actions
358 lines (284 loc) · 12.3 KB
/
SourceCacheFixture.cs
File metadata and controls
358 lines (284 loc) · 12.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reactive.Linq;
using System.Threading;
using System.Threading.Tasks;
using DynamicData.Tests.Domain;
using FluentAssertions;
using Xunit;
namespace DynamicData.Tests.Cache;
/// <summary>
/// Unit tests for <see cref="SourceCache{TObject, TKey}"/>: batched edits, count
/// notifications, subscription lifecycle, empty-changeset suppression, static filters,
/// and several concurrency regression tests (deadlock and snapshot-duplication races).
/// </summary>
public class SourceCacheFixture : IDisposable
{
    private readonly ChangeSetAggregator<Person, string> _results;

    private readonly ISourceCache<Person, string> _source;

    public SourceCacheFixture()
    {
        // Cache keyed by Person.Key; the aggregator records every change set it emits.
        _source = new SourceCache<Person, string>(p => p.Key);
        _results = _source.Connect().AsAggregator();
    }

    [Fact]
    public void CanHandleABatchOfUpdates()
    {
        // A single Edit call must coalesce all inner operations into one change set.
        _source.Edit(
            updater =>
            {
                var torequery = new Person("Adult1", 44);
                updater.AddOrUpdate(new Person("Adult1", 40));
                updater.AddOrUpdate(new Person("Adult1", 41));
                updater.AddOrUpdate(new Person("Adult1", 42));
                updater.AddOrUpdate(new Person("Adult1", 43));
                updater.Refresh(torequery);
                updater.Remove(torequery);
                // Refresh after remove targets a missing key and should be a no-op.
                updater.Refresh(torequery);
            });

        // 1 add + 3 updates + 1 refresh + 1 remove = 6 changes overall.
        _results.Summary.Overall.Count.Should().Be(6, "Should be 6 updates");
        _results.Messages.Count.Should().Be(1, "Should be 1 message");
        _results.Messages[0].Adds.Should().Be(1, "Should be 1 add");
        _results.Messages[0].Updates.Should().Be(3, "Should be 3 updates");
        _results.Messages[0].Removes.Should().Be(1, "Should be 1 remove");
        _results.Messages[0].Refreshes.Should().Be(1, "Should be 1 evaluate");
        _results.Data.Count.Should().Be(0, "Should be 0 items in the cache");
    }

    [Fact]
    public void CountChanged()
    {
        var count = 0;
        var invoked = 0;

        using (_source.CountChanged.Subscribe(
            c =>
            {
                count = c;
                invoked++;
            }))
        {
            // CountChanged emits the current count immediately on subscription...
            invoked.Should().Be(1);
            count.Should().Be(0);

            // ...then once per edit, with the post-edit count.
            _source.AddOrUpdate(new RandomPersonGenerator().Take(100));
            invoked.Should().Be(2);
            count.Should().Be(100);

            _source.Clear();
            invoked.Should().Be(3);
            count.Should().Be(0);
        }
    }

    // NOTE(review): "Upone" is a typo for "Upon"; name kept as-is to preserve the test ID.
    [Fact]
    public void CountChangedShouldAlwaysInvokeUponeSubscription()
    {
        int? result = null;
        var subscription = _source.CountChanged.Subscribe(count => result = count);

        // The initial emission must have arrived synchronously on subscribe.
        result.HasValue.Should().BeTrue();
        if (result is null)
        {
            // Narrows nullability for the compiler; unreachable if the assertion above held.
            throw new InvalidOperationException(nameof(result));
        }

        result.Value.Should().Be(0, "Count should be zero");

        subscription.Dispose();
    }

    [Fact]
    public void CountChangedShouldReflectContentsOfCacheInvokeUponSubscription()
    {
        var generator = new RandomPersonGenerator();
        int? result = null;
        var subscription = _source.CountChanged.Subscribe(count => result = count);
        _source.AddOrUpdate(generator.Take(100));

        if (result is null)
        {
            throw new InvalidOperationException(nameof(result));
        }

        result.HasValue.Should().BeTrue();
        result.Value.Should().Be(100, "Count should be 100");

        subscription.Dispose();
    }

    public void Dispose()
    {
        _source.Dispose();
        _results.Dispose();
    }

    [Fact]
    public void SubscribesDisposesCorrectly()
    {
        var called = false;
        var errored = false;
        var completed = false;

        // Finally() and OnCompleted both flip 'completed': disposing the source should
        // terminate the stream one way or the other without an error.
        var subscription = _source.Connect().Finally(() => completed = true).Subscribe(updates => { called = true; }, ex => errored = true, () => completed = true);
        _source.AddOrUpdate(new Person("Adult1", 40));
        subscription.Dispose();
        _source.Dispose();

        errored.Should().BeFalse();
        called.Should().BeTrue();
        completed.Should().BeTrue();
    }

    [Fact]
    public void EmptyChanges()
    {
        // With suppressEmptyChangeSets: false, Connect must emit an initial empty
        // change set even though the cache contains nothing.
        IChangeSet<Person, string>? change = null;

        using var subscription = _source.Connect(suppressEmptyChangeSets: false)
            .Subscribe(c => change = c);

        change.Should().NotBeNull();
        change!.Count.Should().Be(0);
    }

    [Fact]
    public void EmptyChangesWithFilter()
    {
        // Same as EmptyChanges, but through a predicate that matches nothing.
        IChangeSet<Person, string>? change = null;

        using var subscription = _source.Connect(p => p.Age == 20, suppressEmptyChangeSets: false)
            .Subscribe(c => change = c);

        change.Should().NotBeNull();
        change!.Count.Should().Be(0);
    }

    [Fact]
    public void StaticFilterRemove()
    {
        var cache = new SourceCache<SomeObject, int>(x => x.Id);

        // Two complementary static filters partition the cache by Value.
        var above5 = cache.Connect(x => x.Value > 5).AsObservableCache();
        var below5 = cache.Connect(x => x.Value <= 5).AsObservableCache();

        cache.AddOrUpdate(Enumerable.Range(1, 10).Select(i => new SomeObject(i, i)));

        above5.Items.Should().BeEquivalentTo(Enumerable.Range(6, 5).Select(i => new SomeObject(i, i)));
        below5.Items.Should().BeEquivalentTo(Enumerable.Range(1, 5).Select(i => new SomeObject(i, i)));

        // Updating id 6 to a negative value should move it from above5 to below5.
        cache.AddOrUpdate(new SomeObject(6, -1));

        above5.Count.Should().Be(4);
        below5.Count.Should().Be(6);
        above5.Items.Should().BeEquivalentTo(Enumerable.Range(7, 4).Select(i => new SomeObject(i, i)));
        below5.Items.Should().BeEquivalentTo(Enumerable.Range(1, 6).Select(i => new SomeObject(i, i == 6 ? -1 : i)));
    }

    // Record gives value equality, which BeEquivalentTo relies on above.
    public record class SomeObject(int Id, int Value);

    [Fact]
    public async Task MultiCacheFanInDoesNotDeadlock()
    {
        const int itemCount = 100;

        using var cacheA = new SourceCache<TestItem, string>(static x => x.Key);
        using var cacheB = new SourceCache<TestItem, string>(static x => x.Key);
        using var destination = new SourceCache<TestItem, string>(static x => x.Key);

        // Fan-in: both caches feed the same destination from different threads.
        using var subA = cacheA.Connect().PopulateInto(destination);
        using var subB = cacheB.Connect().PopulateInto(destination);
        using var results = destination.Connect().AsAggregator();

        var taskA = Task.Run(() =>
        {
            for (var i = 0; i < itemCount; i++)
            {
                cacheA.AddOrUpdate(new TestItem($"a-{i}", $"ValueA-{i}"));
            }
        });

        var taskB = Task.Run(() =>
        {
            for (var i = 0; i < itemCount; i++)
            {
                cacheB.AddOrUpdate(new TestItem($"b-{i}", $"ValueB-{i}"));
            }
        });

        // Race the writers against a timeout so a deadlock fails the test instead of hanging it.
        var completed = Task.WhenAll(taskA, taskB);
        var finished = await Task.WhenAny(completed, Task.Delay(TimeSpan.FromSeconds(10)));

        finished.Should().BeSameAs(completed, "concurrent edits with cross-cache subscribers should not deadlock");
        results.Error.Should().BeNull();
        results.Data.Count.Should().Be(itemCount * 2, "all items from both caches should arrive in the destination");
        results.Data.Items.Should().BeEquivalentTo([.. cacheA.Items, .. cacheB.Items], "all items should be in the destination");
    }

    [Fact]
    public async Task DirectCrossWriteDoesNotDeadlock()
    {
        // Repeat many times: lock-ordering deadlocks are probabilistic, not deterministic.
        const int iterations = 100;

        for (var iter = 0; iter < iterations; iter++)
        {
            using var cacheA = new SourceCache<TestItem, string>(static x => x.Key);
            using var cacheB = new SourceCache<TestItem, string>(static x => x.Key);

            // Bidirectional: A items flow into B, B items flow into A.
            // Filter by prefix prevents infinite feedback.
            using var aToB = cacheA.Connect()
                .Filter(static x => x.Key.StartsWith('a'))
                .Transform(static (item, _) => new TestItem("from-a-" + item.Key, item.Value))
                .PopulateInto(cacheB);

            using var bToA = cacheB.Connect()
                .Filter(static x => x.Key.StartsWith('b'))
                .Transform(static (item, _) => new TestItem("from-b-" + item.Key, item.Value))
                .PopulateInto(cacheA);

            // Barrier maximizes the chance both writers enter their edit paths simultaneously.
            using var barrier = new Barrier(2);

            var taskA = Task.Run(() =>
            {
                barrier.SignalAndWait();
                for (var i = 0; i < 1000; i++)
                {
                    cacheA.AddOrUpdate(new TestItem("a" + i, "V" + i));
                }
            });

            var taskB = Task.Run(() =>
            {
                barrier.SignalAndWait();
                for (var i = 0; i < 1000; i++)
                {
                    cacheB.AddOrUpdate(new TestItem("b" + i, "V" + i));
                }
            });

            var completed = Task.WhenAll(taskA, taskB);
            var finished = await Task.WhenAny(completed, Task.Delay(TimeSpan.FromSeconds(30)));

            finished.Should().BeSameAs(completed, $"iteration {iter}: bidirectional cross-cache writes should not deadlock");
        }
    }

    [Fact]
    public void ConnectDuringDeliveryDoesNotDuplicate()
    {
        // Exploits the dequeue-to-OnNext window. Thread A writes two items in
        // separate batches. The first delivery is held by a slow subscriber.
        // While item1 delivery is blocked, item2 is committed to ReaderWriter
        // and sitting in the queue. Thread B calls Connect(), takes a snapshot
        // (sees both items), subscribes to _changes, then item2 is delivered
        // via OnNext — producing a duplicate if not guarded by a generation counter.
        using var cache = new SourceCache<TestItem, string>(static x => x.Key);
        using var delivering = new ManualResetEventSlim(false);
        using var item2Written = new ManualResetEventSlim(false);
        using var connectDone = new ManualResetEventSlim(false);

        // NOTE(review): read/written from different threads without volatile; presumably
        // the ManualResetEventSlim handoff provides the needed ordering — confirm.
        var firstDelivery = true;

        // First subscriber: blocks on the first delivery to create the window.
        using var slowSub = cache.Connect().Subscribe(_ =>
        {
            if (firstDelivery)
            {
                firstDelivery = false;
                delivering.Set();
                // Wait until item2 has been written and the Connect has subscribed.
                connectDone.Wait(TimeSpan.FromSeconds(5));
            }
        });

        // Write item1 on a background thread — delivery starts, slow subscriber blocks.
        var writeTask = Task.Run(() =>
        {
            cache.AddOrUpdate(new TestItem("k1", "v1"));
        });

        // Wait for delivery of item1 to be in progress (slow sub is blocking).
        delivering.Wait(TimeSpan.FromSeconds(5)).Should().BeTrue("delivery should have started");

        // Now write item2 on another thread. It will acquire the lock, commit to
        // ReaderWriter, enqueue a notification, and return. The notification sits
        // in the queue because the deliverer (Thread A) is blocked by the slow sub.
        var writeTask2 = Task.Run(() =>
        {
            cache.AddOrUpdate(new TestItem("k2", "v2"));
            item2Written.Set();
        });

        item2Written.Wait(TimeSpan.FromSeconds(5)).Should().BeTrue("item2 should have been written");

        // Now Connect on the main thread. The snapshot from ReaderWriter includes
        // BOTH k1 and k2. The subscription to _changes is added. When the slow
        // subscriber unblocks, item2's notification will be delivered via OnNext
        // and the new subscriber will see k2 again — a duplicate Add.
        var addCounts = new Dictionary<string, int>();
        using var newSub = cache.Connect().Subscribe(changes =>
        {
            foreach (var c in changes)
            {
                if (c.Reason == ChangeReason.Add)
                {
                    var key = c.Current.Key;
                    addCounts[key] = addCounts.GetValueOrDefault(key) + 1;
                }
            }
        });

        // Unblock the slow subscriber — delivery resumes, item2 delivered.
        connectDone.Set();

        writeTask.Wait(TimeSpan.FromSeconds(5)).Should().BeTrue("writeTask should complete");
        writeTask2.Wait(TimeSpan.FromSeconds(5)).Should().BeTrue("writeTask2 should complete");

        // Each key should appear exactly once in the new subscriber's view.
        addCounts.GetValueOrDefault("k1").Should().Be(1, "k1 should appear once (snapshot only)");
        addCounts.GetValueOrDefault("k2").Should().Be(1, "k2 should appear once, not duplicated from snapshot + queued delivery");
    }

    private sealed record TestItem(string Key, string Value);
}