Partitions storage and serialization big update

Async mode
Ogoun 1 year ago
parent 96dce84019
commit 91784e256f

@@ -8,7 +8,7 @@
 </PropertyGroup>
 <ItemGroup>
-<PackageReference Include="Microsoft.FASTER.Core" Version="2.1.0" />
+<PackageReference Include="Microsoft.FASTER.Core" Version="2.5.18" />
 </ItemGroup>
 <ItemGroup>

@@ -31,50 +31,67 @@ namespace PartitionFileStorageTest
 return num_base + (uint)r.Next(999999);
 }
-private static void FastTest(StoreOptions<ulong, ulong, byte[], Metadata> options)
+private static async Task FastTest(StoreOptions<ulong, ulong, byte[], Metadata> options)
 {
 var r = new Random(Environment.TickCount);
-var store = new Store<ulong, ulong, byte[], Metadata>(options);
-var storePart = store.CreateBuilder(new Metadata { Date = new DateTime(2022, 11, 08) });
-Console.WriteLine("Small test start");
+var store = new Store<ulong, ulong, byte[], Metadata>(options, new StoreSerializers<ulong, ulong, byte[]>(
+async (w, n) => await w.WriteULongAsync(n),
+async (w, n) => await w.WriteULongAsync(n),
+async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
+async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
+async (r) => { try { return new DeserializeResult<byte[]>(true, await r.ReadBytesAsync()); } catch { return new DeserializeResult<byte[]>(false, new byte[0]); } }));
 var c1 = (ulong)(86438 * 128);
 var c2 = (ulong)(83438 * 128);
 var c3 = (ulong)(831238 * 128);
-storePart.Store(c1, Generate(r));
-storePart.Store(c1, Generate(r));
-storePart.Store(c1, Generate(r));
-storePart.Store(c2, Generate(r));
-storePart.Store(c2, Generate(r));
-storePart.Store(c2, Generate(r));
-storePart.Store(c2, Generate(r));
-storePart.Store(c2, Generate(r));
-storePart.Store(c3, Generate(r));
-storePart.Store(c3, Generate(r));
-storePart.Store(c3, Generate(r));
+using (var storePart = store.CreateBuilder(new Metadata { Date = new DateTime(2022, 11, 08) }))
+{
+Console.WriteLine("Small test start");
+await storePart.Store(c1, Generate(r));
+await storePart.Store(c1, Generate(r));
+await storePart.Store(c1, Generate(r));
+await storePart.Store(c2, Generate(r));
+await storePart.Store(c2, Generate(r));
+await storePart.Store(c2, Generate(r));
+await storePart.Store(c2, Generate(r));
+await storePart.Store(c2, Generate(r));
+await storePart.Store(c3, Generate(r));
+await storePart.Store(c3, Generate(r));
+await storePart.Store(c3, Generate(r));
 storePart.CompleteAdding();
-storePart.Compress();
-var readPart = store.CreateAccessor(new Metadata { Date = new DateTime(2022, 11, 08) });
+await storePart.Compress();
+}
+using (var readPart = store.CreateAccessor(new Metadata { Date = new DateTime(2022, 11, 08) }))
+{
 Console.WriteLine("Data:");
-foreach (var e in readPart.Iterate())
+await foreach (var kv in readPart.Iterate())
 {
-Console.WriteLine($"{e.Key}: {e.Value.Length}");
+Console.WriteLine($"{kv.Key}: {kv.Value.Length}");
 }
-readPart.RemoveKey(c1);
+await readPart.RemoveKey(c1);
 Console.WriteLine("Data after remove:");
-foreach (var e in readPart.Iterate())
+await foreach (var kv in readPart.Iterate())
 {
-Console.WriteLine($"{e.Key}: {e.Value.Length}");
+Console.WriteLine($"{kv.Key}: {kv.Value.Length}");
 }
+}
 }
-private static void FullStoreTest(StoreOptions<ulong, ulong, byte[], Metadata> options,
+private static async Task FullStoreTest(StoreOptions<ulong, ulong, byte[], Metadata> options,
 List<(ulong, ulong)> pairs)
 {
 var meta = new Metadata { Date = new DateTime(2022, 11, 08) };
 var r = new Random(Environment.TickCount);
-var store = new Store<ulong, ulong, byte[], Metadata>(options);
+var store = new Store<ulong, ulong, byte[], Metadata>(options, new StoreSerializers<ulong, ulong, byte[]>(
+async (w, n) => await w.WriteULongAsync(n),
+async (w, n) => await w.WriteULongAsync(n),
+async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
+async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
+async (r) => { try { return new DeserializeResult<byte[]>(true, await r.ReadBytesAsync()); } catch { return new DeserializeResult<byte[]>(false, new byte[0]); } }));
 var storePart = store.CreateBuilder(meta);
 var sw = new Stopwatch();
 sw.Start();
@@ -90,7 +107,7 @@ namespace PartitionFileStorageTest
 var val = pairs[i].Item2;
 if (testData.ContainsKey(key) == false) testData[key] = new HashSet<ulong>();
 testData[key].Add(val);
-storePart.Store(key, val);
+await storePart.Store(key, val);
 if (key % 717 == 0)
 {
 testKeys1.Add(key);
@@ -105,7 +122,7 @@ namespace PartitionFileStorageTest
 Log.Info($"Fill journal: {sw.ElapsedMilliseconds}ms. Records writed: {storePart.TotalRecords}");
 sw.Restart();
 storePart.CompleteAdding();
-storePart.Compress();
+await storePart.Compress();
 sw.Stop();
 Log.Info($"Compress: {sw.ElapsedMilliseconds}ms");
 sw.Restart();
@@ -135,7 +152,7 @@ namespace PartitionFileStorageTest
 ulong totalKeys = 0;
 foreach (var key in testKeys1)
 {
-var result = readPart.Find(key);
+var result = readPart.Find(key).Result;
 totalData += (ulong)(result.Value?.Length ?? 0);
 totalKeys++;
 }
@@ -145,7 +162,7 @@ namespace PartitionFileStorageTest
 Log.Info("Test #1 remove by keys");
 for (int i = 0; i < testKeys1.Count; i++)
 {
-readPart.RemoveKey(testKeys1[i], false);
+await readPart.RemoveKey(testKeys1[i], false);
 }
 sw.Restart();
 readPart.RebuildIndex();
@@ -155,7 +172,7 @@ namespace PartitionFileStorageTest
 foreach (var key in testKeys1)
 {
 var result = readPart.Find(key);
-totalData += (ulong)(result.Value?.Length ?? 0);
+totalData += (ulong)(result.Result.Value?.Length ?? 0);
 totalKeys++;
 }
 Log.Info($"\t\tFound: {totalKeys} keys. {totalData} bytes");
@@ -165,19 +182,19 @@ namespace PartitionFileStorageTest
 foreach (var key in testKeys2)
 {
 var result = readPart.Find(key);
-totalData += (ulong)(result.Value?.Length ?? 0);
+totalData += (ulong)(result.Result.Value?.Length ?? 0);
 totalKeys++;
 }
 Log.Info($"\t\tFound: {totalKeys} keys. {totalData} bytes");
 totalData = 0;
 totalKeys = 0;
 Log.Info("Test #2 remove keys batch");
-readPart.RemoveKeys(testKeys2);
+await readPart.RemoveKeys(testKeys2);
 Log.Info("Test #2 reading after remove");
 foreach (var key in testKeys2)
 {
 var result = readPart.Find(key);
-totalData += (ulong)(result.Value?.Length ?? 0);
+totalData += (ulong)(result.Result.Value?.Length ?? 0);
 totalKeys++;
 }
 Log.Info($"\t\tFound: {totalKeys} keys. {totalData} bytes");
@@ -185,11 +202,12 @@ namespace PartitionFileStorageTest
 totalKeys = 0;
 Log.Info("Iterator test");
-foreach (var e in readPart.Iterate())
+await foreach (var kv in readPart.Iterate())
 {
-totalData += (ulong)(e.Value?.Length ?? 0);
+totalData += (ulong)(kv.Value?.Length ?? 0);
 totalKeys++;
 }
 Log.Info($"\t\tFound: {totalKeys} keys. {totalData} bytes");
 totalData = 0;
 totalKeys = 0;
@@ -199,7 +217,7 @@ namespace PartitionFileStorageTest
 {
 if (test.Value.Count > 0 && testKeys1.Contains(test.Key) == false && testKeys2.Contains(test.Key) == false)
 {
-var result = Compressor.DecodeBytesContent(readPart.Find(test.Key).Value).ToHashSet();
+var result = Compressor.DecodeBytesContent(readPart.Find(test.Key).Result.Value).ToHashSet();
 if (test.Value.Count != result.Count)
 {
 Log.Info($"Key '{test.Key}' not found!");
@@ -227,12 +245,17 @@
 }
 }
-private static void FullStoreMultithreadTest(StoreOptions<ulong, ulong, byte[], Metadata> options)
+private static async Task FullStoreMultithreadTest(StoreOptions<ulong, ulong, byte[], Metadata> options)
 {
 var meta = new Metadata { Date = new DateTime(2022, 11, 08) };
 var r = new Random(Environment.TickCount);
-var store = new Store<ulong, ulong, byte[], Metadata>(options);
-var storePart = store.CreateBuilder(meta);
+var store = new Store<ulong, ulong, byte[], Metadata>(options, new StoreSerializers<ulong, ulong, byte[]>(
+async (w, n) => await w.WriteULongAsync(n),
+async (w, n) => await w.WriteULongAsync(n),
+async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
+async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
+async (r) => { try { return new DeserializeResult<byte[]>(true, await r.ReadBytesAsync()); } catch { return new DeserializeResult<byte[]>(false, new byte[0]); } }));
 var sw = new Stopwatch();
 sw.Start();
 var insertCount = (long)(0.7 * PAIRS_COUNT);
@@ -241,6 +264,8 @@
 var parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = 24 };
 var Keys = new ConcurrentHashSet<ulong>();
+using (var storePart = store.CreateBuilder(meta))
+{
 Parallel.ForEach(MassGenerator((long)(0.7 * PAIRS_COUNT)), parallelOptions, pair =>
 {
 var key = pair.Item1;
@@ -256,28 +281,27 @@
 }
 Keys.Add(key);
 });
 if (storePart.TotalRecords != insertCount)
 {
 Log.Error($"Count of stored record no equals expected. Recorded: {storePart.TotalRecords}. Expected: {insertCount}. Unique keys: {Keys.Count}");
 }
 sw.Stop();
 Log.Info($"Fill journal: {sw.ElapsedMilliseconds}ms");
 sw.Restart();
 storePart.CompleteAdding();
-storePart.Compress();
+await storePart.Compress();
 sw.Stop();
 Log.Info($"Compress: {sw.ElapsedMilliseconds}ms");
 sw.Restart();
 storePart.RebuildIndex();
+}
 sw.Stop();
 Log.Info($"Rebuild indexes: {sw.ElapsedMilliseconds}ms");
 Log.Info("Start merge test");
 sw.Restart();
-var merger = store.CreateMergeAccessor(meta, data => Compressor.DecodeBytesContent(data));
+using (var merger = store.CreateMergeAccessor(meta, data => Compressor.DecodeBytesContent(data)))
+{
 Parallel.ForEach(MassGenerator((long)(0.3 * PAIRS_COUNT)), parallelOptions, pair =>
 {
 var key = pair.Item1;
@@ -293,6 +317,7 @@
 Log.Info($"Merge journal filled: {sw.ElapsedMilliseconds}ms. Total data count: {PAIRS_COUNT}. Unique keys: {Keys.Count}");
 merger.Compress(); // auto reindex
+}
 sw.Stop();
 Log.Info($"Compress after merge: {sw.ElapsedMilliseconds}ms");
@@ -300,10 +325,10 @@
 ulong totalKeys = 0;
 var s = new HashSet<ulong>();
-store.Bypass(meta, (k, v) =>
+await foreach (var kv in store.Bypass(meta))
 {
-s.Add(k);
-});
+s.Add(kv.Key);
+}
 Log.Info($"Keys count: {s.Count}");
 using (var readPart = store.CreateAccessor(meta))
@@ -343,14 +368,14 @@
 foreach (var key in testKeys2)
 {
 var result = readPart.Find(key);
-totalData += (ulong)(result.Value?.Length ?? 0);
+totalData += (ulong)(result.Result.Value?.Length ?? 0);
 totalKeys++;
 }
 Log.Info($"\t\tFound: {totalKeys}/{Keys.Count} keys. {totalData} bytes");
 totalData = 0;
 totalKeys = 0;
 Log.Info("Test #2 remove keys batch");
-readPart.RemoveKeys(testKeys2);
+await readPart.RemoveKeys(testKeys2);
 foreach (var k in testKeys2)
 {
 Keys.TryRemove(k);
@@ -359,7 +384,7 @@
 foreach (var key in testKeys2)
 {
 var result = readPart.Find(key);
-totalData += (ulong)(result.Value?.Length ?? 0);
+totalData += (ulong)(result.Result.Value?.Length ?? 0);
 totalKeys++;
 }
 Log.Info($"\t\tFound: {totalKeys} keys. {totalData} bytes");
@@ -367,9 +392,9 @@
 totalKeys = 0;
 Log.Info("Iterator test");
-foreach (var e in readPart.Iterate())
+await foreach (var kv in readPart.Iterate())
 {
-totalData += (ulong)(e.Value?.Length ?? 0);
+totalData += (ulong)(kv.Value?.Length ?? 0);
 totalKeys++;
 }
 }
@@ -380,7 +405,12 @@
 private static void FaultIndexTest(string filePath)
 {
-var serializer = new StoreStandartSerializer<ulong, ulong, byte[]>();
+var serializer = new StoreSerializers<ulong, ulong, byte[]>(
+async (w, n) => await w.WriteULongAsync(n),
+async (w, n) => await w.WriteULongAsync(n),
+async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
+async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
+async (r) => { try { return new DeserializeResult<byte[]>(true, await r.ReadBytesAsync()); } catch { return new DeserializeResult<byte[]>(false, new byte[0]); } });
 // 1 build index
 var index = new Dictionary<ulong, long>();
 using (var reader = new MemoryStreamReader(new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite, 4096 * 1024)))
@@ -390,11 +420,13 @@
 {
 counter--;
 var pos = reader.Position;
-serializer.KeyDeserializer.Invoke(reader, out var k);
-serializer.ValueDeserializer.Invoke(reader, out var _);
+var kv = serializer.KeyDeserializer.Invoke(reader).Result;
+var vv = serializer.ValueDeserializer.Invoke(reader).Result;
 if (counter == 0)
 {
-index[k] = pos;
+index[kv.Value] = pos;
 counter = 16;
 }
 }
@@ -407,12 +439,12 @@
 var accessor = fileReader.GetAccessor(pair.Value);
 using (var reader = new MemoryStreamReader(accessor))
 {
-serializer.KeyDeserializer.Invoke(reader, out var k);
-if (k != pair.Key)
+var kv = serializer.KeyDeserializer.Invoke(reader).Result;
+if (kv.Value != pair.Key)
 {
 Log.Warning("Broken index");
 }
-serializer.ValueDeserializer.Invoke(reader, out var _);
+serializer.ValueDeserializer.Invoke(reader).Wait();
 }
 }
@@ -436,7 +468,12 @@
 private static void FaultUncompressedReadTest(string filePath)
 {
-var serializer = new StoreStandartSerializer<ulong, ulong, byte[]>();
+var serializer = new StoreSerializers<ulong, ulong, byte[]>(
+async (w, n) => await w.WriteULongAsync(n),
+async (w, n) => await w.WriteULongAsync(n),
+async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
+async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
+async (r) => { try { return new DeserializeResult<byte[]>(true, await r.ReadBytesAsync()); } catch { return new DeserializeResult<byte[]>(false, new byte[0]); } });
 // 1 build index
 var dict = new Dictionary<ulong, HashSet<ulong>>();
 using (var reader = new MemoryStreamReader(new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.None, 4096 * 1024)))
@@ -445,17 +482,17 @@
 {
 try
 {
-serializer.KeyDeserializer.Invoke(reader, out var key);
-if (false == dict.ContainsKey(key))
+var key = serializer.KeyDeserializer.Invoke(reader).Result;
+if (false == dict.ContainsKey(key.Value))
 {
-dict[key] = new HashSet<ulong>();
+dict[key.Value] = new HashSet<ulong>();
 }
 if (reader.EOS)
 {
 break;
 }
-serializer.InputDeserializer.Invoke(reader, out var input);
-dict[key].Add(input);
+var input = serializer.InputDeserializer.Invoke(reader).Result;
+dict[key.Value].Add(input.Value);
 }
 catch (Exception ex)
 {
@@ -490,12 +527,17 @@
 KeyComparer = (left, right) => string.Compare(left, right, true),
 ThreadSafeWriting = true
 };
-var store = new Store<string, ulong, byte[], StoreMetadata>(options);
+var store = new Store<string, ulong, byte[], StoreMetadata>(options, new StoreSerializers<string, ulong, byte[]>(
+async (w, n) => await w.WriteStringAsync(n),
+async (w, n) => await w.WriteULongAsync(n),
+async (r) => { try { return new DeserializeResult<string>(true, await r.ReadStringAsync()); } catch { return new DeserializeResult<string>(false, string.Empty); } },
+async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
+async (r) => { try { return new DeserializeResult<byte[]>(true, await r.ReadBytesAsync()); } catch { return new DeserializeResult<byte[]>(false, new byte[0]); } }));
 var builder = store.CreateBuilder(meta);
 builder.Compress();
 }
-static void Main(string[] args)
+static async Task Main(string[] args)
 {
 //FaultCompressionTest(@"F:\Desktop\DATKA\DNS", new StoreMetadata { Date = new DateTime(2023, 01, 20) });
@@ -558,22 +600,23 @@
 }
 */
 Log.Info("Start test");
-// FastTest(options);
-FullStoreMultithreadTest(optionsMultiThread);
-FSUtils.CleanAndTestFolder(root);
-/*
+FSUtils.CleanAndTestFolder(root);
+await FastTest(options);
 FSUtils.CleanAndTestFolder(root);
-FullStoreTest(options, pairs);
-*/
-//TestParallelFileReadingMMF();
-/*
+await FullStoreMultithreadTest(optionsMultiThread);
+/*FSUtils.CleanAndTestFolder(root);
+FullStoreTest(options, pairs);*/
 FSUtils.CleanAndTestFolder(root);
-//TestParallelFileReadingMMF();
-*/
+//TestParallelFileReadingMMF();
+Console.WriteLine("Completed");
 Console.ReadKey();
 }

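Taken together, the test changes above show the new calling pattern: the Store is constructed with a StoreSerializers instance built from async write/read delegates, builders and accessors are wrapped in using blocks, Store/Compress/RemoveKey(s) return Task, and Iterate is consumed with await foreach. A condensed, hedged usage sketch of that pattern (options is assumed to be a prepared StoreOptions instance; meta, key and value are placeholder variables, not part of the commit):

    // Sketch of the async API usage demonstrated by the updated tests.
    var store = new Store<ulong, ulong, byte[], Metadata>(options, new StoreSerializers<ulong, ulong, byte[]>(
        async (w, n) => await w.WriteULongAsync(n),   // key serializer
        async (w, n) => await w.WriteULongAsync(n),   // input serializer
        async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
        async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
        async (r) => { try { return new DeserializeResult<byte[]>(true, await r.ReadBytesAsync()); } catch { return new DeserializeResult<byte[]>(false, new byte[0]); } }));
    var meta = new Metadata { Date = new DateTime(2022, 11, 08) };   // placeholder partition metadata
    ulong key = 123; byte[] value = new byte[] { 1, 2, 3 };          // placeholder data
    using (var builder = store.CreateBuilder(meta))
    {
        await builder.Store(key, value);   // writes are awaited now
        builder.CompleteAdding();
        await builder.Compress();          // compression is awaited now
    }
    using (var accessor = store.CreateAccessor(meta))
    {
        await foreach (var kv in accessor.Iterate())   // IAsyncEnumerable<KV<TKey, TValue>>
            Console.WriteLine($"{kv.Key}: {kv.Value.Length}");
        await accessor.RemoveKey(key);
    }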
@@ -1,21 +1,62 @@
 using System;
+using System.Linq;
+using System.Reflection;
+using ZeroLevel.Services.Invokation;
+using ZeroLevel.Services.ObjectMapping;
+using ZeroLevel.Services.Serialization;
 namespace TestApp
 {
 internal static class Program
 {
-private static void Main(string[] args)
+private class Wrapper
+{
+public string ReadId;
+public string WriteId;
+public IInvokeWrapper Invoker;
+public T Read<T>(IBinaryReader reader)
+{
+return (T)Invoker.Invoke(reader, ReadId);
+}
+public object ReadObject(IBinaryReader reader)
 {
-AppDomain.CurrentDomain.AssemblyResolve += CurrentDomain_AssemblyResolve;
+return Invoker.Invoke(reader, ReadId);
 }
-private static System.Reflection.Assembly CurrentDomain_AssemblyResolve(object sender, ResolveEventArgs e)
+public void Write<T>(IBinaryWriter writer, T value)
 {
-if (e.Name.StartsWith("Microsoft.Build."))
+Invoker.Invoke(writer, WriteId, new object[] { value });
+}
+public void WriteObject(IBinaryWriter writer, object value)
 {
-// look in the local folder
+Invoker.Invoke(writer, WriteId, new object[] { value });
 }
-return null;
+}
+private static Func<MethodInfo, bool> CreateArrayPredicate<T>()
+{
+var typeArg = typeof(T).GetElementType();
+return mi => mi.Name.Equals("WriteArray", StringComparison.Ordinal) &&
+mi.GetParameters().First().ParameterType.GetElementType().IsAssignableFrom(typeArg);
+}
+private static Func<MethodInfo, bool> CreateCollectionPredicate<T>()
+{
+var typeArg = typeof(T).GetGenericArguments().First();
+return mi => mi.Name.Equals("WriteCollection", StringComparison.Ordinal) &&
+mi.GetParameters().First().ParameterType.GetGenericArguments().First().IsAssignableFrom(typeArg);
+}
+private static void Main(string[] args)
+{
+var wrapper = new Wrapper { Invoker = InvokeWrapper.Create() };
+var ReadId = wrapper.Invoker.Configure(typeof(MemoryStreamReader), "ReadDateTimeArray").First();
+var WriteId = wrapper.Invoker.Configure(typeof(MemoryStreamWriter), CreateArrayPredicate<DateTime?[]>()).First();
+Console.Write(WriteId);
+}
 }
 }

@@ -7,7 +7,7 @@
 </PropertyGroup>
 <ItemGroup>
-<PackageReference Include="Newtonsoft.Json" Version="13.0.2" />
+<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
 </ItemGroup>
 <ItemGroup>

@@ -7,7 +7,7 @@
 </PropertyGroup>
 <ItemGroup>
-<PackageReference Include="Newtonsoft.Json" Version="13.0.2" />
+<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
 <PackageReference Include="Topshelf" Version="4.3.0" />
 </ItemGroup>

@@ -44,4 +44,8 @@
 <PackageReference Include="System.Numerics.Vectors" Version="4.5.0" />
 </ItemGroup>
+<ItemGroup>
+<ProjectReference Include="..\ZeroLevel\ZeroLevel.csproj" />
+</ItemGroup>
 </Project>

@@ -18,7 +18,7 @@
 </PropertyGroup>
 <ItemGroup>
-<PackageReference Include="Newtonsoft.Json" Version="13.0.2" />
+<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
 </ItemGroup>
 <ItemGroup>

@@ -337,9 +337,9 @@ namespace ZeroLevel.Serialization
 [Fact]
 public void SerializeCollectionDateTime()
 {
-MakeCollectionTest<DateTime>(null);
-MakeCollectionTest<DateTime>(new DateTime[] { });
-MakeCollectionTest<DateTime>(new DateTime[] { DateTime.Now, DateTime.UtcNow, DateTime.Today, DateTime.Now.AddYears(2000), DateTime.MinValue, DateTime.MaxValue });
+MakeCollectionTest<DateTime?>(null);
+MakeCollectionTest<DateTime?>(new DateTime?[] { });
+MakeCollectionTest<DateTime?>(new DateTime?[] { DateTime.Now, DateTime.UtcNow, DateTime.Today, DateTime.Now.AddYears(2000), null, DateTime.MinValue, DateTime.MaxValue });
 }
 [Fact]

@@ -9,9 +9,9 @@
 </PropertyGroup>
 <ItemGroup>
-<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.4.0" />
-<PackageReference Include="xunit" Version="2.4.2" />
-<PackageReference Include="xunit.runner.visualstudio" Version="2.4.5">
+<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.6.3" />
+<PackageReference Include="xunit" Version="2.5.0" />
+<PackageReference Include="xunit.runner.visualstudio" Version="2.5.0">
 <PrivateAssets>all</PrivateAssets>
 <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
 </PackageReference>

@@ -1,16 +1,18 @@
 using System;
+using System.Threading.Tasks;
 namespace ZeroLevel.Services.Memory
 {
 public interface IViewAccessor
 : IDisposable
 {
+bool IsMemoryStream { get; }
 /// <summary>
 /// End of view
 /// </summary>
 bool EOV { get; }
 long Position { get; }
-byte[] ReadBuffer(int count);
+Task<byte[]> ReadBuffer(int count);
 bool CheckOutOfRange(int offset);
 void Seek(long offset);
 }

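Because ReadBuffer now returns Task<byte[]>, every IViewAccessor consumer has to await its reads. A minimal hypothetical caller, assuming an accessor instance and a record size obtained elsewhere (neither is part of the commit):

    // Hypothetical helper built only on the interface members shown above.
    static async Task<byte[]> ReadChunkAsync(IViewAccessor accessor, int count)
    {
        if (accessor.EOV || accessor.CheckOutOfRange(count))
            return null;                          // nothing left in this view
        return await accessor.ReadBuffer(count);  // ReadBuffer is asynchronous now
    }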
@@ -1,6 +1,8 @@
 using System;
 using System.IO;
 using System.IO.MemoryMappedFiles;
+using System.Runtime.CompilerServices;
+using System.Threading.Tasks;
 namespace ZeroLevel.Services.Memory
 {
@@ -20,29 +22,27 @@ namespace ZeroLevel.Services.Memory
 public long Position => _absoluteOffset + _accessor.Position;
-public bool CheckOutOfRange(int offset)
-{
-return offset < 0 || (_accessor.Position + offset) > _accessor.Length;
-}
+public bool IsMemoryStream => false;
+[MethodImpl(MethodImplOptions.AggressiveInlining)]
+public bool CheckOutOfRange(int offset) => offset < 0 || (_accessor.Position + offset) > _accessor.Length;
 public void Dispose()
 {
 _accessor?.Dispose();
 }
-public byte[] ReadBuffer(int count)
+public async Task<byte[]> ReadBuffer(int count)
 {
 if (count == 0) return null;
 var buffer = new byte[count];
-var readedCount = _accessor.Read(buffer, 0, count);
+var readedCount = await _accessor.ReadAsync(buffer, 0, count);
 if (count != readedCount)
 throw new InvalidOperationException($"The stream returned less data ({count} bytes) than expected ({readedCount} bytes)");
 return buffer;
 }
-public void Seek(long offset)
-{
-_accessor.Seek(offset, SeekOrigin.Begin);
-}
+[MethodImpl(MethodImplOptions.AggressiveInlining)]
+public void Seek(long offset) => _accessor.Seek(offset, SeekOrigin.Begin);
 }
 }

@@ -1,5 +1,7 @@
 using System;
 using System.IO;
+using System.Runtime.CompilerServices;
+using System.Threading.Tasks;
 namespace ZeroLevel.Services.Memory
 {
@@ -16,16 +18,16 @@ namespace ZeroLevel.Services.Memory
 public long Position => _stream.Position;
-public bool CheckOutOfRange(int offset)
-{
-return offset < 0 || (_stream.Position + offset) > _stream.Length;
-}
-public byte[] ReadBuffer(int count)
+public bool IsMemoryStream => _stream is MemoryStream;
+[MethodImpl(MethodImplOptions.AggressiveInlining)]
+public bool CheckOutOfRange(int offset) => offset < 0 || (_stream.Position + offset) > _stream.Length;
+public async Task<byte[]> ReadBuffer(int count)
 {
 if (count == 0) return null;
 var buffer = new byte[count];
-var readedCount = _stream.Read(buffer, 0, count);
+var readedCount = await _stream.ReadAsync(buffer, 0, count);
 if (count != readedCount)
 throw new InvalidOperationException($"The stream returned less data ({count} bytes) than expected ({readedCount} bytes)");
 return buffer;
@@ -36,9 +38,7 @@ namespace ZeroLevel.Services.Memory
 _stream.Dispose();
 }
-public void Seek(long offset)
-{
-_stream.Seek(offset, SeekOrigin.Begin);
-}
+[MethodImpl(MethodImplOptions.AggressiveInlining)]
+public void Seek(long offset) => _stream.Seek(offset, SeekOrigin.Begin);
 }
 }

@@ -1,9 +0,0 @@
-namespace ZeroLevel.Services.PartitionStorage
-{
-/*TODO IN FUTURE*/
-internal struct ValueIndex<TValue>
-{
-public TValue Value { get; set; }
-public long Offset { get; set; }
-}
-}

@@ -1,5 +1,6 @@
 using System;
 using System.Collections.Generic;
+using System.Threading.Tasks;
 using ZeroLevel.Services.PartitionStorage.Interfaces;
 namespace ZeroLevel.Services.PartitionStorage
@@ -28,15 +29,15 @@ namespace ZeroLevel.Services.PartitionStorage
 /// <summary>
 /// Performs a search for data in the repository
 /// </summary>
-StoreSearchResult<TKey, TValue, TMeta> Search(StoreSearchRequest<TKey, TMeta> searchRequest);
+Task<StoreSearchResult<TKey, TValue, TMeta>> Search(StoreSearchRequest<TKey, TMeta> searchRequest);
 /// <summary>
 /// bypass all key value by meta
 /// </summary>
-void Bypass(TMeta meta, Action<TKey, TValue> handler);
+IAsyncEnumerable<KV<TKey, TValue>> Bypass(TMeta meta);
 /// <summary>
 /// true - if key exists
 /// </summary>
-bool Exists(TMeta meta, TKey key);
+Task<bool> Exists(TMeta meta, TKey key);
 /// <summary>
 /// Deleting a partition
 /// </summary>

@@ -1,4 +1,6 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
+using System.Threading.Tasks;
 namespace ZeroLevel.Services.PartitionStorage
 {
@@ -18,42 +20,42 @@ namespace ZeroLevel.Services.PartitionStorage
 /// <summary>
 /// Search in a partition for a specified key
 /// </summary>
-StorePartitionKeyValueSearchResult<TKey, TValue> Find(TKey key);
+Task<SearchResult<TKey, TValue>> Find(TKey key);
 /// <summary>
 /// Search in a partition for a specified keys
 /// </summary>
-IEnumerable<StorePartitionKeyValueSearchResult<TKey, TValue>> Find(IEnumerable<TKey> keys);
+Task Find(IEnumerable<TKey> keys, Action<TKey, TValue> searchResultHandler);
 /// <summary>
 /// Iterating over all recorded data
 /// </summary>
-IEnumerable<StorePartitionKeyValueSearchResult<TKey, TValue>> Iterate();
+IAsyncEnumerable<KV<TKey, TValue>> Iterate();
 /// <summary>
 /// Iterating over all recorded data of the file with the specified key
 /// </summary>
-IEnumerable<StorePartitionKeyValueSearchResult<TKey, TValue>> IterateKeyBacket(TKey key);
+Task IterateKeyBacket(TKey key, Action<TKey, TValue> kvHandler);
 /// <summary>
 /// Deleting the specified key and associated data
 /// </summary>
 /// <param name="key">Key</param>
 /// <param name="autoReindex">true - automatically rebuild the index of the file from which data was deleted (default = false)</param>
-void RemoveKey(TKey key, bool autoReindex = false);
+Task RemoveKey(TKey key, bool autoReindex = false);
 /// <summary>
 /// Deleting the specified keys and associated data
 /// </summary>
 /// <param name="keys">Keys</param>
 /// <param name="autoReindex">true - automatically rebuild the index of the file from which data was deleted (default = true)</param>
-void RemoveKeys(IEnumerable<TKey> keys, bool autoReindex = true);
+Task RemoveKeys(IEnumerable<TKey> keys, bool autoReindex = true);
 /// <summary>
 /// Delete all keys with data except the specified key
 /// </summary>
 /// <param name="key">Key</param>
 /// <param name="autoReindex">true - automatically rebuild the index of the file from which data was deleted (default = true)</param>
-void RemoveAllExceptKey(TKey key, bool autoReindex = true);
+Task RemoveAllExceptKey(TKey key, bool autoReindex = true);
 /// <summary>
 /// Delete all keys with data other than the specified ones
 /// </summary>
 /// <param name="keys">Keys</param>
 /// <param name="autoReindex">true - automatically rebuild the index of the file from which data was deleted (default = true)</param>
-void RemoveAllExceptKeys(IEnumerable<TKey> keys, bool autoReindex = true);
+Task RemoveAllExceptKeys(IEnumerable<TKey> keys, bool autoReindex = true);
 }
 }

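A hedged sketch of what consuming this asynchronous accessor contract looks like; the accessor, key and keys variables are placeholders and the concrete type arguments are only an example, not taken from the commit:

    // Single-key lookup now returns Task<SearchResult<TKey, TValue>>.
    var found = await accessor.Find(key);
    if (found.Success) Console.WriteLine(found.Value);

    // Batch lookup reports matches through a handler instead of returning a sequence.
    await accessor.Find(keys, (k, v) => Console.WriteLine($"{k}"));

    // Full scans are async streams of KV<TKey, TValue>.
    await foreach (var kv in accessor.Iterate())
        Console.WriteLine($"{kv.Key}");

    // Removal operations return Task and can still trigger reindexing.
    await accessor.RemoveKeys(keys, autoReindex: true);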
@@ -1,4 +1,5 @@
 using System.Collections.Generic;
+using System.Threading.Tasks;
 namespace ZeroLevel.Services.PartitionStorage
 {
@@ -15,11 +16,11 @@ namespace ZeroLevel.Services.PartitionStorage
 {
 get;
 }
-IEnumerable<StorePartitionKeyValueSearchResult<TKey, TInput>> Iterate();
+IAsyncEnumerable<SearchResult<TKey, TInput>> Iterate();
 /// <summary>
 /// Writing a key-value pair
 /// </summary>
-void Store(TKey key, TInput value);
+Task Store(TKey key, TInput value);
 /// <summary>
 /// Called after all key-value pairs are written to the partition
 /// </summary>
@@ -27,7 +28,7 @@ namespace ZeroLevel.Services.PartitionStorage
 /// <summary>
 /// Performs compression/grouping of recorded data in a partition
 /// </summary>
-void Compress();
+Task Compress();
 /// <summary>
 /// Rebuilds indexes for data in a partition
 /// </summary>

@@ -1,4 +1,6 @@
-namespace ZeroLevel.Services.PartitionStorage.Interfaces
+using System.Threading.Tasks;
+namespace ZeroLevel.Services.PartitionStorage.Interfaces
 {
 /// <summary>
 /// Provides write operations in catalog partition
@@ -16,10 +18,10 @@
 /// <summary>
 /// Writing a key-value pair
 /// </summary>
-void Store(TKey key, TInput value);
+Task Store(TKey key, TInput value);
 /// <summary>
 /// Perform the conversion of the records from (TKey; TInput) to (TKey; TValue). Called after CompleteAdding
 /// </summary>
-void Compress();
+Task Compress();
 }
 }

@@ -1,14 +1,19 @@
 using System;
+using System.Threading.Tasks;
 using ZeroLevel.Services.Serialization;
 namespace ZeroLevel.Services.PartitionStorage.Interfaces
 {
 public interface IStoreSerializer<TKey, TInput, TValue>
 {
-Action<MemoryStreamWriter, TKey> KeySerializer { get; }
-Action<MemoryStreamWriter, TInput> InputSerializer { get; }
-TryDeserializeMethod<TKey> KeyDeserializer { get; }
-TryDeserializeMethod<TInput> InputDeserializer { get; }
-TryDeserializeMethod<TValue> ValueDeserializer { get; }
+Func<MemoryStreamWriter, TKey, Task> KeySerializer { get; }
+Func<MemoryStreamWriter, TInput, Task> InputSerializer { get; }
+Func<MemoryStreamReader, Task<DeserializeResult<TKey>>> KeyDeserializer { get; }
+Func<MemoryStreamReader, Task<DeserializeResult<TInput>>> InputDeserializer { get; }
+Func<MemoryStreamReader, Task<DeserializeResult<TValue>>> ValueDeserializer { get; }
 }
 }

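The serializer contract is now expressed as Task-returning delegates, with failures reported through DeserializeResult<T> instead of the previous bool/out pattern. A minimal wiring sketch in the style of the test project above (the ulong/ulong/byte[] type choice and the reader variable are illustrative assumptions):

    var serializers = new StoreSerializers<ulong, ulong, byte[]>(
        async (w, key) => await w.WriteULongAsync(key),      // KeySerializer
        async (w, input) => await w.WriteULongAsync(input),  // InputSerializer
        async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
        async (r) => { try { return new DeserializeResult<ulong>(true, await r.ReadULongAsync()); } catch { return new DeserializeResult<ulong>(false, 0); } },
        async (r) => { try { return new DeserializeResult<byte[]>(true, await r.ReadBytesAsync()); } catch { return new DeserializeResult<byte[]>(false, new byte[0]); } });

    // Callers inspect Success instead of relying on an out parameter.
    var key = await serializers.KeyDeserializer.Invoke(reader);
    if (key.Success) { /* use key.Value */ }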
@@ -0,0 +1,4 @@
+namespace ZeroLevel.Services.PartitionStorage
+{
+public record KV<TKey, TValue>(TKey Key, TValue Value);
+}

@@ -13,7 +13,7 @@ namespace ZeroLevel.Services.PartitionStorage.Partition
 internal sealed class CompactKeyStorePartitionBuilder<TKey, TInput, TValue, TMeta>
 : BasePartition<TKey, TInput, TValue, TMeta>, IStorePartitionBuilder<TKey, TInput, TValue>
 {
-private readonly Action<TKey, TInput> _storeMethod;
+private readonly Func<TKey, TInput, Task> _storeMethod;
 private long _totalRecords = 0;
@@ -39,9 +39,9 @@ namespace ZeroLevel.Services.PartitionStorage.Partition
 #region IStorePartitionBuilder
-public void Store(TKey key, TInput value)
+public async Task Store(TKey key, TInput value)
 {
-_storeMethod.Invoke(key, value);
+await _storeMethod.Invoke(key, value);
 Interlocked.Increment(ref _totalRecords);
 }
@@ -50,18 +50,16 @@ namespace ZeroLevel.Services.PartitionStorage.Partition
 CloseWriteStreams();
 }
-public void Compress()
+public async Task Compress()
 {
 var files = Directory.GetFiles(_catalog);
 if (files != null && files.Length > 0)
 {
-Parallel.ForEach(files, file => CompressFile(file));
+await Parallel.ForEachAsync(files, async (file, ct) => await CompressFile(file));
 }
 }
-public IEnumerable<StorePartitionKeyValueSearchResult<TKey, TInput>> Iterate()
+public async IAsyncEnumerable<SearchResult<TKey, TInput>> Iterate()
 {
-TKey key;
-TInput input;
 var files = Directory.GetFiles(_catalog);
 if (files != null && files.Length > 0)
 {
@@ -73,9 +71,13 @@ namespace ZeroLevel.Services.PartitionStorage.Partition
 {
 while (reader.EOS == false)
 {
-if (Serializer.KeyDeserializer.Invoke(reader, out key) == false) break;
-if (Serializer.InputDeserializer.Invoke(reader, out input) == false) break;
-yield return new StorePartitionKeyValueSearchResult<TKey, TInput> { Key = key, Value = input, Status = SearchResult.Success };
+var kv = await Serializer.KeyDeserializer.Invoke(reader);
+if (kv.Success == false) break;
+var iv = await Serializer.InputDeserializer.Invoke(reader);
+if (iv.Success == false) break;
+yield return new SearchResult<TKey, TInput> { Key = kv.Value, Value = iv.Value, Success = true };
 }
 }
 }
@@ -86,17 +88,16 @@ namespace ZeroLevel.Services.PartitionStorage.Partition
 #endregion
 #region Private methods
-private void StoreDirect(TKey key, TInput value)
+private async Task StoreDirect(TKey key, TInput value)
 {
 var groupKey = _options.GetFileName(key, _info);
 if (TryGetWriteStream(groupKey, out var stream))
 {
-Serializer.KeySerializer.Invoke(stream, key);
-Thread.MemoryBarrier();
-Serializer.InputSerializer.Invoke(stream, value);
+await Serializer.KeySerializer.Invoke(stream, key);
+await Serializer.InputSerializer.Invoke(stream, value);
 }
 }
-private void StoreDirectSafe(TKey key, TInput value)
+private async Task StoreDirectSafe(TKey key, TInput value)
 {
 var groupKey = _options.GetFileName(key, _info);
 bool lockTaken = false;
@@ -105,9 +106,8 @@ namespace ZeroLevel.Services.PartitionStorage.Partition
 Monitor.Enter(stream, ref lockTaken);
 try
 {
-Serializer.KeySerializer.Invoke(stream, key);
-Thread.MemoryBarrier();
-Serializer.InputSerializer.Invoke(stream, value);
+await Serializer.KeySerializer.Invoke(stream, key);
+await Serializer.InputSerializer.Invoke(stream, value);
 }
 finally
 {
@@ -119,10 +119,8 @@ namespace ZeroLevel.Services.PartitionStorage.Partition
 }
 }
-internal void CompressFile(string file)
+internal async Task CompressFile(string file)
 {
-TKey key;
-TInput input;
 var dict = new Dictionary<TKey, HashSet<TInput>>();
 PhisicalFileAccessorCachee.LockFile(file);
 try
@@ -131,23 +129,25 @@ namespace ZeroLevel.Services.PartitionStorage.Partition
 {
 while (reader.EOS == false)
 {
-if (false == Serializer.KeyDeserializer.Invoke(reader, out key))
+var kv = await Serializer.KeyDeserializer.Invoke(reader);
+if (kv.Success == false)
 {
 throw new Exception($"[StorePartitionBuilder.CompressFile] Fault compress data in file '{file}'. Incorrect file structure. Fault read key.");
 }
-if (false == dict.ContainsKey(key))
+if (false == dict.ContainsKey(kv.Value))
 {
-dict[key] = new HashSet<TInput>();
+dict[kv.Value] = new HashSet<TInput>();
 }
 if (reader.EOS)
 {
 break;
 }
-if (false == Serializer.InputDeserializer.Invoke(reader, out input))
+var iv = await Serializer.InputDeserializer.Invoke(reader);
+if (iv.Success == false)
 {
 throw new Exception($"[StorePartitionBuilder.CompressFile] Fault compress data in file '{file}'. Incorrect file structure. Fault input value.");
 }
-dict[key].Add(input);
+dict[kv.Value].Add(iv.Value);
 }
 }
 var tempFile = FSUtils.GetAppLocalTemporaryFile();

@@ -1,7 +0,0 @@
-namespace ZeroLevel.Services.PartitionStorage.Partition
-{
-internal class InternalDirectHybridPartition<TKey, TCompactKey>
-{
-}
-}

@@ -2,6 +2,7 @@
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
+using System.Threading.Tasks;
 using ZeroLevel.Services.PartitionStorage.Interfaces;
 using ZeroLevel.Services.PartitionStorage.Partition;
 using ZeroLevel.Services.Serialization;
@@ -60,14 +61,14 @@ namespace ZeroLevel.Services.PartitionStorage
 /// </summary>
 public void DropData() => _temporaryAccessor.DropData();
 public string GetCatalogPath() => _accessor.GetCatalogPath();
-public void Store(TKey key, TInput value) => _temporaryAccessor.Store(key, value);
+public async Task Store(TKey key, TInput value) => await _temporaryAccessor.Store(key, value);
 public int CountDataFiles() => Math.Max(_accessor.CountDataFiles(),
 _temporaryAccessor.CountDataFiles());
 /// <summary>
 /// Performs compression/grouping of recorded data in a partition
 /// </summary>
-public void Compress()
+public async Task Compress()
 {
 var newFiles = Directory.GetFiles(_temporaryAccessor.GetCatalogPath());
@@ -88,7 +89,7 @@ namespace ZeroLevel.Services.PartitionStorage
 {
 foreach (var i in r.Value)
 {
-_temporaryAccessor.Store(r.Key, i);
+await _temporaryAccessor.Store(r.Key, i);
 }
 }
 }
@@ -99,7 +100,7 @@ namespace ZeroLevel.Services.PartitionStorage
 // compress new file
 foreach (var file in newFiles)
 {
-(_temporaryAccessor as StorePartitionBuilder<TKey, TInput, TValue, TMeta>)
+await (_temporaryAccessor as StorePartitionBuilder<TKey, TInput, TValue, TMeta>)
 .CompressFile(file);
 }
@@ -141,7 +142,7 @@ namespace ZeroLevel.Services.PartitionStorage
 #endregion
 #region Private methods
-private IEnumerable<StorePartitionKeyValueSearchResult<TKey, IEnumerable<TInput>>>
+private IEnumerable<SearchResult<TKey, IEnumerable<TInput>>>
 IterateReadKeyInputs(string filePath)
 {
 if (File.Exists(filePath))
@@ -154,11 +155,11 @@ namespace ZeroLevel.Services.PartitionStorage
 var k = _keyDeserializer.Invoke(reader);
 var v = _valueDeserializer.Invoke(reader);
 var input = _decompress(v);
-yield return new StorePartitionKeyValueSearchResult<TKey, IEnumerable<TInput>>
+yield return new SearchResult<TKey, IEnumerable<TInput>>
 {
 Key = k,
 Value = input,
-Status = SearchResult.Success
+Success = true
 };
 }
 }

@@ -2,6 +2,7 @@
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
+using System.Threading.Tasks;
 using ZeroLevel.Services.FileSystem;
 using ZeroLevel.Services.Memory;
 using ZeroLevel.Services.PartitionStorage.Interfaces;
@@ -29,10 +30,8 @@ namespace ZeroLevel.Services.PartitionStorage
 #region IStorePartitionAccessor
-public StorePartitionKeyValueSearchResult<TKey, TValue> Find(TKey key)
+public async Task<SearchResult<TKey, TValue>> Find(TKey key)
 {
-TKey k;
-TValue v;
 IViewAccessor memoryAccessor;
 try
 {
@@ -49,10 +48,10 @@ namespace ZeroLevel.Services.PartitionStorage
 catch (Exception ex)
 {
 Log.SystemError(ex, $"[StorePartitionAccessor.Find] Fault get IViewAccessor by key {(key == null ? string.Empty : key.ToString())}");
-return new StorePartitionKeyValueSearchResult<TKey, TValue>
+return new SearchResult<TKey, TValue>
 {
 Key = key,
-Status = SearchResult.FileLockedOrUnavaliable,
+Success = false,
 Value = default
 };
 }
@@ -62,14 +61,18 @@ namespace ZeroLevel.Services.PartitionStorage
 {
 while (reader.EOS == false)
 {
-if (Serializer.KeyDeserializer.Invoke(reader, out k) == false) break;
-if (Serializer.ValueDeserializer.Invoke(reader, out v) == false) break;
-var c = _options.KeyComparer(key, k);
-if (c == 0) return new StorePartitionKeyValueSearchResult<TKey, TValue>
+var kv = await Serializer.KeyDeserializer.Invoke(reader);
+if (kv.Success == false) break;
+var vv = await Serializer.ValueDeserializer.Invoke(reader);
+if(vv.Success == false) break;
+var c = _options.KeyComparer(key, kv.Value);
+if (c == 0) return new SearchResult<TKey, TValue>
 {
 Key = key,
-Value = v,
-Status = SearchResult.Success
+Value = vv.Value,
+Success = true
 };
 if (c == -1)
 {
@@ -78,14 +81,14 @@ namespace ZeroLevel.Services.PartitionStorage
 }
 }
 }
-return new StorePartitionKeyValueSearchResult<TKey, TValue>
+return new SearchResult<TKey, TValue>
 {
 Key = key,
-Status = SearchResult.NotFound,
+Success = false,
 Value = default
 };
 }
-public IEnumerable<StorePartitionKeyValueSearchResult<TKey, TValue>> Find(IEnumerable<TKey> keys)
+public async Task Find(IEnumerable<TKey> keys, Action<TKey, TValue> searchResultHandler)
 {
 var results = keys.Distinct()
 .GroupBy(
@@ -93,18 +96,13 @@ namespace ZeroLevel.Services.PartitionStorage
 k => k, (key, g) => new { FileName = key, Keys = g.ToArray() });
 foreach (var group in results)
 {
-foreach (var r in Find(group.FileName, group.Keys))
-{
-yield return r;
-}
+await Find(group.FileName, group.Keys, searchResultHandler);
 }
 }
-public IEnumerable<StorePartitionKeyValueSearchResult<TKey, TValue>> Iterate()
+public async IAsyncEnumerable<KV<TKey, TValue>> Iterate()
 {
 if (Directory.Exists(_catalog))
 {
-TKey k;
-TValue v;
 var files = Directory.GetFiles(_catalog);
 if (files != null && files.Length > 0)
 {
@@ -117,9 +115,13 @@ namespace ZeroLevel.Services.PartitionStorage
 {
 while (reader.EOS == false)
 {
-if (Serializer.KeyDeserializer.Invoke(reader, out k) == false) break;
-if (Serializer.ValueDeserializer.Invoke(reader, out v) == false) break;
-yield return new StorePartitionKeyValueSearchResult<TKey, TValue> { Key = k, Value = v, Status = SearchResult.Success };
+var kv = await Serializer.KeyDeserializer.Invoke(reader);
+if (kv.Success == false) break;
+var vv = await Serializer.ValueDeserializer.Invoke(reader);
+if (vv.Success == false) break;
+yield return new KV<TKey, TValue>(kv.Value, vv.Value);
 }
 }
 }
@@ -127,10 +129,8 @@ namespace ZeroLevel.Services.PartitionStorage
 }
 }
 }
-public IEnumerable<StorePartitionKeyValueSearchResult<TKey, TValue>> IterateKeyBacket(TKey key)
+public async Task IterateKeyBacket(TKey key, Action<TKey, TValue> kvHandler)
 {
-TKey k;
-TValue v;
 var fileName = _options.GetFileName(key, _info);
 var filePath = Path.Combine(_catalog, fileName);
 if (File.Exists(filePath))
@@ -142,9 +142,13 @@ namespace ZeroLevel.Services.PartitionStorage
 {
 while (reader.EOS == false)
 {
-if (Serializer.KeyDeserializer.Invoke(reader, out k) == false) break;
-if (Serializer.ValueDeserializer.Invoke(reader, out v) == false) break;
-yield return new StorePartitionKeyValueSearchResult<TKey, TValue> { Key = k, Value = v, Status = SearchResult.Success };
+var kv = await Serializer.KeyDeserializer.Invoke(reader);
+if (kv.Success == false) break;
+var vv = await Serializer.ValueDeserializer.Invoke(reader);
+if (vv.Success == false) break;
+kvHandler.Invoke(kv.Value, vv.Value);
 }
 }
 }
@@ -158,11 +162,11 @@ namespace ZeroLevel.Services.PartitionStorage
 Indexes.ResetCachee();
 }
 }
-public void RemoveAllExceptKey(TKey key, bool autoReindex = true)
+public async Task RemoveAllExceptKey(TKey key, bool autoReindex = true)
 {
-RemoveAllExceptKeys(new[] { key }, autoReindex);
+await RemoveAllExceptKeys(new[] { key }, autoReindex);
 }
-public void RemoveAllExceptKeys(IEnumerable<TKey> keys, bool autoReindex = true)
+public async Task RemoveAllExceptKeys(IEnumerable<TKey> keys, bool autoReindex = true)
 {
 var results = keys.Distinct()
 .GroupBy(
@@ -170,18 +174,18 @@ namespace ZeroLevel.Services.PartitionStorage
 k => k, (key, g) => new { FileName = key, Keys = g.OrderBy(k => k).ToArray() });
 foreach (var group in results)
 {
-RemoveKeyGroup(group.FileName, group.Keys, false, autoReindex);
+await RemoveKeyGroup(group.FileName, group.Keys, false, autoReindex);
 if (_options.Index.Enabled)
 {
 Indexes.RemoveCacheeItem(group.FileName);
 }
 }
 }
-public void RemoveKey(TKey key, bool autoReindex = false)
+public async Task RemoveKey(TKey key, bool autoReindex = false)
 {
-RemoveKeys(new[] { key }, autoReindex);
+await RemoveKeys(new[] { key }, autoReindex);
 }
-public void RemoveKeys(IEnumerable<TKey> keys, bool autoReindex = true)
+public async Task RemoveKeys(IEnumerable<TKey> keys, bool autoReindex = true)
 {
 var results = keys.Distinct()
 .GroupBy(
@@ -189,7 +193,7 @@ namespace ZeroLevel.Services.PartitionStorage
 k => k, (key, g) => new { FileName = key, Keys = g.OrderBy(k => k).ToArray() });
 foreach (var group in results)
 {
-RemoveKeyGroup(group.FileName, group.Keys, true, autoReindex);
+await RemoveKeyGroup(group.FileName, group.Keys, true, autoReindex);
 if (_options.Index.Enabled)
 {
 Indexes.RemoveCacheeItem(group.FileName);
@@ -200,8 +204,7 @@ namespace ZeroLevel.Services.PartitionStorage
 #region Private methods
-private IEnumerable<StorePartitionKeyValueSearchResult<TKey, TValue>> Find(string fileName,
-TKey[] keys)
+private async Task Find(string fileName, TKey[] keys, Action<TKey, TValue> searchResultHandler)
 {
 TKey k;
 TValue v;
@@ -230,17 +233,16 @@ namespace ZeroLevel.Services.PartitionStorage
 {
 while (reader.EOS == false)
 {
-if (Serializer.KeyDeserializer.Invoke(reader, out k) == false) break;
-if (Serializer.ValueDeserializer.Invoke(reader, out v) == false) break;
-var c = _options.KeyComparer(searchKey, k);
+var kv = await Serializer.KeyDeserializer.Invoke(reader);
+if (kv.Success == false) break;
+var vv = await Serializer.ValueDeserializer.Invoke(reader);
+if (vv.Success == false) break;
+var c = _options.KeyComparer(searchKey, kv.Value);
 if (c == 0)
 {
-yield return new StorePartitionKeyValueSearchResult<TKey, TValue>
-{
-Key = searchKey,
-Value = v,
-Status = SearchResult.Success
-};
+searchResultHandler.Invoke(kv.Value, vv.Value);
 break;
 }
 else if (c == -1)
@@ -263,17 +265,16 @@ namespace ZeroLevel.Services.PartitionStorage
 var keys_arr = keys.OrderBy(k => k).ToArray();
 while (reader.EOS == false && index < keys_arr.Length)
 {
-if (Serializer.KeyDeserializer.Invoke(reader, out k) == false) break;
-if (Serializer.ValueDeserializer.Invoke(reader, out v) == false) break;
-var c = _options.KeyComparer(keys_arr[index], k);
+var kv = await Serializer.KeyDeserializer.Invoke(reader);
+if (kv.Success == false) break;
+var vv = await Serializer.ValueDeserializer.Invoke(reader);
+if (vv.Success == false) break;
+var c = _options.KeyComparer(keys_arr[index], kv.Value);
 if (c == 0)
 {
-yield return new StorePartitionKeyValueSearchResult<TKey, TValue>
-{
-Key = keys_arr[index],
-Value = v,
-Status = SearchResult.Success
-};
+searchResultHandler.Invoke(kv.Value, vv.Value);
 index++;
 }
 else if (c == -1)
@@ -283,7 +284,7 @@ namespace ZeroLevel.Services.PartitionStorage
 index++;
 if (index < keys_arr.Length)
 {
-c = _options.KeyComparer(keys_arr[index], k);
+c = _options.KeyComparer(keys_arr[index], kv.Value);
 }
 } while (index < keys_arr.Length && c == -1);
 }
@@ -294,9 +295,8 @@ namespace ZeroLevel.Services.PartitionStorage
 }
 }
-private void RemoveKeyGroup(string fileName, TKey[] keys, bool inverseRemove, bool autoReindex)
+private async Task RemoveKeyGroup(string fileName, TKey[] keys, bool inverseRemove, bool autoReindex)
 {
-TKey k;
 var filePath = Path.Combine(_catalog, fileName);
 if (File.Exists(filePath))
 {
@@ -325,18 +325,22 @@ namespace ZeroLevel.Services.PartitionStorage
 while (reader.EOS == false)
 {
 var startPosition = reader.Position;
-if (Serializer.KeyDeserializer.Invoke(reader, out k) == false)
+var kv = await Serializer.KeyDeserializer.Invoke(reader);
+if (kv.Success == false)
 {
 Log.Error($"[StorePartitionAccessor.RemoveKeyGroup] Fault remove keys from file '{fileName}'. Incorrect file structure. Fault read key.");
 return;
 }
-if (Serializer.ValueDeserializer.Invoke(reader, out var _) == false)
+var vv = await Serializer.ValueDeserializer.Invoke(reader);
+if (vv.Success == false)
 {
 Log.Error($"[StorePartitionAccessor.RemoveKeyGroup] Fault remove keys from file '{fileName}'. Incorrect file structure. Fault read value.");
 return;
 }
 var endPosition = reader.Position;
-var c = _options.KeyComparer(searchKey, k);
+var c = _options.KeyComparer(searchKey, kv.Value);
 if (c == 0)
 {
 ranges.Add(new FilePositionRange { Start = startPosition, End = endPosition });
@@ -362,18 +366,23 @@ namespace ZeroLevel.Services.PartitionStorage
 while (reader.EOS == false && index < keys_arr.Length)
 {
 var startPosition = reader.Position;
if (Serializer.KeyDeserializer.Invoke(reader, out k) == false)
var kv = await Serializer.KeyDeserializer.Invoke(reader);
if (kv.Success == false)
{ {
Log.Error($"[StorePartitionAccessor.RemoveKeyGroup] Fault remove keys from file '{fileName}'. Incorrect file structure. Fault read key."); Log.Error($"[StorePartitionAccessor.RemoveKeyGroup] Fault remove keys from file '{fileName}'. Incorrect file structure. Fault read key.");
return; return;
} }
if (Serializer.ValueDeserializer.Invoke(reader, out var _) == false)
var vv = await Serializer.ValueDeserializer.Invoke(reader);
if (vv.Success == false)
{ {
Log.Error($"[StorePartitionAccessor.RemoveKeyGroup] Fault remove keys from file '{fileName}'. Incorrect file structure. Fault read value."); Log.Error($"[StorePartitionAccessor.RemoveKeyGroup] Fault remove keys from file '{fileName}'. Incorrect file structure. Fault read value.");
return; return;
} }
var endPosition = reader.Position; var endPosition = reader.Position;
var c = _options.KeyComparer(keys_arr[index], k); var c = _options.KeyComparer(keys_arr[index], kv.Value);
if (c == 0) if (c == 0)
{ {
ranges.Add(new FilePositionRange { Start = startPosition, End = endPosition }); ranges.Add(new FilePositionRange { Start = startPosition, End = endPosition });
@ -386,7 +395,7 @@ namespace ZeroLevel.Services.PartitionStorage
index++; index++;
if (index < keys_arr.Length) if (index < keys_arr.Length)
{ {
c = _options.KeyComparer(keys_arr[index], k); c = _options.KeyComparer(keys_arr[index], kv.Value);
} }
} while (index < keys_arr.Length && c == -1); } while (index < keys_arr.Length && c == -1);
} }
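A minimal consumption sketch of the now-awaitable accessor surface (store, meta and the keys are hypothetical placeholders; the single-key Find is assumed to return the new SearchResult<TKey, TValue>, which Store.Exists further below checks via its Success flag):

    using (var accessor = store.CreateAccessor(meta))
    {
        await accessor.RemoveKeys(new[] { keyA, keyB });   // removal now returns Task; reindexes by default
        await accessor.RemoveAllExceptKey(keyC);           // keeps only keyC in the partition
        var hit = await accessor.Find(keyC);               // assumed: SearchResult<TKey, TValue>
        if (hit.Success)
        {
            // hit.Key / hit.Value hold the located pair
        }
    }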

@ -14,7 +14,7 @@ namespace ZeroLevel.Services.PartitionStorage
internal sealed class StorePartitionBuilder<TKey, TInput, TValue, TMeta> internal sealed class StorePartitionBuilder<TKey, TInput, TValue, TMeta>
: BasePartition<TKey, TInput, TValue, TMeta>, IStorePartitionBuilder<TKey, TInput, TValue> : BasePartition<TKey, TInput, TValue, TMeta>, IStorePartitionBuilder<TKey, TInput, TValue>
{ {
private readonly Func<TKey, TInput, bool> _storeMethod; private readonly Func<TKey, TInput, Task<bool>> _storeMethod;
private long _totalRecords = 0; private long _totalRecords = 0;
@ -40,9 +40,9 @@ namespace ZeroLevel.Services.PartitionStorage
#region IStorePartitionBuilder #region IStorePartitionBuilder
public void Store(TKey key, TInput value) public async Task Store(TKey key, TInput value)
{ {
if (_storeMethod.Invoke(key, value)) if (await _storeMethod.Invoke(key, value))
{ {
Interlocked.Increment(ref _totalRecords); Interlocked.Increment(ref _totalRecords);
} }
@ -53,18 +53,16 @@ namespace ZeroLevel.Services.PartitionStorage
CloseWriteStreams(); CloseWriteStreams();
} }
public void Compress() public async Task Compress()
{ {
var files = Directory.GetFiles(_catalog); var files = Directory.GetFiles(_catalog);
if (files != null && files.Length > 0) if (files != null && files.Length > 0)
{ {
Parallel.ForEach(files, file => CompressFile(file)); await Parallel.ForEachAsync(files, async(file, _) => await CompressFile(file));
} }
} }
public IEnumerable<StorePartitionKeyValueSearchResult<TKey, TInput>> Iterate() public async IAsyncEnumerable<SearchResult<TKey, TInput>> Iterate()
{ {
TKey key;
TInput val;
var files = Directory.GetFiles(_catalog); var files = Directory.GetFiles(_catalog);
if (files != null && files.Length > 0) if (files != null && files.Length > 0)
{ {
@ -77,9 +75,13 @@ namespace ZeroLevel.Services.PartitionStorage
{ {
while (reader.EOS == false) while (reader.EOS == false)
{ {
if (Serializer.KeyDeserializer.Invoke(reader, out key) == false) break; var kv = await Serializer.KeyDeserializer.Invoke(reader);
if (Serializer.InputDeserializer.Invoke(reader, out val) == false) break; if (kv.Success == false) break;
yield return new StorePartitionKeyValueSearchResult<TKey, TInput> { Key = key, Value = val, Status = SearchResult.Success };
var vv = await Serializer.InputDeserializer.Invoke(reader);
if (vv.Success == false) break;
yield return new SearchResult<TKey, TInput> { Key = kv.Value, Value = vv.Value, Success = true };
} }
} }
} }
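A short, hedged illustration of the builder's Iterate, which now streams the staged raw records as an IAsyncEnumerable of SearchResult<TKey, TInput> (the builder instance is hypothetical) and can be used to audit what was written before Compress() groups inputs per key:

    long staged = 0;
    await foreach (var record in builder.Iterate())
    {
        if (record.Success) staged++;   // each record carries Key, Value (TInput) and Success
    }
    Console.WriteLine($"{staged} raw records staged before compression");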
@ -90,14 +92,14 @@ namespace ZeroLevel.Services.PartitionStorage
#endregion #endregion
#region Private methods #region Private methods
private bool StoreDirect(TKey key, TInput value) private async Task<bool> StoreDirect(TKey key, TInput value)
{ {
var groupKey = _options.GetFileName(key, _info); var groupKey = _options.GetFileName(key, _info);
if (TryGetWriteStream(groupKey, out var stream)) if (TryGetWriteStream(groupKey, out var stream))
{ {
Serializer.KeySerializer.Invoke(stream, key); await Serializer.KeySerializer.Invoke(stream, key);
Thread.MemoryBarrier(); Thread.MemoryBarrier();
Serializer.InputSerializer.Invoke(stream, value); await Serializer.InputSerializer.Invoke(stream, value);
return true; return true;
} }
else else
@ -106,7 +108,7 @@ namespace ZeroLevel.Services.PartitionStorage
} }
return false; return false;
} }
private bool StoreDirectSafe(TKey key, TInput value) private async Task<bool> StoreDirectSafe(TKey key, TInput value)
{ {
var groupKey = _options.GetFileName(key, _info); var groupKey = _options.GetFileName(key, _info);
bool lockTaken = false; bool lockTaken = false;
@ -115,9 +117,9 @@ namespace ZeroLevel.Services.PartitionStorage
Monitor.Enter(stream, ref lockTaken); Monitor.Enter(stream, ref lockTaken);
try try
{ {
Serializer.KeySerializer.Invoke(stream, key); await Serializer.KeySerializer.Invoke(stream, key);
Thread.MemoryBarrier(); Thread.MemoryBarrier();
Serializer.InputSerializer.Invoke(stream, value); await Serializer.InputSerializer.Invoke(stream, value);
return true; return true;
} }
finally finally
@ -135,10 +137,8 @@ namespace ZeroLevel.Services.PartitionStorage
return false; return false;
} }
internal void CompressFile(string file) internal async Task CompressFile(string file)
{ {
TKey key;
TInput input;
PhisicalFileAccessorCachee.LockFile(file); PhisicalFileAccessorCachee.LockFile(file);
try try
{ {
@ -150,25 +150,27 @@ namespace ZeroLevel.Services.PartitionStorage
{ {
while (reader.EOS == false) while (reader.EOS == false)
{ {
if (Serializer.KeyDeserializer.Invoke(reader, out key) == false) var kv = await Serializer.KeyDeserializer.Invoke(reader);
if (kv.Success == false)
{ {
throw new Exception($"[StorePartitionBuilder.CompressFile] Fault compress data in file '{file}'. Incorrect file structure. Fault read key."); throw new Exception($"[StorePartitionBuilder.CompressFile] Fault compress data in file '{file}'. Incorrect file structure. Fault read key.");
} }
if (key != null) if (kv.Value != null)
{ {
if (false == dict.ContainsKey(key)) if (false == dict.ContainsKey(kv.Value))
{ {
dict[key] = new HashSet<TInput>(); dict[kv.Value] = new HashSet<TInput>();
} }
if (reader.EOS) if (reader.EOS)
{ {
break; break;
} }
if (Serializer.InputDeserializer.Invoke(reader, out input) == false) var iv = await Serializer.InputDeserializer.Invoke(reader);
if (iv.Success == false)
{ {
throw new Exception($"[StorePartitionBuilder.CompressFile] Fault compress data in file '{file}'. Incorrect file structure. Fault read input value."); throw new Exception($"[StorePartitionBuilder.CompressFile] Fault compress data in file '{file}'. Incorrect file structure. Fault read input value.");
} }
dict[key].Add(input); dict[kv.Value].Add(iv.Value);
} }
else else
{ {

@ -1,9 +1,9 @@
namespace ZeroLevel.Services.PartitionStorage namespace ZeroLevel.Services.PartitionStorage
{ {
public enum SearchResult public class SearchResult<TKey, TValue>
{ {
Success, public bool Success { get; set; }
NotFound, public TKey Key { get; set; }
FileLockedOrUnavaliable public TValue Value { get; set; }
} }
} }

@ -1,9 +0,0 @@
namespace ZeroLevel.Services.PartitionStorage
{
public class StorePartitionKeyValueSearchResult<TKey, TValue>
{
public SearchResult Status { get; set; }
public TKey Key { get; set; }
public TValue Value { get; set; }
}
}

@ -4,6 +4,6 @@ namespace ZeroLevel.Services.PartitionStorage
{ {
public class StoreSearchResult<TKey, TValue, TMeta> public class StoreSearchResult<TKey, TValue, TMeta>
{ {
public IDictionary<TMeta, IEnumerable<StorePartitionKeyValueSearchResult<TKey, TValue>>> Results { get; set; } public IDictionary<TMeta, IEnumerable<KV<TKey, TValue>>> Results { get; set; }
} }
} }

@ -17,18 +17,12 @@ namespace ZeroLevel.Services.PartitionStorage
private readonly PhisicalFileAccessorCachee _fileAccessorCachee; private readonly PhisicalFileAccessorCachee _fileAccessorCachee;
public Store(StoreOptions<TKey, TInput, TValue, TMeta> options, public Store(StoreOptions<TKey, TInput, TValue, TMeta> options,
IStoreSerializer<TKey, TInput, TValue> serializer = null) IStoreSerializer<TKey, TInput, TValue> serializer)
{ {
if (options == null) throw new ArgumentNullException(nameof(options)); if (options == null) throw new ArgumentNullException(nameof(options));
if (serializer == null) throw new ArgumentNullException(nameof(serializer));
_options = options; _options = options;
if (serializer == null)
{
_serializer = new StoreStandartSerializer<TKey, TInput, TValue>();
}
else
{
_serializer = serializer; _serializer = serializer;
}
if (Directory.Exists(_options.RootFolder) == false) if (Directory.Exists(_options.RootFolder) == false)
{ {
Directory.CreateDirectory(_options.RootFolder); Directory.CreateDirectory(_options.RootFolder);
@ -76,10 +70,10 @@ namespace ZeroLevel.Services.PartitionStorage
_fileAccessorCachee.DropAllIndexReaders(); _fileAccessorCachee.DropAllIndexReaders();
} }
public StoreSearchResult<TKey, TValue, TMeta> Search(StoreSearchRequest<TKey, TMeta> searchRequest) public async Task<StoreSearchResult<TKey, TValue, TMeta>> Search(StoreSearchRequest<TKey, TMeta> searchRequest)
{ {
var result = new StoreSearchResult<TKey, TValue, TMeta>(); var result = new StoreSearchResult<TKey, TValue, TMeta>();
var results = new ConcurrentDictionary<TMeta, IEnumerable<StorePartitionKeyValueSearchResult<TKey, TValue>>>(); var results = new ConcurrentDictionary<TMeta, IEnumerable<KV<TKey, TValue>>>();
if (searchRequest.PartitionSearchRequests?.Any() ?? false) if (searchRequest.PartitionSearchRequests?.Any() ?? false)
{ {
var partitionsSearchInfo = searchRequest var partitionsSearchInfo = searchRequest
@ -89,16 +83,19 @@ namespace ZeroLevel.Services.PartitionStorage
{ {
MaxDegreeOfParallelism = _options.MaxDegreeOfParallelism MaxDegreeOfParallelism = _options.MaxDegreeOfParallelism
}; };
Parallel.ForEach(partitionsSearchInfo, options, (pair, _) => await Parallel.ForEachAsync(partitionsSearchInfo, options, async (pair, _) =>
{ {
var accessor = CreateAccessor(pair.Key); var accessor = CreateAccessor(pair.Key);
if (accessor != null) if (accessor != null)
{ {
using (accessor) using (accessor)
{ {
results[pair.Key] = accessor var set = new List<KV<TKey, TValue>>();
.Find(pair.Value) await foreach (var kv in accessor.Iterate())
.ToArray(); {
set.Add(new KV<TKey, TValue>(kv.Key, kv.Value));
}
results[pair.Key] = set;
} }
} }
}); });
@ -112,29 +109,30 @@ namespace ZeroLevel.Services.PartitionStorage
_fileAccessorCachee.Dispose(); _fileAccessorCachee.Dispose();
} }
public void Bypass(TMeta meta, Action<TKey, TValue> handler) public async IAsyncEnumerable<KV<TKey, TValue>> Bypass(TMeta meta)
{ {
var accessor = CreateAccessor(meta); var accessor = CreateAccessor(meta);
if (accessor != null) if (accessor != null)
{ {
using (accessor) using (accessor)
{ {
foreach (var kv in accessor.Iterate()) await foreach (var kv in accessor.Iterate())
{ {
handler.Invoke(kv.Key, kv.Value); yield return kv;
} }
} }
} }
} }
public bool Exists(TMeta meta, TKey key) public async Task<bool> Exists(TMeta meta, TKey key)
{ {
var accessor = CreateAccessor(meta); var accessor = CreateAccessor(meta);
if (accessor != null) if (accessor != null)
{ {
using (accessor) using (accessor)
{ {
return accessor.Find(key).Status == SearchResult.Success; var info = await accessor.Find(key);
return info.Success;
} }
} }
return false; return false;
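Taken together, a hedged sketch of the reworked Store surface (store, meta, someKey and request are placeholders; the Key/Value property names on KV<TKey, TValue> are assumed from the constructor call used in Search above):

    if (await store.Exists(meta, someKey))                       // Task<bool> now
    {
        await foreach (var kv in store.Bypass(meta))             // lazy partition walk instead of a callback
        {
            Console.WriteLine($"{kv.Key}");
        }
        var search = await store.Search(request);                // request: StoreSearchRequest<TKey, TMeta>
        foreach (var partition in search.Results)                // IDictionary<TMeta, IEnumerable<KV<TKey, TValue>>>
        {
            Console.WriteLine($"{partition.Key}: {partition.Value.Count()} matches");   // Count() needs System.Linq
        }
    }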

@ -0,0 +1,44 @@
using System;
using System.Threading.Tasks;
using ZeroLevel.Services.PartitionStorage.Interfaces;
using ZeroLevel.Services.Serialization;
namespace ZeroLevel.Services.PartitionStorage
{
public record DeserializeResult<T>(bool Success, T Value);
public delegate Task<DeserializeResult<T>> TryDeserializeMethod<T>(MemoryStreamReader reader);
public sealed class StoreSerializers<TKey, TInput, TValue>
: IStoreSerializer<TKey, TInput, TValue>
{
private readonly Func<MemoryStreamWriter, TKey, Task> _keySerializer;
private readonly Func<MemoryStreamWriter, TInput, Task> _inputSerializer;
private readonly Func<MemoryStreamReader, Task<DeserializeResult<TKey>>> _keyDeserializer;
private readonly Func<MemoryStreamReader, Task<DeserializeResult<TInput>>> _inputDeserializer;
private readonly Func<MemoryStreamReader, Task<DeserializeResult<TValue>>> _valueDeserializer;
public StoreSerializers(Func<MemoryStreamWriter, TKey, Task> keySerializer,
Func<MemoryStreamWriter, TInput, Task> inputSerializer,
Func<MemoryStreamReader, Task<DeserializeResult<TKey>>> keyDeserializer,
Func<MemoryStreamReader, Task<DeserializeResult<TInput>>> inputDeserializer,
Func<MemoryStreamReader, Task<DeserializeResult<TValue>>> valueDeserializer)
{
_keySerializer = keySerializer;
_inputSerializer = inputSerializer;
_keyDeserializer = keyDeserializer;
_inputDeserializer = inputDeserializer;
_valueDeserializer = valueDeserializer;
}
public Func<MemoryStreamWriter, TKey, Task> KeySerializer => _keySerializer;
public Func<MemoryStreamWriter, TInput, Task> InputSerializer => _inputSerializer;
public Func<MemoryStreamReader, Task<DeserializeResult<TKey>>> KeyDeserializer => _keyDeserializer;
public Func<MemoryStreamReader, Task<DeserializeResult<TInput>>> InputDeserializer => _inputDeserializer;
public Func<MemoryStreamReader, Task<DeserializeResult<TValue>>> ValueDeserializer => _valueDeserializer;
}
}
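Because every deserializer now reports failure through DeserializeResult<T> rather than a bool/out pair, a generic adapter is easy to write. A hedged helper sketch (placed in any static utility class) that builds a TryDeserializeMethod<T> on top of the DeserializeCompatibleAsync method added to MessageSerializer further down; MemoryStreamReader is assumed to keep implementing IBinaryReader:

    public static TryDeserializeMethod<T> SafeCompatible<T>() => async reader =>
    {
        try
        {
            var value = await MessageSerializer.DeserializeCompatibleAsync<T>(reader);
            return new DeserializeResult<T>(true, value);
        }
        catch
        {
            return new DeserializeResult<T>(false, default);    // failure is reported, not thrown
        }
    };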

@ -1,39 +0,0 @@
using System;
using ZeroLevel.Services.PartitionStorage.Interfaces;
using ZeroLevel.Services.Serialization;
namespace ZeroLevel.Services.PartitionStorage
{
// TODO INTERNAL
public sealed class StoreStandartSerializer<TKey, TInput, TValue>
: IStoreSerializer<TKey, TInput, TValue>
{
private readonly Action<MemoryStreamWriter, TKey> _keySerializer;
private readonly Action<MemoryStreamWriter, TInput> _inputSerializer;
private readonly TryDeserializeMethod<TKey> _keyDeserializer;
private readonly TryDeserializeMethod<TInput> _inputDeserializer;
private readonly TryDeserializeMethod<TValue> _valueDeserializer;
public StoreStandartSerializer()
{
_keySerializer = MessageSerializer.GetSerializer<TKey>();
_inputSerializer = MessageSerializer.GetSerializer<TInput>();
_keyDeserializer = MessageSerializer.GetSafetyDeserializer<TKey>();
_inputDeserializer = MessageSerializer.GetSafetyDeserializer<TInput>();
_valueDeserializer = MessageSerializer.GetSafetyDeserializer<TValue>();
}
public Action<MemoryStreamWriter, TKey> KeySerializer => _keySerializer;
public Action<MemoryStreamWriter, TInput> InputSerializer => _inputSerializer;
public TryDeserializeMethod<TKey> KeyDeserializer => _keyDeserializer;
public TryDeserializeMethod<TInput> InputDeserializer => _inputDeserializer;
public TryDeserializeMethod<TValue> ValueDeserializer => _valueDeserializer;
}
}

@ -1,7 +0,0 @@
namespace ZeroLevel.Services.PartitionStorage
{
public sealed class UniformKeyValueStorage<TKey, TValue>
{
}
}

@ -2,6 +2,7 @@
using System.Collections.Concurrent; using System.Collections.Concurrent;
using System.Collections.Generic; using System.Collections.Generic;
using System.Net; using System.Net;
using System.Threading.Tasks;
namespace ZeroLevel.Services.Serialization namespace ZeroLevel.Services.Serialization
{ {
@ -57,7 +58,7 @@ namespace ZeroLevel.Services.Serialization
IPAddress[] ReadIPArray(); IPAddress[] ReadIPArray();
IPEndPoint[] ReadIPEndPointArray(); IPEndPoint[] ReadIPEndPointArray();
Guid[] ReadGuidArray(); Guid[] ReadGuidArray();
DateTime[] ReadDateTimeArray(); DateTime?[] ReadDateTimeArray();
Int64[] ReadInt64Array(); Int64[] ReadInt64Array();
Int32[] ReadInt32Array(); Int32[] ReadInt32Array();
UInt64[] ReadUInt64Array(); UInt64[] ReadUInt64Array();
@ -78,7 +79,7 @@ namespace ZeroLevel.Services.Serialization
List<T> ReadCollection<T>() where T : IBinarySerializable, new(); List<T> ReadCollection<T>() where T : IBinarySerializable, new();
List<string> ReadStringCollection(); List<string> ReadStringCollection();
List<Guid> ReadGuidCollection(); List<Guid> ReadGuidCollection();
List<DateTime> ReadDateTimeCollection(); List<DateTime?> ReadDateTimeCollection();
List<char> ReadCharCollection(); List<char> ReadCharCollection();
List<Int64> ReadInt64Collection(); List<Int64> ReadInt64Collection();
List<Int32> ReadInt32Collection(); List<Int32> ReadInt32Collection();
@ -104,7 +105,7 @@ namespace ZeroLevel.Services.Serialization
IEnumerable<IPAddress> ReadIPCollectionLazy(); IEnumerable<IPAddress> ReadIPCollectionLazy();
IEnumerable<IPEndPoint> ReadIPEndPointCollectionLazy(); IEnumerable<IPEndPoint> ReadIPEndPointCollectionLazy();
IEnumerable<Guid> ReadGuidCollectionLazy(); IEnumerable<Guid> ReadGuidCollectionLazy();
IEnumerable<DateTime> ReadDateTimeCollectionLazy(); IEnumerable<DateTime?> ReadDateTimeCollectionLazy();
IEnumerable<Int64> ReadInt64CollectionLazy(); IEnumerable<Int64> ReadInt64CollectionLazy();
IEnumerable<Int32> ReadInt32CollectionLazy(); IEnumerable<Int32> ReadInt32CollectionLazy();
IEnumerable<UInt64> ReadUInt64CollectionLazy(); IEnumerable<UInt64> ReadUInt64CollectionLazy();
@ -134,4 +135,86 @@ namespace ZeroLevel.Services.Serialization
void SetPosition(long position); void SetPosition(long position);
} }
public interface IBinaryReaderAsync
: IDisposable
{
Task<bool> ReadBooleanAsync();
Task<char> ReadCharAsync();
Task<byte> ReadByteAsync();
Task<byte[]> ReadBytesAsync();
Task<Double> ReadDoubleAsync();
Task<float> ReadFloatAsync();
Task<short> ReadShortAsync();
Task<ushort> ReadUShortAsync();
Task<Int32> ReadInt32Async();
Task<UInt32> ReadUInt32Async();
Task<Int64> ReadLongAsync();
Task<UInt64> ReadULongAsync();
Task<string> ReadStringAsync();
Task<Guid> ReadGuidAsync();
Task<DateTime?> ReadDateTimeAsync();
Task<TimeOnly?> ReadTimeAsync();
Task<DateOnly?> ReadDateAsync();
Task<decimal> ReadDecimalAsync();
Task<TimeSpan> ReadTimeSpanAsync();
Task<IPAddress> ReadIPAsync();
Task<IPEndPoint> ReadIPEndpointAsync();
#region Extensions
#region Arrays
Task<T[]> ReadArrayAsync<T>() where T : IAsyncBinarySerializable, new();
Task<string[]> ReadStringArrayAsync();
Task<IPAddress[]> ReadIPArrayAsync();
Task<IPEndPoint[]> ReadIPEndPointArrayAsync();
Task<Guid[]> ReadGuidArrayAsync();
Task<DateTime?[]> ReadDateTimeArrayAsync();
Task<Int64[]> ReadInt64ArrayAsync();
Task<Int32[]> ReadInt32ArrayAsync();
Task<UInt64[]> ReadUInt64ArrayAsync();
Task<UInt32[]> ReadUInt32ArrayAsync();
Task<char[]> ReadCharArrayAsync();
Task<short[]> ReadShortArrayAsync();
Task<ushort[]> ReadUShortArrayAsync();
Task<float[]> ReadFloatArrayAsync();
Task<Double[]> ReadDoubleArrayAsync();
Task<bool[]> ReadBooleanArrayAsync();
Task<byte[]> ReadByteArrayAsync();
Task<byte[][]> ReadByteArrayArrayAsync();
Task<decimal[]> ReadDecimalArrayAsync();
Task<TimeSpan[]> ReadTimeSpanArrayAsync();
#endregion
#region Collections
Task<List<T>> ReadCollectionAsync<T>() where T : IAsyncBinarySerializable, new();
Task<List<string>> ReadStringCollectionAsync();
Task<List<Guid>> ReadGuidCollectionAsync();
Task<List<DateTime?>> ReadDateTimeCollectionAsync();
Task<List<char>> ReadCharCollectionAsync();
Task<List<Int64>> ReadInt64CollectionAsync();
Task<List<Int32>> ReadInt32CollectionAsync();
Task<List<Double>> ReadDoubleCollectionAsync();
Task<List<Decimal>> ReadDecimalCollectionAsync();
Task<List<TimeSpan>> ReadTimeSpanCollectionAsync();
Task<List<float>> ReadFloatCollectionAsync();
Task<List<bool>> ReadBooleanCollectionAsync();
Task<List<byte>> ReadByteCollectionAsync();
Task<List<byte[]>> ReadByteArrayCollectionAsync();
Task<List<IPAddress>> ReadIPCollectionAsync();
Task<List<IPEndPoint>> ReadIPEndPointCollectionAsync();
Task<List<UInt64>> ReadUInt64CollectionAsync();
Task<List<UInt32>> ReadUInt32CollectionAsync();
Task<List<short>> ReadShortCollectionAsync();
Task<List<ushort>> ReadUShortCollectionAsync();
#endregion
Task<T> ReadAsync<T>() where T : IAsyncBinarySerializable;
Task<T> ReadAsync<T>(object arg) where T : IAsyncBinarySerializable;
Task<T> ReadCompatibleAsync<T>();
Task<Dictionary<TKey, TValue>> ReadDictionaryAsync<TKey, TValue>();
Task<ConcurrentDictionary<TKey, TValue>> ReadDictionaryAsConcurrentAsync<TKey, TValue>();
#endregion Extensions
}
} }
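The nullable-DateTime change above ripples through the writer interface as well (the matching WriteArray(DateTime?[]) overload appears further down). A hedged round-trip sketch; writer and reader stand for a MemoryStreamWriter/MemoryStreamReader pair over the same payload and are assumed to implement these members:

    writer.WriteArray(new DateTime?[] { DateTime.UtcNow, null });   // nullable-array overload
    // ... later, reading the same stream back:
    DateTime?[] stamps = reader.ReadDateTimeArray();                // now declared as DateTime?[]
    Console.WriteLine(stamps[1].HasValue ? stamps[1].Value.ToString("O") : "null");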

@ -1,4 +1,6 @@
namespace ZeroLevel.Services.Serialization using System.Threading.Tasks;
namespace ZeroLevel.Services.Serialization
{ {
public interface IBinarySerializable public interface IBinarySerializable
{ {
@ -6,4 +8,11 @@
void Deserialize(IBinaryReader reader); void Deserialize(IBinaryReader reader);
} }
public interface IAsyncBinarySerializable
{
Task SerializeAsync(IBinaryWriter writer);
Task DeserializeAsync(IBinaryReader reader);
}
} }

@ -3,6 +3,7 @@ using System.Collections.Concurrent;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO; using System.IO;
using System.Net; using System.Net;
using System.Threading.Tasks;
namespace ZeroLevel.Services.Serialization namespace ZeroLevel.Services.Serialization
{ {
@ -60,6 +61,7 @@ namespace ZeroLevel.Services.Serialization
void WriteArray(IPEndPoint[] array); void WriteArray(IPEndPoint[] array);
void WriteArray(Guid[] array); void WriteArray(Guid[] array);
void WriteArray(DateTime[] array); void WriteArray(DateTime[] array);
void WriteArray(DateTime?[] array);
void WriteArray(UInt64[] array); void WriteArray(UInt64[] array);
void WriteArray(UInt32[] array); void WriteArray(UInt32[] array);
void WriteArray(char[] array); void WriteArray(char[] array);
@ -83,6 +85,7 @@ namespace ZeroLevel.Services.Serialization
void WriteCollection(IEnumerable<char> collection); void WriteCollection(IEnumerable<char> collection);
void WriteCollection(IEnumerable<Guid> collection); void WriteCollection(IEnumerable<Guid> collection);
void WriteCollection(IEnumerable<DateTime> collection); void WriteCollection(IEnumerable<DateTime> collection);
void WriteCollection(IEnumerable<DateTime?> collection);
void WriteCollection(IEnumerable<Int64> collection); void WriteCollection(IEnumerable<Int64> collection);
void WriteCollection(IEnumerable<Int32> collection); void WriteCollection(IEnumerable<Int32> collection);
void WriteCollection(IEnumerable<UInt64> collection); void WriteCollection(IEnumerable<UInt64> collection);
@ -112,4 +115,110 @@ namespace ZeroLevel.Services.Serialization
Stream Stream { get; } Stream Stream { get; }
} }
public interface IAsyncBinaryWriter
: IDisposable
{
Task WriteCharAsync(char val);
Task WriteBytesAsync(byte[] val);
Task WriteShortAsync(short number);
Task WriteUShortAsync(ushort number);
Task WriteDoubleAsync(double val);
Task WriteFloatAsync(float val);
Task WriteInt32Async(Int32 number);
Task WriteUInt32Async(UInt32 number);
Task WriteLongAsync(Int64 number);
Task WriteULongAsync(UInt64 number);
Task WriteStringAsync(string line);
Task WriteGuidAsync(Guid guid);
Task WriteDateTimeAsync(DateTime? datetime);
Task WriteTimeAsync(TimeOnly? time);
Task WriteDateAsync(DateOnly? date);
Task WriteDecimalAsync(Decimal number);
Task WriteTimeSpanAsync(TimeSpan period);
Task WriteIPAsync(IPAddress ip);
Task WriteIPEndpointAsync(IPEndPoint endpoint);
#region Extensions
#region Arrays
Task WriteArrayAsync<T>(T[] array)
where T : IAsyncBinarySerializable;
Task WriteArrayAsync(string[] array);
Task WriteArrayAsync(IPAddress[] array);
Task WriteArrayAsync(IPEndPoint[] array);
Task WriteArrayAsync(Guid[] array);
Task WriteArrayAsync(DateTime[] array);
Task WriteArrayAsync(DateTime?[] array);
Task WriteArrayAsync(UInt64[] array);
Task WriteArrayAsync(UInt32[] array);
Task WriteArrayAsync(char[] array);
Task WriteArrayAsync(short[] array);
Task WriteArrayAsync(ushort[] array);
Task WriteArrayAsync(Int64[] array);
Task WriteArrayAsync(Int32[] array);
Task WriteArrayAsync(float[] array);
Task WriteArrayAsync(Double[] array);
Task WriteArrayAsync(bool[] array);
Task WriteArrayAsync(byte[] array);
Task WriteArrayAsync(byte[][] array);
Task WriteArrayAsync(decimal[] array);
Task WriteArrayAsync(TimeSpan[] array);
#endregion
#region Collections
Task WriteCollectionAsync<T>(IEnumerable<T> collection)
where T : IAsyncBinarySerializable;
Task WriteCollectionAsync(IEnumerable<string> collection);
Task WriteCollectionAsync(IEnumerable<char> collection);
Task WriteCollectionAsync(IEnumerable<Guid> collection);
Task WriteCollectionAsync(IEnumerable<DateTime> collection);
Task WriteCollectionAsync(IEnumerable<DateTime?> collection);
Task WriteCollectionAsync(IEnumerable<Int64> collection);
Task WriteCollectionAsync(IEnumerable<Int32> collection);
Task WriteCollectionAsync(IEnumerable<UInt64> collection);
Task WriteCollectionAsync(IEnumerable<UInt32> collection);
Task WriteCollectionAsync(IEnumerable<short> collection);
Task WriteCollectionAsync(IEnumerable<ushort> collection);
Task WriteCollectionAsync(IEnumerable<Double> collection);
Task WriteCollectionAsync(IEnumerable<Decimal> collection);
Task WriteCollectionAsync(IEnumerable<TimeSpan> collection);
Task WriteCollectionAsync(IEnumerable<float> collection);
Task WriteCollectionAsync(IEnumerable<bool> collection);
Task WriteCollectionAsync(IEnumerable<byte> collection);
Task WriteCollectionAsync(IEnumerable<byte[]> collection);
Task WriteCollectionAsync(IEnumerable<IPEndPoint> collection);
Task WriteCollectionAsync(IEnumerable<IPAddress> collection);
#endregion
Task WriteDictionaryAsync<TKey, TValue>(IDictionary<TKey, TValue> collection);
Task WriteDictionaryAsync<TKey, TValue>(ConcurrentDictionary<TKey, TValue> collection);
Task WriteAsync<T>(T item)
where T : IAsyncBinarySerializable;
Task WriteCompatibleAsync<T>(T item);
#endregion Extensions
Stream Stream { get; }
}
} }
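A hedged example of a type opting into the new asynchronous contract; UserRecord is purely illustrative, and the casts assume the concrete MemoryStreamWriter/MemoryStreamReader implement IAsyncBinaryWriter/IBinaryReaderAsync alongside the synchronous interfaces (their diffs are suppressed just below):

    using System.Threading.Tasks;
    using ZeroLevel.Services.Serialization;

    public class UserRecord : IAsyncBinarySerializable
    {
        public long Id;
        public string Name;

        public async Task SerializeAsync(IBinaryWriter writer)
        {
            var w = (IAsyncBinaryWriter)writer;      // assumption: concrete writer implements both interfaces
            await w.WriteLongAsync(Id);
            await w.WriteStringAsync(Name);
        }

        public async Task DeserializeAsync(IBinaryReader reader)
        {
            var r = (IBinaryReaderAsync)reader;      // assumption: concrete reader implements both interfaces
            Id = await r.ReadLongAsync();
            Name = await r.ReadStringAsync();
        }
    }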

File diff suppressed because it is too large

File diff suppressed because it is too large

@ -2,10 +2,10 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.IO; using System.IO;
using System.Runtime.Serialization; using System.Runtime.Serialization;
using System.Threading.Tasks;
namespace ZeroLevel.Services.Serialization namespace ZeroLevel.Services.Serialization
{ {
public delegate bool TryDeserializeMethod<T>(MemoryStreamReader reader, out T output);
public static class MessageSerializer public static class MessageSerializer
{ {
public static byte[] Serialize<T>(T obj) public static byte[] Serialize<T>(T obj)
@ -86,15 +86,6 @@ namespace ZeroLevel.Services.Serialization
return false; return false;
} }
public static TryDeserializeMethod<T> GetSafetyDeserializer<T>()
{
if (typeof(IBinarySerializable).IsAssignableFrom(typeof(T)))
{
return TryObjectDeserialize<T>;
}
return TryPrimitiveTypeDeserialize<T>;
}
public static byte[] SerializeCompatible(object obj) public static byte[] SerializeCompatible(object obj)
{ {
if (null == obj) if (null == obj)
@ -248,6 +239,16 @@ namespace ZeroLevel.Services.Serialization
} }
return PrimitiveTypeSerializer.Deserialize<T>(reader); return PrimitiveTypeSerializer.Deserialize<T>(reader);
} }
public static async Task<T> DeserializeCompatibleAsync<T>(IBinaryReader reader)
{
if (typeof(IAsyncBinarySerializable).IsAssignableFrom(typeof(T)))
{
var direct = (IAsyncBinarySerializable)Activator.CreateInstance<T>();
await direct.DeserializeAsync(reader);
return (T)direct;
}
return PrimitiveTypeSerializer.Deserialize<T>(reader);
}
public static object DeserializeCompatible(Type type, byte[] data) public static object DeserializeCompatible(Type type, byte[] data)
{ {

@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Net; using System.Net;
using System.Reflection; using System.Reflection;
using System.Runtime.CompilerServices;
using ZeroLevel.Services.Invokation; using ZeroLevel.Services.Invokation;
using ZeroLevel.Services.Reflection; using ZeroLevel.Services.Reflection;
@ -83,6 +82,7 @@ namespace ZeroLevel.Services.Serialization
_cachee.Add(typeof(ushort[]), Create<ushort[]>()); _cachee.Add(typeof(ushort[]), Create<ushort[]>());
_cachee.Add(typeof(Decimal[]), Create<Decimal[]>()); _cachee.Add(typeof(Decimal[]), Create<Decimal[]>());
_cachee.Add(typeof(DateTime[]), Create<DateTime[]>()); _cachee.Add(typeof(DateTime[]), Create<DateTime[]>());
_cachee.Add(typeof(DateTime?[]), Create<DateTime?[]>());
_cachee.Add(typeof(Guid[]), Create<Guid[]>()); _cachee.Add(typeof(Guid[]), Create<Guid[]>());
_cachee.Add(typeof(String[]), Create<String[]>()); _cachee.Add(typeof(String[]), Create<String[]>());
_cachee.Add(typeof(TimeSpan[]), Create<TimeSpan[]>()); _cachee.Add(typeof(TimeSpan[]), Create<TimeSpan[]>());
@ -103,6 +103,7 @@ namespace ZeroLevel.Services.Serialization
_cachee.Add(typeof(IEnumerable<ushort>), Create<IEnumerable<ushort>>()); _cachee.Add(typeof(IEnumerable<ushort>), Create<IEnumerable<ushort>>());
_cachee.Add(typeof(IEnumerable<Decimal>), Create<IEnumerable<Decimal>>()); _cachee.Add(typeof(IEnumerable<Decimal>), Create<IEnumerable<Decimal>>());
_cachee.Add(typeof(IEnumerable<DateTime>), Create<IEnumerable<DateTime>>()); _cachee.Add(typeof(IEnumerable<DateTime>), Create<IEnumerable<DateTime>>());
_cachee.Add(typeof(IEnumerable<DateTime?>), Create<IEnumerable<DateTime?>>());
_cachee.Add(typeof(IEnumerable<Guid>), Create<IEnumerable<Guid>>()); _cachee.Add(typeof(IEnumerable<Guid>), Create<IEnumerable<Guid>>());
_cachee.Add(typeof(IEnumerable<String>), Create<IEnumerable<String>>()); _cachee.Add(typeof(IEnumerable<String>), Create<IEnumerable<String>>());
_cachee.Add(typeof(IEnumerable<TimeSpan>), Create<IEnumerable<TimeSpan>>()); _cachee.Add(typeof(IEnumerable<TimeSpan>), Create<IEnumerable<TimeSpan>>());
@ -122,6 +123,7 @@ namespace ZeroLevel.Services.Serialization
_arrayTypesCachee.Add(typeof(ushort), typeof(ushort[])); _arrayTypesCachee.Add(typeof(ushort), typeof(ushort[]));
_arrayTypesCachee.Add(typeof(Decimal), typeof(Decimal[])); _arrayTypesCachee.Add(typeof(Decimal), typeof(Decimal[]));
_arrayTypesCachee.Add(typeof(DateTime), typeof(DateTime[])); _arrayTypesCachee.Add(typeof(DateTime), typeof(DateTime[]));
_arrayTypesCachee.Add(typeof(DateTime?), typeof(DateTime?[]));
_arrayTypesCachee.Add(typeof(Guid), typeof(Guid[])); _arrayTypesCachee.Add(typeof(Guid), typeof(Guid[]));
_arrayTypesCachee.Add(typeof(String), typeof(String[])); _arrayTypesCachee.Add(typeof(String), typeof(String[]));
_arrayTypesCachee.Add(typeof(TimeSpan), typeof(TimeSpan[])); _arrayTypesCachee.Add(typeof(TimeSpan), typeof(TimeSpan[]));
@ -142,6 +144,7 @@ namespace ZeroLevel.Services.Serialization
_enumTypesCachee.Add(typeof(ushort), typeof(IEnumerable<ushort>)); _enumTypesCachee.Add(typeof(ushort), typeof(IEnumerable<ushort>));
_enumTypesCachee.Add(typeof(Decimal), typeof(IEnumerable<Decimal>)); _enumTypesCachee.Add(typeof(Decimal), typeof(IEnumerable<Decimal>));
_enumTypesCachee.Add(typeof(DateTime), typeof(IEnumerable<DateTime>)); _enumTypesCachee.Add(typeof(DateTime), typeof(IEnumerable<DateTime>));
_enumTypesCachee.Add(typeof(DateTime?), typeof(IEnumerable<DateTime?>));
_enumTypesCachee.Add(typeof(Guid), typeof(IEnumerable<Guid>)); _enumTypesCachee.Add(typeof(Guid), typeof(IEnumerable<Guid>));
_enumTypesCachee.Add(typeof(String), typeof(IEnumerable<String>)); _enumTypesCachee.Add(typeof(String), typeof(IEnumerable<String>));
_enumTypesCachee.Add(typeof(TimeSpan), typeof(IEnumerable<TimeSpan>)); _enumTypesCachee.Add(typeof(TimeSpan), typeof(IEnumerable<TimeSpan>));
@ -286,6 +289,11 @@ namespace ZeroLevel.Services.Serialization
wrapper.ReadId = wrapper.Invoker.Configure(typeof(MemoryStreamReader), "ReadDateTimeArray").First(); wrapper.ReadId = wrapper.Invoker.Configure(typeof(MemoryStreamReader), "ReadDateTimeArray").First();
wrapper.WriteId = wrapper.Invoker.Configure(typeof(MemoryStreamWriter), CreateArrayPredicate<Tw>()).First(); wrapper.WriteId = wrapper.Invoker.Configure(typeof(MemoryStreamWriter), CreateArrayPredicate<Tw>()).First();
} }
else if (type == typeof(DateTime?[]))
{
wrapper.ReadId = wrapper.Invoker.Configure(typeof(MemoryStreamReader), "ReadDateTimeArray").First();
wrapper.WriteId = wrapper.Invoker.Configure(typeof(MemoryStreamWriter), CreateArrayPredicate<Tw>()).First();
}
else if (type == typeof(Double[])) else if (type == typeof(Double[]))
{ {
wrapper.ReadId = wrapper.Invoker.Configure(typeof(MemoryStreamReader), "ReadDoubleArray").First(); wrapper.ReadId = wrapper.Invoker.Configure(typeof(MemoryStreamReader), "ReadDoubleArray").First();
@ -384,6 +392,11 @@ namespace ZeroLevel.Services.Serialization
wrapper.ReadId = wrapper.Invoker.Configure(typeof(MemoryStreamReader), "ReadDateTimeCollection").First(); wrapper.ReadId = wrapper.Invoker.Configure(typeof(MemoryStreamReader), "ReadDateTimeCollection").First();
wrapper.WriteId = wrapper.Invoker.Configure(typeof(MemoryStreamWriter), CreateCollectionPredicate<Tw>()).First(); wrapper.WriteId = wrapper.Invoker.Configure(typeof(MemoryStreamWriter), CreateCollectionPredicate<Tw>()).First();
} }
else if (type == typeof(IEnumerable<DateTime?>))
{
wrapper.ReadId = wrapper.Invoker.Configure(typeof(MemoryStreamReader), "ReadDateTimeCollection").First();
wrapper.WriteId = wrapper.Invoker.Configure(typeof(MemoryStreamWriter), CreateCollectionPredicate<Tw>()).First();
}
else if (type == typeof(IEnumerable<Double>)) else if (type == typeof(IEnumerable<Double>))
{ {
wrapper.ReadId = wrapper.Invoker.Configure(typeof(MemoryStreamReader), "ReadDoubleCollection").First(); wrapper.ReadId = wrapper.Invoker.Configure(typeof(MemoryStreamReader), "ReadDoubleCollection").First();
