Move to .net standard 2.1

master
Ogoun 5 months ago
parent 640879c3bd
commit 40188181fb

@ -2,7 +2,7 @@
<PropertyGroup> <PropertyGroup>
<OutputType>Exe</OutputType> <OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework> <TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings> <ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable> <Nullable>enable</Nullable>
</PropertyGroup> </PropertyGroup>

@ -2,7 +2,7 @@
<PropertyGroup> <PropertyGroup>
<OutputType>Exe</OutputType> <OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework> <TargetFramework>net8.0</TargetFramework>
<Platforms>AnyCPU;x64;x86</Platforms> <Platforms>AnyCPU;x64;x86</Platforms>
</PropertyGroup> </PropertyGroup>

@ -2,7 +2,7 @@
<PropertyGroup> <PropertyGroup>
<OutputType>WinExe</OutputType> <OutputType>WinExe</OutputType>
<TargetFramework>net6.0-windows</TargetFramework> <TargetFramework>net8.0-windows7.0</TargetFramework>
<Nullable>enable</Nullable> <Nullable>enable</Nullable>
<UseWPF>true</UseWPF> <UseWPF>true</UseWPF>
<Platforms>AnyCPU;x64</Platforms> <Platforms>AnyCPU;x64</Platforms>

@ -2,7 +2,7 @@
<PropertyGroup> <PropertyGroup>
<OutputType>WinExe</OutputType> <OutputType>WinExe</OutputType>
<TargetFramework>net6.0-windows</TargetFramework> <TargetFramework>net8.0-windows7.0</TargetFramework>
<Nullable>enable</Nullable> <Nullable>enable</Nullable>
<UseWPF>true</UseWPF> <UseWPF>true</UseWPF>
<Platforms>AnyCPU;x64</Platforms> <Platforms>AnyCPU;x64</Platforms>

@ -1,23 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework>
<Platforms>AnyCPU;x64</Platforms>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="System.Drawing.Common" Version="8.0.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\ZeroLevel.HNSW\ZeroLevel.HNSW.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="t10k-images.idx3-ubyte">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

@ -1,7 +0,0 @@
namespace HNSWDemo.Model
{
    /// <summary>
    /// Gender attribute attached to generated <see cref="Person"/> test records.
    /// NOTE(review): "Feemale" is a misspelling of "Female", but it is a public
    /// enum member referenced by other demo code (FilterTest, Person), so renaming
    /// it would break callers — fix repo-wide if ever touched.
    /// </summary>
    public enum Gender
    {
        Unknown, Male, Feemale
    }
}

@ -1,51 +0,0 @@
using System;
using System.Collections.Generic;
using ZeroLevel.HNSW;
namespace HNSWDemo.Model
{
/// <summary>
/// Test data record: a random person (age, gender, unique pseudo phone number)
/// paired with a random normalized feature vector.
/// </summary>
public class Person
{
    public Gender Gender { get; set; }
    public int Age { get; set; }
    public long Number { get; set; }

    // Shared RNG. Bug fix: the previous implementation created
    // `new Random((int)Environment.TickCount)` inside every Generate() call, so
    // calls within the same clock tick were seeded identically and produced
    // duplicate Age/Gender/Number sequences.
    private static readonly Random _rnd = new Random();

    // Tracks already-issued phone numbers so CreateNumber never returns a duplicate.
    private static readonly HashSet<long> _exists = new HashSet<long>();

    /// <summary>
    /// Creates one random unit vector of the given size together with a random Person.
    /// </summary>
    private static (float[], Person) Generate(int vector_size)
    {
        var vector = new float[vector_size];
        DefaultRandomGenerator.Instance.NextFloats(vector);
        VectorUtils.NormalizeSIMD(vector);
        var p = new Person();
        p.Age = _rnd.Next(15, 80);
        var gr = _rnd.Next(0, 3);
        p.Gender = (gr == 0) ? Gender.Male : (gr == 1) ? Gender.Feemale : Gender.Unknown;
        p.Number = CreateNumber(_rnd);
        return (vector, p);
    }

    /// <summary>
    /// Generates <paramref name="vectorsCount"/> (vector, person) pairs.
    /// </summary>
    public static List<(float[], Person)> GenerateRandom(int vectorSize, int vectorsCount)
    {
        // Presize: the final count is known up front.
        var vectors = new List<(float[], Person)>(vectorsCount);
        for (int i = 0; i < vectorsCount; i++)
        {
            vectors.Add(Generate(vectorSize));
        }
        return vectors;
    }

    /// <summary>
    /// Builds a unique pseudo phone number in the 796xx range; loops until
    /// HashSet.Add confirms the number was not issued before.
    /// </summary>
    private static long CreateNumber(Random rnd)
    {
        long start_number;
        do
        {
            start_number = 79600000000L;
            start_number = start_number + rnd.Next(4, 8) * 10000000;
            start_number += rnd.Next(0, 1000000);
        }
        while (_exists.Add(start_number) == false);
        return start_number;
    }
}
}

@ -1,35 +0,0 @@
using HNSWDemo.Tests;
using System;
using System.IO;
using ZeroLevel.HNSW;
namespace HNSWDemo
{
/// <summary>
/// Demo entry point: runs one of the ITest scenarios and waits for a key press.
/// </summary>
class Program
{
    static void Main(string[] args)
    {
        //new QuantizatorTest().Run();
        //new AutoClusteringMNISTTest().Run();
        new AccuracityTest().Run();
        Console.WriteLine("Completed");
        Console.ReadKey();
    }

    /// <summary>
    /// Extracts the trailing numeric suffix from file names of the form
    /// "..._M&lt;x&gt;_&lt;num&gt;" (e.g. "graph_M16_200" -> 200).
    /// Returns -1 when the name does not match that pattern.
    /// NOTE(review): currently unused by Main; kept for ad-hoc experiments.
    /// </summary>
    static int GetC(string file)
    {
        var name = Path.GetFileNameWithoutExtension(file);
        var index = name.IndexOf("_M");
        if (index > 0)
        {
            index = name.IndexOf("_", index + 2);
            if (index > 0)
            {
                // Substring(index + 1) is equivalent to the old
                // Substring(index + 1, name.Length - index - 1) but simpler.
                return int.Parse(name.Substring(index + 1));
            }
        }
        return -1;
    }
}
}

@ -1,9 +0,0 @@
{
"profiles": {
"HNSWDemo": {
"commandName": "Project",
"hotReloadEnabled": false,
"nativeDebugging": false
}
}
}

@ -1,76 +0,0 @@
using HNSWDemo.Utils;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using ZeroLevel.HNSW;
using ZeroLevel.Services.Mathemathics;
namespace HNSWDemo.Tests
{
/// <summary>
/// Measures recall of HNSW search against a brute-force baseline on random
/// vectors and reports per-query timings for both sides.
/// NOTE(review): the baseline uses Metrics.CosineDistance while the HNSW world
/// is built with DotProductDistance — confirm the mismatch is intentional (the
/// two agree only for normalized vectors).
/// </summary>
public class AccuracityTest
    : ITest
{
    // Neighbours requested per query.
    private static int K = 200;
    // Vectors inserted into the index.
    private static int count = 10000;
    // Number of query vectors.
    private static int testCount = 500;
    private static int dimensionality = 128;

    public void Run()
    {
        var totalHits = new List<int>();          // per-query overlap with ground truth
        var timewatchesNP = new List<float>();    // brute-force query times, ms
        var timewatchesHNSW = new List<float>();  // HNSW query times, ms
        var samples = VectorUtils.RandomVectors(dimensionality, count);
        var sw = new Stopwatch();
        var test = new VectorsDirectCompare(samples, Metrics.CosineDistance);
        var world = new SmallWorld<float[]>(NSWOptions<float[]>.Create(8, 16, 100, 100, (a, b) => (float)Metrics.DotProductDistance(a, b)));
        sw.Start();
        var ids = world.AddItems(samples.ToArray());
        sw.Stop();
        Console.WriteLine($"Insert {ids.Length} items: {sw.ElapsedMilliseconds} ms");
        Console.WriteLine("Start test");
        var test_vectors = VectorUtils.RandomVectors(dimensionality, testCount);
        foreach (var v in test_vectors)
        {
            sw.Restart();
            var gt = test.KNearest(v, K).ToDictionary(p => p.Item1, p => p.Item2);
            sw.Stop();
            timewatchesNP.Add(sw.ElapsedMilliseconds);
            sw.Restart();
            var result = world.Search(v, K);
            sw.Stop();
            timewatchesHNSW.Add(sw.ElapsedMilliseconds);
            // Count how many HNSW results appear in the exact K-nearest set.
            var hits = 0;
            foreach (var r in result)
            {
                if (gt.ContainsKey(r.Item1))
                {
                    hits++;
                }
            }
            totalHits.Add(hits);
        }
        // Min/Max use integer division (truncated to whole percent); Average is floating point.
        Console.WriteLine($"MIN Accuracity: {totalHits.Min() * 100 / K}%");
        Console.WriteLine($"AVG Accuracity: {totalHits.Average() * 100 / K}%");
        Console.WriteLine($"MAX Accuracity: {totalHits.Max() * 100 / K}%");
        Console.WriteLine($"MIN HNSW TIME: {timewatchesHNSW.Min()} ms");
        Console.WriteLine($"AVG HNSW TIME: {timewatchesHNSW.Average()} ms");
        Console.WriteLine($"MAX HNSW TIME: {timewatchesHNSW.Max()} ms");
        Console.WriteLine($"MIN NP TIME: {timewatchesNP.Min()} ms");
        Console.WriteLine($"AVG NP TIME: {timewatchesNP.Average()} ms");
        Console.WriteLine($"MAX NP TIME: {timewatchesNP.Max()} ms");
    }
}
}

@ -1,160 +0,0 @@
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using ZeroLevel.HNSW;
using ZeroLevel.HNSW.Services;
using ZeroLevel.Services.FileSystem;
using ZeroLevel.Services.Mathemathics;
namespace HNSWDemo.Tests
{
/// <summary>
/// Clusters MNIST test images (t10k set) with the HNSW graph clusterer, merges
/// down to at most 10 clusters, and writes each cluster's images as BMP files
/// into per-cluster folders under D:\Mnist. Also renders a histogram of graph
/// link distances to D:\histogram.jpg.
/// NOTE(review): paths are hard-coded; "t10k-images.idx3-ubyte" must sit next
/// to the executable.
/// </summary>
public class AutoClusteringMNISTTest
    : ITest
{
    // Histogram output image dimensions in pixels.
    private static int Width = 3000;
    private static int Height = 3000;

    // Copies the raw pixel rows into a new buffer row by row.
    // NOTE(review): currentStride and newStride are both `columns`, so this is
    // effectively a straight copy — the stride-padding logic it was named for is inert.
    private static byte[] PadLines(byte[] bytes, int rows, int columns)
    {
        int currentStride = columns;
        int newStride = columns;
        byte[] newBytes = new byte[newStride * rows];
        for (int i = 0; i < rows; i++)
            Buffer.BlockCopy(bytes, currentStride * i, newBytes, newStride * i, currentStride);
        return newBytes;
    }

    public void Run()
    {
        var folder = @"D:\Mnist";
        int columns = 28;
        int rows = 28;
        int imageCount, rowCount, colCount;
        var buf = new byte[4];
        var image = new byte[rows * columns];
        var vectors = new List<byte[]>();
        // Parse the IDX3 header (four big-endian int32 fields — hence the
        // buf.Reverse() before BitConverter) and then read raw 28x28 images.
        // NOTE(review): fs.Read return values are ignored; a short read would
        // silently corrupt the data.
        using (var fs = new FileStream("t10k-images.idx3-ubyte", FileMode.Open, FileAccess.Read, FileShare.None))
        {
            // first 4 bytes is a magic number
            fs.Read(buf, 0, 4);
            // second 4 bytes is the number of images
            fs.Read(buf, 0, 4);
            imageCount = BitConverter.ToInt32(buf.Reverse().ToArray(), 0);
            // third 4 bytes is the row count
            fs.Read(buf, 0, 4);
            rowCount = BitConverter.ToInt32(buf.Reverse().ToArray(), 0);
            // fourth 4 bytes is the column count
            fs.Read(buf, 0, 4);
            colCount = BitConverter.ToInt32(buf.Reverse().ToArray(), 0);
            for (int i = 0; i < imageCount; i++)
            {
                fs.Read(image, 0, image.Length);
                var v = new byte[image.Length];
                Array.Copy(image, v, image.Length);
                vectors.Add(v);
            }
        }
        var options = NSWOptions<byte[]>.Create(8, 16, 200, 200, Metrics.L2EuclideanDistance);
        SmallWorld<byte[]> world;
        // Reuse a previously built graph when cached; otherwise build and cache it.
        if (File.Exists("graph_mnist.bin"))
        {
            using (var fs = new FileStream("graph_mnist.bin", FileMode.Open, FileAccess.Read, FileShare.None))
            {
                world = SmallWorld.CreateWorldFrom<byte[]>(options, fs);
            }
        }
        else
        {
            world = SmallWorld.CreateWorld<byte[]>(options);
            world.AddItems(vectors);
            using (var fs = new FileStream("graph_mnist.bin", FileMode.Create, FileAccess.Write, FileShare.None))
            {
                world.Serialize(fs);
            }
        }
        var distance = new Func<int, int, float>((id1, id2) => Metrics.L2EuclideanDistance(world.GetVector(id1), world.GetVector(id2)));
        var links = world.GetLinks().SelectMany(pair => pair.Value.Select(p=> distance(pair.Key, p))).ToList();
        // NOTE(review): `exists` is computed but never used.
        var exists = links.Where(n => n > 0).ToArray();
        var histogram = new Histogram(HistogramMode.LOG, links);
        DrawHistogram(histogram, @"D:\histogram.jpg");
        var clusters = AutomaticGraphClusterer.DetectClusters(world);
        Console.WriteLine($"Found {clusters.Count} clusters");
        // Greedily merge the last cluster into the nearest remaining cluster
        // until at most 10 clusters are left.
        while (clusters.Count > 10)
        {
            var last = clusters[clusters.Count - 1];
            var testDistance = clusters[0].MinDistance(distance, last);
            var index = 0;
            for (int i = 1; i < clusters.Count - 1; i++)
            {
                var d = clusters[i].MinDistance(distance, last);
                if (d < testDistance)
                {
                    testDistance = d;
                    index = i;
                }
            }
            clusters[index].Merge(last);
            clusters.RemoveAt(clusters.Count - 1);
        }
        // Dump every cluster's images as 8bpp BMPs into D:\Mnist\NNN.
        for (int i = 0; i < clusters.Count; i++)
        {
            var ouput = Path.Combine(folder, i.ToString("D3"));
            FSUtils.CleanAndTestFolder(ouput);
            foreach (var v in clusters[i])
            {
                int stride = columns;
                byte[] newbytes = PadLines(world.GetVector(v), rows, columns);
                // NOTE(review): UnsafeAddrOfPinnedArrayElement is used on an array that
                // is not actually pinned — works in practice only because the Bitmap is
                // created and saved before any GC move could happen; confirm.
                using (var im = new Bitmap(columns, rows, stride, PixelFormat.Format8bppIndexed, Marshal.UnsafeAddrOfPinnedArrayElement(newbytes, 0)))
                {
                    im.Save(Path.Combine(ouput, $"{v}.bmp"));
                }
            }
            Console.WriteLine($"Cluster {i + 1} countains {clusters[i].Count} items");
        }
    }

    // Renders the histogram: blue bars for bins, red bars for local maxima,
    // and a green vertical line at the cut-off threshold bin.
    static void DrawHistogram(Histogram histogram, string filename)
    {
        var wb = Width / histogram.Values.Length;  // bar width in px (integer division)
        var k = ((float)Height) / (float)histogram.Values.Max();  // vertical scale factor
        var maxes = histogram.GetMaximums().ToDictionary(m => m.Index, m => m);
        int threshold = histogram.CuttOff();
        using (var bmp = new Bitmap(Width, Height))
        {
            using (var g = Graphics.FromImage(bmp))
            {
                for (int i = 0; i < histogram.Values.Length; i++)
                {
                    var height = (int)(histogram.Values[i] * k);
                    if (maxes.ContainsKey(i))
                    {
                        g.DrawRectangle(Pens.Red, i * wb, bmp.Height - height, wb, height);
                        g.DrawRectangle(Pens.Red, i * wb + 1, bmp.Height - height, wb - 1, height);
                    }
                    else
                    {
                        g.DrawRectangle(Pens.Blue, i * wb, bmp.Height - height, wb, height);
                    }
                    if (i == threshold)
                    {
                        g.DrawLine(Pens.Green, i * wb + wb / 2, 0, i * wb + wb / 2, bmp.Height);
                    }
                }
            }
            bmp.Save(filename);
        }
    }
}
}

@ -1,27 +0,0 @@
using System;
using ZeroLevel.HNSW;
using ZeroLevel.HNSW.Services;
using ZeroLevel.Services.Mathemathics;
namespace HNSWDemo.Tests
{
/// <summary>
/// Builds an HNSW world over random vectors and prints the clusters found by
/// the automatic graph clusterer.
/// </summary>
public class AutoClusteringTest
    : ITest
{
    private static int Count = 3000;
    private static int Dimensionality = 128;

    public void Run()
    {
        var vectors = VectorUtils.RandomVectors(Dimensionality, Count);
        var world = SmallWorld.CreateWorld<float[]>(NSWOptions<float[]>.Create(8, 16, 200, 200, Metrics.L2EuclideanDistance));
        world.AddItems(vectors);
        var clusters = AutomaticGraphClusterer.DetectClusters(world);
        Console.WriteLine($"Found {clusters.Count} clusters");
        for (int i = 0; i < clusters.Count; i++)
        {
            // Fixed typo in the output message ("countains" -> "contains").
            Console.WriteLine($"Cluster {i + 1} contains {clusters[i].Count} items");
        }
    }
}
}

@ -1,57 +0,0 @@
using HNSWDemo.Model;
using System;
using System.Linq;
using ZeroLevel.HNSW;
namespace HNSWDemo.Tests
{
/// <summary>
/// Verifies filtered HNSW search: builds a world over random Person vectors,
/// restricts the search context to women with 20 &lt; Age &lt; 50 and checks that
/// every returned record actually satisfies that filter.
/// </summary>
public class FilterTest
    : ITest
{
    private const int count = 3000;       // persons in the index
    private const int testCount = 100;    // query vectors
    private const int dimensionality = 128;

    public void Run()
    {
        // Maps external person numbers to internal HNSW node ids.
        var map = new HNSWMap<long>();
        var samples = Person.GenerateRandom(dimensionality, count);
        var testDict = samples.ToDictionary(s => s.Item2.Number, s => s.Item2);
        var world = new SmallWorld<float[]>(NSWOptions<float[]>.Create(6, 15, 200, 200, CosineDistance.ForUnits));
        var ids = world.AddItems(samples.Select(i => i.Item1).ToArray());
        // AddItems returns ids in insertion order, so ids[bi] belongs to samples[bi].
        for (int bi = 0; bi < samples.Count; bi++)
        {
            map.Append(samples[bi].Item2.Number, ids[bi]);
        }
        Console.WriteLine("Start test");
        int K = 200;
        var vectors = VectorUtils.RandomVectors(dimensionality, testCount);
        // Active-node filter: only matching persons are searchable.
        var context = new SearchContext()
            .SetActiveNodes(map
                .ConvertFeaturesToIds(samples
                    .Where(p => p.Item2.Age > 20 && p.Item2.Age < 50 && p.Item2.Gender == Gender.Feemale)
                    .Select(p => p.Item2.Number)));
        var hits = 0;
        var miss = 0;
        foreach (var v in vectors)
        {
            var numbers = map.ConvertIdsToFeatures(world.Search(v, K, context).Select(r => r.Item1));
            foreach (var r in numbers)
            {
                var record = testDict[r];
                // Every result must satisfy the filter (unless filtering is disabled).
                if (context.NodeCheckMode == Mode.None || (record.Gender == Gender.Feemale && record.Age > 20 && record.Age < 50))
                {
                    hits++;
                }
                else
                {
                    miss++;
                }
            }
        }
        Console.WriteLine($"SUCCESS: {hits}");
        Console.WriteLine($"ERROR: {miss}");
    }
}
}

@ -1,82 +0,0 @@
using System;
using System.Drawing;
using System.IO;
using System.Linq;
using ZeroLevel.HNSW;
using ZeroLevel.Services.Mathemathics;
namespace HNSWDemo.Tests
{
/// <summary>
/// Builds an HNSW graph over random vectors, histograms its edge lengths and
/// renders the histogram (detected maxima + cut-off threshold) to a JPEG
/// under D:\hist.
/// </summary>
public class HistogramTest
    : ITest
{
    private static int Count = 3000;
    private static int Dimensionality = 128;
    // Output image size in pixels.
    private static int Width = 2440;
    private static int Height = 1920;

    public void Run()
    {
        Create(Dimensionality, @"D:\hist");
        // Process.Start("explorer", $"D:\\hist{Dimensionality.ToString("D3")}.jpg");
        /* for (int i = 12; i < 512; i++)
        {
            Create(i, @"D:\hist");
        }*/
    }

    // Builds the graph for the given dimensionality and draws its edge-length histogram.
    private void Create(int dim, string output)
    {
        var vectors = VectorUtils.RandomVectors(dim, Count);
        var world = SmallWorld.CreateWorld<float[]>(NSWOptions<float[]>.Create(8, 16, 200, 200, Metrics.L2EuclideanDistance));
        world.AddItems(vectors);
        var distance = new Func<int, int, float>((id1, id2) => Metrics.L2EuclideanDistance(world.GetVector(id1), world.GetVector(id2)));
        var weights = world.GetLinks().SelectMany(pair => pair.Value.Select(id => distance(pair.Key, id)));
        var histogram = new Histogram(HistogramMode.SQRT, weights);
        histogram.Smooth();
        // R = midpoint of the bin at the cut-off threshold; computed for inspection only.
        // NOTE(review): if CuttOff() can return 0, Bounds[threshold - 1] is out of range — confirm.
        int threshold = histogram.CuttOff();
        var min = histogram.Bounds[threshold - 1];
        var max = histogram.Bounds[threshold];
        var R = (max + min) / 2;
        DrawHistogram(histogram, Path.Combine(output, $"hist{dim.ToString("D3")}.jpg"));
    }

    // Renders the histogram: blue bars for bins, red bars for local maxima,
    // and a green vertical line at the cut-off threshold bin.
    static void DrawHistogram(Histogram histogram, string filename)
    {
        var wb = Width / histogram.Values.Length;  // bar width in px (integer division)
        var k = ((float)Height) / (float)histogram.Values.Max();  // vertical scale factor
        var maxes = histogram.GetMaximums().ToDictionary(m => m.Index, m => m);
        int threshold = histogram.CuttOff();
        using (var bmp = new Bitmap(Width, Height))
        {
            using (var g = Graphics.FromImage(bmp))
            {
                for (int i = 0; i < histogram.Values.Length; i++)
                {
                    var height = (int)(histogram.Values[i] * k);
                    if (maxes.ContainsKey(i))
                    {
                        g.DrawRectangle(Pens.Red, i * wb, bmp.Height - height, wb, height);
                        g.DrawRectangle(Pens.Red, i * wb + 1, bmp.Height - height, wb - 1, height);
                    }
                    else
                    {
                        g.DrawRectangle(Pens.Blue, i * wb, bmp.Height - height, wb, height);
                    }
                    if (i == threshold)
                    {
                        g.DrawLine(Pens.Green, i * wb + wb / 2, 0, i * wb + wb / 2, bmp.Height);
                    }
                }
            }
            bmp.Save(filename);
        }
    }
}
}

@ -1,7 +0,0 @@
namespace HNSWDemo.Tests
{
    /// <summary>
    /// Common entry point for all runnable demo scenarios.
    /// </summary>
    public interface ITest
    {
        // Executes the scenario; results are reported via console output.
        void Run();
    }
}

@ -1,29 +0,0 @@
using System;
using System.Diagnostics;
using ZeroLevel.HNSW;
using ZeroLevel.Services.Mathemathics;
namespace HNSWDemo.Tests
{
/// <summary>
/// Repeatedly inserts batches of random vectors into a single growing world and
/// logs the elapsed time of every batch, to reveal insert-time degradation as
/// the index grows.
/// </summary>
public class InsertTimeExplosionTest
    : ITest
{
    private static int Count = 10000;        // vectors per batch
    private static int IterationCount = 100; // number of batches
    private static int Dimensionality = 128;

    public void Run()
    {
        var world = new SmallWorld<float[]>(NSWOptions<float[]>.Create(6, 12, 100, 100, Metrics.CosineDistance));
        var stopwatch = new Stopwatch();
        for (int iteration = 0; iteration < IterationCount; iteration++)
        {
            var batch = VectorUtils.RandomVectors(Dimensionality, Count).ToArray();
            stopwatch.Restart();
            var insertedIds = world.AddItems(batch);
            stopwatch.Stop();
            Console.WriteLine($"ITERATION: [{iteration.ToString("D4")}] COUNT: [{insertedIds.Length}] ELAPSED [{stopwatch.ElapsedMilliseconds} ms]");
        }
    }
}
}

@ -1,121 +0,0 @@
using HNSWDemo.Model;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using ZeroLevel.HNSW;
using ZeroLevel.Services.Mathemathics;
namespace HNSWDemo.Tests
{
/// <summary>
/// Demonstrates look-a-like (LAL) search over sharded HNSW graphs: persons are
/// partitioned by Number hash into `moda` shards, each shard gets its own graph
/// and id map, the combined structure is cached on disk, and a combined KNN
/// query is run from a set of random entry points.
/// </summary>
internal class LALTest
    : ITest
{
    private const int count = 20000;
    private const int dimensionality = 128;
    // On-disk cache files; reused on subsequent runs.
    private const string _graphFileCachee = @"lal_test_graph.bin";
    private const string _mapFileCachee = @"lal_test_map.bin";

    public void Run()
    {
        // Number of shards; persons are distributed by Number hash modulo moda.
        var moda = 3;
        var persons = Person.GenerateRandom(dimensionality, count);
        var samples = new Dictionary<int, List<(float[], Person)>>();
        var options = NSWOptions<float[]>.Create(6, 8, 100, 100, Metrics.CosineDistance);
        foreach (var p in persons)
        {
            var c = (int)Math.Abs(p.Item2.Number.GetHashCode() % moda);
            if (samples.ContainsKey(c) == false) samples.Add(c, new List<(float[], Person)>());
            samples[c].Add(p);
        }
        SplittedLALGraph worlds;
        HNSWMappers<long> mappers;
        if (File.Exists(_graphFileCachee) && File.Exists(_mapFileCachee))
        {
            // Fast path: load cached graph and mappers. The shard selector here
            // must match the one used when the cache was built.
            worlds = new SplittedLALGraph(_graphFileCachee);
            mappers = new HNSWMappers<long>(_mapFileCachee, l => (int)Math.Abs(l.GetHashCode() % moda));
        }
        else
        {
            // Build one SmallWorld + HNSWMap per shard, then convert each world
            // into a LAL graph by round-tripping through a temp serialization file.
            worlds = new SplittedLALGraph();
            mappers = new HNSWMappers<long>(l => (int)Math.Abs(l.GetHashCode() % moda));
            var worlds_dict = new Dictionary<int, SmallWorld<float[]>>();
            var maps_dict = new Dictionary<int, HNSWMap<long>>();
            foreach (var p in samples)
            {
                var c = p.Key;
                if (worlds_dict.ContainsKey(c) == false)
                {
                    worlds_dict.Add(c, new SmallWorld<float[]>(options));
                }
                if (maps_dict.ContainsKey(c) == false)
                {
                    maps_dict.Add(c, new HNSWMap<long>());
                }
                var w = worlds_dict[c];
                var m = maps_dict[c];
                var ids = w.AddItems(p.Value.Select(i => i.Item1));
                // ids come back in insertion order, so ids[i] belongs to p.Value[i].
                for (int i = 0; i < ids.Length; i++)
                {
                    m.Append(p.Value[i].Item2.Number, ids[i]);
                }
            }
            var name = Guid.NewGuid().ToString();
            foreach (var p in samples)
            {
                var c = p.Key;
                var w = worlds_dict[c];
                var m = maps_dict[c];
                using (var s = File.Create(name))
                {
                    w.Serialize(s);
                }
                using (var s = File.OpenRead(name))
                {
                    var l = LALGraph.FromHNSWGraph<float[]>(s);
                    worlds.Append(l, c);
                }
                File.Delete(name);
                mappers.Append(m, c);
            }
            worlds.Save(_graphFileCachee);
            mappers.Save(_mapFileCachee);
        }
        // Pick 10 random persons as entry points and run a combined KNN query.
        var entries = new long[10];
        for (int i = 0; i < entries.Length; i++)
        {
            entries[i] = persons[DefaultRandomGenerator.Instance.Next(0, persons.Count - 1)].Item2.Number;
        }
        var contexts = mappers.CreateContext(null, entries);
        var result = worlds.KNearest(5000, contexts);
        Console.WriteLine($"Found: {result.Sum(r=>r.Value.Count)}");
        /*Console.WriteLine("Entries:");
        foreach (var n in entries)
        {
            Console.WriteLine($"\t{n}");
        }
        Console.WriteLine("Extensions:");
        foreach (var r in result)
        {
            foreach (var n in mappers.ConvertIdsToFeatures(r.Key, r.Value))
            {
                Console.WriteLine($"\t[{n}]");
            }
        }*/
    }
}
}

@ -1,44 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using ZeroLevel.HNSW;
using ZeroLevel.HNSW.Services;
using ZeroLevel.Services.Mathemathics;
namespace HNSWDemo.Tests
{
/// <summary>
/// Measures how much vector quantization distorts cosine distances: quantizes
/// random vectors and reports min/avg/max absolute difference between the float
/// and quantized distances of consecutive sample pairs.
/// </summary>
public class QuantizatorTest
    : ITest
{
    private static int Count = 500000;
    private static int Dimensionality = 221;

    public void Run()
    {
        var samples = VectorUtils.RandomVectors(Dimensionality, Count);
        // Single pass over all Count*Dimensionality values. The previous
        // SelectMany(...).Min() / SelectMany(...).Max() enumerated the ~110M
        // floats twice just to get the range.
        var min = float.MaxValue;
        var max = float.MinValue;
        foreach (var sample in samples)
        {
            foreach (var value in sample)
            {
                if (value < min) min = value;
                if (value > max) max = value;
            }
        }
        var q = new Quantizator(min, max);
        var q_samples = samples.Select(s => q.QuantizeToInt(s)).ToArray();
        // Compare float vs quantized distance for each consecutive pair.
        var list = new List<float>();
        for (int i = 0; i < samples.Count - 1; i++)
        {
            var v1 = samples[i];
            var v2 = samples[i + 1];
            var dist = Metrics.CosineDistance(v1, v2);
            var qv1 = q_samples[i];
            var qv2 = q_samples[i + 1];
            var qdist = Metrics.CosineDistance(qv1, qv2);
            list.Add(Math.Abs(dist - qdist));
        }
        Console.WriteLine($"Min diff: {list.Min()}");
        Console.WriteLine($"Avg diff: {list.Average()}");
        Console.WriteLine($"Max diff: {list.Max()}");
    }
}
}

@ -1,80 +0,0 @@
using HNSWDemo.Utils;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using ZeroLevel.HNSW;
using ZeroLevel.HNSW.Services;
using ZeroLevel.Services.Mathemathics;
namespace HNSWDemo.Tests
{
/// <summary>
/// Measures recall of an HNSW index built over long-quantized vectors against a
/// float brute-force baseline, plus per-query timings for both sides.
/// NOTE(review): ground truth is computed on the original float vectors while
/// the index searches quantized ones, so the reported accuracy includes
/// quantization loss by design.
/// </summary>
public class QuantizeAccuracityTest
    : ITest
{
    private static int Count = 5000;
    private static int Dimensionality = 128;
    // Neighbours requested per query.
    private static int K = 200;
    private static int TestCount =500;

    public void Run()
    {
        var totalHits = new List<int>();          // per-query overlap with ground truth
        var timewatchesNP = new List<float>();    // brute-force query times, ms
        var timewatchesHNSW = new List<float>();  // HNSW query times, ms
        var q = new Quantizator(-1f, 1f);
        var s = VectorUtils.RandomVectors(Dimensionality, Count);
        var samples = s.Select(v => q.QuantizeToLong(v)).ToList();
        var sw = new Stopwatch();
        var test = new VectorsDirectCompare(s, Metrics.CosineDistance);
        var world = new SmallWorld<long[]>(NSWOptions<long[]>.Create(6, 8, 100, 100, Metrics.CosineDistance));
        sw.Start();
        var ids = world.AddItems(samples.ToArray());
        sw.Stop();
        Console.WriteLine($"Insert {ids.Length} items: {sw.ElapsedMilliseconds} ms");
        Console.WriteLine("Start test");
        var tv = VectorUtils.RandomVectors(Dimensionality, TestCount);
        var test_vectors = tv.Select(v => q.QuantizeToLong(v)).ToList();
        for (int i = 0; i < tv.Count; i++)
        {
            sw.Restart();
            var gt = test.KNearest(tv[i], K).ToDictionary(p => p.Item1, p => p.Item2);
            sw.Stop();
            timewatchesNP.Add(sw.ElapsedMilliseconds);
            sw.Restart();
            var result = world.Search(test_vectors[i], K);
            sw.Stop();
            timewatchesHNSW.Add(sw.ElapsedMilliseconds);
            // Count HNSW results present in the exact K-nearest set.
            var hits = 0;
            foreach (var r in result)
            {
                if (gt.ContainsKey(r.Item1))
                {
                    hits++;
                }
            }
            totalHits.Add(hits);
        }
        // Min/Max use integer division (whole percent); Average is floating point.
        Console.WriteLine($"MIN Accuracity: {totalHits.Min() * 100 / K}%");
        Console.WriteLine($"AVG Accuracity: {totalHits.Average() * 100 / K}%");
        Console.WriteLine($"MAX Accuracity: {totalHits.Max() * 100 / K}%");
        Console.WriteLine($"MIN HNSW TIME: {timewatchesHNSW.Min()} ms");
        Console.WriteLine($"AVG HNSW TIME: {timewatchesHNSW.Average()} ms");
        Console.WriteLine($"MAX HNSW TIME: {timewatchesHNSW.Max()} ms");
        Console.WriteLine($"MIN NP TIME: {timewatchesNP.Min()} ms");
        Console.WriteLine($"AVG NP TIME: {timewatchesNP.Average()} ms");
        Console.WriteLine($"MAX NP TIME: {timewatchesNP.Max()} ms");
    }
}
}

@ -1,72 +0,0 @@
using System;
using System.Drawing;
using System.Linq;
using ZeroLevel.HNSW;
using ZeroLevel.HNSW.Services;
using ZeroLevel.Services.Mathemathics;
namespace HNSWDemo.Tests
{
/// <summary>
/// Same as HistogramTest but over long-quantized vectors with cosine distance:
/// builds the graph, histograms its edge lengths and renders the result to
/// D:\hist.jpg.
/// </summary>
public class QuantizeHistogramTest
    : ITest
{
    private static int Count = 3000;
    private static int Dimensionality = 128;
    // Output image size in pixels.
    private static int Width = 3000;
    private static int Height = 3000;

    public void Run()
    {
        var vectors = VectorUtils.RandomVectors(Dimensionality, Count);
        var q = new Quantizator(-1f, 1f);
        var world = SmallWorld.CreateWorld<long[]>(NSWOptions<long[]>.Create(8, 16, 200, 200, Metrics.CosineDistance));
        world.AddItems(vectors.Select(v => q.QuantizeToLong(v)).ToList());
        var distance = new Func<int, int, float>((id1, id2) => Metrics.CosineDistance(world.GetVector(id1), world.GetVector(id2)));
        var weights = world.GetLinks().SelectMany(pair => pair.Value.Select(id => distance(pair.Key, id)));
        var histogram = new Histogram(HistogramMode.SQRT, weights);
        histogram.Smooth();
        // R = midpoint of the bin at the cut-off threshold; computed for inspection only.
        // NOTE(review): if CuttOff() can return 0, Bounds[threshold - 1] is out of range — confirm.
        int threshold = histogram.CuttOff();
        var min = histogram.Bounds[threshold - 1];
        var max = histogram.Bounds[threshold];
        var R = (max + min) / 2;
        DrawHistogram(histogram, @"D:\hist.jpg");
    }

    // Renders the histogram: blue bars for bins, red bars for local maxima,
    // and a green vertical line at the cut-off threshold bin.
    static void DrawHistogram(Histogram histogram, string filename)
    {
        var wb = Width / histogram.Values.Length;  // bar width in px (integer division)
        var k = ((float)Height) / (float)histogram.Values.Max();  // vertical scale factor
        var maxes = histogram.GetMaximums().ToDictionary(m => m.Index, m => m);
        int threshold = histogram.CuttOff();
        using (var bmp = new Bitmap(Width, Height))
        {
            using (var g = Graphics.FromImage(bmp))
            {
                for (int i = 0; i < histogram.Values.Length; i++)
                {
                    var height = (int)(histogram.Values[i] * k);
                    if (maxes.ContainsKey(i))
                    {
                        g.DrawRectangle(Pens.Red, i * wb, bmp.Height - height, wb, height);
                        g.DrawRectangle(Pens.Red, i * wb + 1, bmp.Height - height, wb - 1, height);
                    }
                    else
                    {
                        g.DrawRectangle(Pens.Blue, i * wb, bmp.Height - height, wb, height);
                    }
                    if (i == threshold)
                    {
                        g.DrawLine(Pens.Green, i * wb + wb / 2, 0, i * wb + wb / 2, bmp.Height);
                    }
                }
            }
            bmp.Save(filename);
        }
    }
}
}

@ -1,32 +0,0 @@
using System;
using System.Diagnostics;
using System.Linq;
using ZeroLevel.HNSW;
using ZeroLevel.HNSW.Services;
using ZeroLevel.Services.Mathemathics;
namespace HNSWDemo.Tests
{
/// <summary>
/// Same as InsertTimeExplosionTest but with long-quantized vectors: repeatedly
/// inserts batches into one growing world and logs the per-batch insert time.
/// </summary>
public class QuantizeInsertTimeExplosionTest
    : ITest
{
    private static int Count = 10000;        // vectors per batch
    private static int IterationCount = 100; // number of batches
    private static int Dimensionality = 128;

    public void Run()
    {
        var sw = new Stopwatch();
        var world = new SmallWorld<long[]>(NSWOptions<long[]>.Create(6, 12, 100, 100, Metrics.CosineDistance));
        var q = new Quantizator(-1f, 1f);
        for (int i = 0; i < IterationCount; i++)
        {
            var samples = VectorUtils.RandomVectors(Dimensionality, Count);
            sw.Restart();
            var ids = world.AddItems(samples.Select(v => q.QuantizeToLong(v)).ToArray());
            sw.Stop();
            Console.WriteLine($"ITERATION: [{i.ToString("D4")}] COUNT: [{ids.Length}] ELAPSED [{sw.ElapsedMilliseconds} ms]");
        }
    }
}
}

@ -1,52 +0,0 @@
using System;
using System.Diagnostics;
using System.IO;
using ZeroLevel.HNSW;
namespace HNSWDemo.Tests
{
/// <summary>
/// Round-trip test: serialize a populated world, deserialize it into a new
/// instance, serialize again and verify both dumps are identical.
/// (Previously only the dump lengths were compared, which could miss
/// same-length corruption.)
/// </summary>
public class SaveRestoreTest
    : ITest
{
    private static int Count = 1000;
    private static int Dimensionality = 128;

    public void Run()
    {
        var samples = VectorUtils.RandomVectors(Dimensionality, Count);
        var world = new SmallWorld<float[]>(NSWOptions<float[]>.Create(6, 15, 200, 200, CosineDistance.ForUnits));
        var sw = new Stopwatch();
        sw.Start();
        var ids = world.AddItems(samples.ToArray());
        sw.Stop();
        Console.WriteLine($"Insert {ids.Length} items on {sw.ElapsedMilliseconds} ms");
        Console.WriteLine("Start test");
        byte[] dump;
        using (var ms = new MemoryStream())
        {
            world.Serialize(ms);
            dump = ms.ToArray();
        }
        Console.WriteLine($"Full dump size: {dump.Length} bytes");
        byte[] testDump;
        var restoredWorld = new SmallWorld<float[]>(NSWOptions<float[]>.Create(6, 15, 200, 200, CosineDistance.ForUnits));
        using (var ms = new MemoryStream(dump))
        {
            restoredWorld.Deserialize(ms);
        }
        using (var ms = new MemoryStream())
        {
            restoredWorld.Serialize(ms);
            testDump = ms.ToArray();
        }
        if (testDump.Length != dump.Length)
        {
            Console.WriteLine($"Incorrect restored size. Got {testDump.Length}. Expected: {dump.Length}");
            return;
        }
        // Byte-level comparison, not just the length.
        for (int i = 0; i < dump.Length; i++)
        {
            if (dump[i] != testDump[i])
            {
                Console.WriteLine($"Incorrect restored data. Dumps differ at byte {i}");
                return;
            }
        }
    }
}
}

@ -1,95 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using ZeroLevel.HNSW;
namespace HNSWDemo.Utils
{
/// <summary>
/// Brute-force nearest-neighbour baseline over a fixed set of long-quantized
/// vectors, used as ground truth when measuring HNSW quality.
/// </summary>
public class QLVectorsDirectCompare
{
    private const int HALF_LONG_BITS = 32;
    private readonly IList<long[]> _vectors;
    private readonly Func<long[], long[], float> _distance;

    public QLVectorsDirectCompare(List<long[]> vectors, Func<long[], long[], float> distance)
    {
        _vectors = vectors;
        _distance = distance;
    }

    /// <summary>
    /// Returns the k vectors nearest to <paramref name="v"/> as (index, distance)
    /// pairs, ordered by ascending distance. O(n) distance evaluations per query.
    /// </summary>
    public IEnumerable<(int, float)> KNearest(long[] v, int k)
    {
        var weights = new Dictionary<int, float>();
        for (int i = 0; i < _vectors.Count; i++)
        {
            var d = _distance(v, _vectors[i]);
            weights[i] = d;
        }
        return weights.OrderBy(p => p.Value).Take(k).Select(p => (p.Key, p.Value));
    }

    /// <summary>
    /// Splits the vector set into clusters of items whose pairwise distance falls
    /// below an automatically derived threshold R (connected components over the
    /// retained links).
    /// </summary>
    public List<HashSet<int>> DetectClusters()
    {
        // Pair (i, j) packed into one long key: i in the high 32 bits, j in the low.
        var links = new SortedList<long, float>();
        for (int i = 0; i < _vectors.Count; i++)
        {
            for (int j = i + 1; j < _vectors.Count; j++)
            {
                long k = (((long)(i)) << HALF_LONG_BITS) + j;
                links.Add(k, _distance(_vectors[i], _vectors[j]));
            }
        }
        // 1. Find R - bound between intra-cluster distances and out-of-cluster distances
        var histogram = new Histogram(HistogramMode.SQRT, links.Values);
        int threshold = histogram.CuttOff();
        var min = histogram.Bounds[threshold - 1];
        var max = histogram.Bounds[threshold];
        var R = (max + min) / 2;
        // 2. Get links with distances less than R
        var resultLinks = new SortedList<long, float>();
        foreach (var pair in links)
        {
            if (pair.Value < R)
            {
                resultLinks.Add(pair.Key, pair.Value);
            }
        }
        // 3. Extract clusters
        List<HashSet<int>> clusters = new List<HashSet<int>>();
        foreach (var pair in resultLinks)
        {
            var k = pair.Key;
            var id1 = (int)(k >> HALF_LONG_BITS);
            var id2 = (int)(k - (((long)id1) << HALF_LONG_BITS));
            var c1 = clusters.FindIndex(c => c.Contains(id1));
            var c2 = clusters.FindIndex(c => c.Contains(id2));
            if (c1 < 0 && c2 < 0)
            {
                clusters.Add(new HashSet<int> { id1, id2 });
            }
            else if (c1 >= 0 && c2 < 0)
            {
                clusters[c1].Add(id2);
            }
            else if (c1 < 0 && c2 >= 0)
            {
                clusters[c2].Add(id1);
            }
            else if (c1 != c2)
            {
                // Bug fix: the previous version only added the new id to the first
                // matching cluster, so two clusters joined by a link were never
                // merged and an id could end up in several clusters at once.
                clusters[c1].UnionWith(clusters[c2]);
                clusters.RemoveAt(c2);
            }
        }
        return clusters;
    }
}
}

@ -1,95 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using ZeroLevel.HNSW;
namespace HNSWDemo.Utils
{
/// <summary>
/// Brute-force nearest-neighbour baseline over a fixed set of byte-quantized
/// vectors, used as ground truth when measuring HNSW quality.
/// </summary>
public class QVectorsDirectCompare
{
    private const int HALF_LONG_BITS = 32;
    private readonly IList<byte[]> _vectors;
    private readonly Func<byte[], byte[], float> _distance;

    public QVectorsDirectCompare(List<byte[]> vectors, Func<byte[], byte[], float> distance)
    {
        _vectors = vectors;
        _distance = distance;
    }

    /// <summary>
    /// Returns the k vectors nearest to <paramref name="v"/> as (index, distance)
    /// pairs, ordered by ascending distance. O(n) distance evaluations per query.
    /// </summary>
    public IEnumerable<(int, float)> KNearest(byte[] v, int k)
    {
        var weights = new Dictionary<int, float>();
        for (int i = 0; i < _vectors.Count; i++)
        {
            var d = _distance(v, _vectors[i]);
            weights[i] = d;
        }
        return weights.OrderBy(p => p.Value).Take(k).Select(p => (p.Key, p.Value));
    }

    /// <summary>
    /// Splits the vector set into clusters of items whose pairwise distance falls
    /// below an automatically derived threshold R (connected components over the
    /// retained links).
    /// </summary>
    public List<HashSet<int>> DetectClusters()
    {
        // Pair (i, j) packed into one long key: i in the high 32 bits, j in the low.
        var links = new SortedList<long, float>();
        for (int i = 0; i < _vectors.Count; i++)
        {
            for (int j = i + 1; j < _vectors.Count; j++)
            {
                long k = (((long)i) << HALF_LONG_BITS) + j;
                links.Add(k, _distance(_vectors[i], _vectors[j]));
            }
        }
        // 1. Find R - bound between intra-cluster distances and out-of-cluster distances
        var histogram = new Histogram(HistogramMode.SQRT, links.Values);
        int threshold = histogram.CuttOff();
        var min = histogram.Bounds[threshold - 1];
        var max = histogram.Bounds[threshold];
        var R = (max + min) / 2;
        // 2. Get links with distances less than R
        var resultLinks = new SortedList<long, float>();
        foreach (var pair in links)
        {
            if (pair.Value < R)
            {
                resultLinks.Add(pair.Key, pair.Value);
            }
        }
        // 3. Extract clusters
        List<HashSet<int>> clusters = new List<HashSet<int>>();
        foreach (var pair in resultLinks)
        {
            var k = pair.Key;
            var id1 = (int)(k >> HALF_LONG_BITS);
            var id2 = (int)(k - (((long)id1) << HALF_LONG_BITS));
            var c1 = clusters.FindIndex(c => c.Contains(id1));
            var c2 = clusters.FindIndex(c => c.Contains(id2));
            if (c1 < 0 && c2 < 0)
            {
                clusters.Add(new HashSet<int> { id1, id2 });
            }
            else if (c1 >= 0 && c2 < 0)
            {
                clusters[c1].Add(id2);
            }
            else if (c1 < 0 && c2 >= 0)
            {
                clusters[c2].Add(id1);
            }
            else if (c1 != c2)
            {
                // Bug fix: the previous version only added the new id to the first
                // matching cluster, so two clusters joined by a link were never
                // merged and an id could end up in several clusters at once.
                clusters[c1].UnionWith(clusters[c2]);
                clusters.RemoveAt(c2);
            }
        }
        return clusters;
    }
}
}

@ -1,95 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using ZeroLevel.HNSW;
namespace HNSWDemo.Utils
{
/// <summary>
/// Brute-force nearest-neighbour baseline over a fixed set of float vectors,
/// used as ground truth when measuring HNSW quality.
/// </summary>
public class VectorsDirectCompare
{
    private const int HALF_LONG_BITS = 32;
    private readonly IList<float[]> _vectors;
    private readonly Func<float[], float[], float> _distance;

    public VectorsDirectCompare(List<float[]> vectors, Func<float[], float[], float> distance)
    {
        _vectors = vectors;
        _distance = distance;
    }

    /// <summary>
    /// Returns the k vectors nearest to <paramref name="v"/> as (index, distance)
    /// pairs, ordered by ascending distance. O(n) distance evaluations per query.
    /// </summary>
    public IEnumerable<(int, float)> KNearest(float[] v, int k)
    {
        var weights = new Dictionary<int, float>();
        for (int i = 0; i < _vectors.Count; i++)
        {
            var d = _distance(v, _vectors[i]);
            weights[i] = d;
        }
        return weights.OrderBy(p => p.Value).Take(k).Select(p => (p.Key, p.Value));
    }

    /// <summary>
    /// Splits the vector set into clusters of items whose pairwise distance falls
    /// below an automatically derived threshold R (connected components over the
    /// retained links).
    /// </summary>
    public List<HashSet<int>> DetectClusters()
    {
        // Pair (i, j) packed into one long key: i in the high 32 bits, j in the low.
        var links = new SortedList<long, float>();
        for (int i = 0; i < _vectors.Count; i++)
        {
            for (int j = i + 1; j < _vectors.Count; j++)
            {
                long k = (((long)(i)) << HALF_LONG_BITS) + j;
                links.Add(k, _distance(_vectors[i], _vectors[j]));
            }
        }
        // 1. Find R - bound between intra-cluster distances and out-of-cluster distances
        var histogram = new Histogram(HistogramMode.SQRT, links.Values);
        int threshold = histogram.CuttOff();
        var min = histogram.Bounds[threshold - 1];
        var max = histogram.Bounds[threshold];
        var R = (max + min) / 2;
        // 2. Get links with distances less than R
        var resultLinks = new SortedList<long, float>();
        foreach (var pair in links)
        {
            if (pair.Value < R)
            {
                resultLinks.Add(pair.Key, pair.Value);
            }
        }
        // 3. Extract clusters
        List<HashSet<int>> clusters = new List<HashSet<int>>();
        foreach (var pair in resultLinks)
        {
            var k = pair.Key;
            var id1 = (int)(k >> HALF_LONG_BITS);
            var id2 = (int)(k - (((long)id1) << HALF_LONG_BITS));
            var c1 = clusters.FindIndex(c => c.Contains(id1));
            var c2 = clusters.FindIndex(c => c.Contains(id2));
            if (c1 < 0 && c2 < 0)
            {
                clusters.Add(new HashSet<int> { id1, id2 });
            }
            else if (c1 >= 0 && c2 < 0)
            {
                clusters[c1].Add(id2);
            }
            else if (c1 < 0 && c2 >= 0)
            {
                clusters[c2].Add(id1);
            }
            else if (c1 != c2)
            {
                // Bug fix: the previous version only added the new id to the first
                // matching cluster, so two clusters joined by a link were never
                // merged and an id could end up in several clusters at once.
                clusters[c1].UnionWith(clusters[c2]);
                clusters.RemoveAt(c2);
            }
        }
        return clusters;
    }
}
}

@ -2,7 +2,7 @@
<PropertyGroup> <PropertyGroup>
<OutputType>Exe</OutputType> <OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework> <TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings> <ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable> <Nullable>enable</Nullable>
</PropertyGroup> </PropertyGroup>

@ -61,7 +61,7 @@ namespace PartitionFileStorageTest
await storePart.Store(c3, Generate(r)); await storePart.Store(c3, Generate(r));
await storePart.Store(c3, Generate(r)); await storePart.Store(c3, Generate(r));
storePart.CompleteAdding(); storePart.CompleteAdding();
await storePart.Compress(); storePart.Compress();
} }
using (var readPart = store.CreateAccessor(new Metadata { Date = new DateTime(2022, 11, 08) })) using (var readPart = store.CreateAccessor(new Metadata { Date = new DateTime(2022, 11, 08) }))
@ -124,7 +124,7 @@ namespace PartitionFileStorageTest
Log.Info($"Fill journal: {sw.ElapsedMilliseconds}ms. Records writed: {storePart.TotalRecords}"); Log.Info($"Fill journal: {sw.ElapsedMilliseconds}ms. Records writed: {storePart.TotalRecords}");
sw.Restart(); sw.Restart();
storePart.CompleteAdding(); storePart.CompleteAdding();
await storePart.Compress(); storePart.Compress();
sw.Stop(); sw.Stop();
Log.Info($"Compress: {sw.ElapsedMilliseconds}ms"); Log.Info($"Compress: {sw.ElapsedMilliseconds}ms");
sw.Restart(); sw.Restart();
@ -269,11 +269,11 @@ namespace PartitionFileStorageTest
using (var storePart = store.CreateBuilder(meta)) using (var storePart = store.CreateBuilder(meta))
{ {
Parallel.ForEach(MassGenerator((long)(0.7 * PAIRS_COUNT)), parallelOptions, pair => await Parallel.ForEachAsync(MassGenerator((long)(0.7 * PAIRS_COUNT)), CancellationToken.None, async (pair, _) =>
{ {
var key = pair.Item1; var key = pair.Item1;
var val = pair.Item2; var val = pair.Item2;
storePart.Store(key, val); await storePart.Store(key, val);
if (key % 717 == 0) if (key % 717 == 0)
{ {
testKeys1.Add(key); testKeys1.Add(key);
@ -292,7 +292,7 @@ namespace PartitionFileStorageTest
Log.Info($"Fill journal: {sw.ElapsedMilliseconds}ms"); Log.Info($"Fill journal: {sw.ElapsedMilliseconds}ms");
sw.Restart(); sw.Restart();
storePart.CompleteAdding(); storePart.CompleteAdding();
await storePart.Compress(); storePart.Compress();
sw.Stop(); sw.Stop();
Log.Info($"Compress: {sw.ElapsedMilliseconds}ms"); Log.Info($"Compress: {sw.ElapsedMilliseconds}ms");
sw.Restart(); sw.Restart();
@ -305,11 +305,11 @@ namespace PartitionFileStorageTest
sw.Restart(); sw.Restart();
using (var merger = store.CreateMergeAccessor(meta, data => Compressor.DecodeBytesContent(data))) using (var merger = store.CreateMergeAccessor(meta, data => Compressor.DecodeBytesContent(data)))
{ {
Parallel.ForEach(MassGenerator((long)(0.3 * PAIRS_COUNT)), parallelOptions, pair => await Parallel.ForEachAsync(MassGenerator((long)(0.3 * PAIRS_COUNT)), CancellationToken.None, async (pair, _) =>
{ {
var key = pair.Item1; var key = pair.Item1;
var val = pair.Item2; var val = pair.Item2;
merger.Store(key, val); await merger.Store(key, val);
Keys.Add(key); Keys.Add(key);
}); });
@ -610,9 +610,10 @@ namespace PartitionFileStorageTest
FSUtils.CleanAndTestFolder(root); FSUtils.CleanAndTestFolder(root);
await FastTest(options); await FastTest(options);
FSUtils.CleanAndTestFolder(root); FSUtils.CleanAndTestFolder(root);
await FullStoreMultithreadTest(optionsMultiThread); await FullStoreMultithreadTest(optionsMultiThread);
/*FSUtils.CleanAndTestFolder(root); /*FSUtils.CleanAndTestFolder(root);

@ -1,41 +0,0 @@
using Grpc.Net.Client;
using static Qdrant.Collections;
namespace Qdrant.Test
{
// QDRANT VERSION 1.15.1
// Smoke test for the Qdrant gRPC client: creates a collection and immediately
// deletes it, printing the results.
// QDRANT VERSION 1.15.1
internal class Program
{
    const string COLLECTION_NAME = "my_test_collection";

    static void Main(string[] args)
    {
        // 6334 is Qdrant's default gRPC port (6333 is REST).
        var address = @"http://localhost:6334";
        var channel = GrpcChannel.ForAddress(address);
        var collections = new CollectionsClient(channel);
        // Create a 32-dimensional dot-product collection, HNSW index kept in RAM.
        var response = collections.Create(new CreateCollection
        {
            CollectionName = COLLECTION_NAME,
            VectorsConfig = new VectorsConfig
            {
                Params = new VectorParams
                {
                    Distance = Distance.Dot,
                    Size = 32,
                    HnswConfig = new HnswConfigDiff
                    {
                        OnDisk = false
                    }
                }
            }
        });
        Console.WriteLine($"CREATED: {response.Result}");
        // Clean up so the test can be re-run.
        var d_response = collections.Delete(new DeleteCollection
        {
            CollectionName = COLLECTION_NAME
        });
        Console.WriteLine($"DELETED: {d_response.Result}");
    }
}
}

@ -1,21 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Google.Protobuf" Version="3.25.2" />
<PackageReference Include="Grpc.Net.Client" Version="2.60.0" />
</ItemGroup>
<ItemGroup>
<Reference Include="ZeroLevel.Qdrant.GrpcClient">
<HintPath>..\..\ZeroLevel.Qdrant.GrpcClient\bin\Release\net6.0\ZeroLevel.Qdrant.GrpcClient.dll</HintPath>
</Reference>
</ItemGroup>
</Project>

@ -2,13 +2,16 @@
<PropertyGroup> <PropertyGroup>
<OutputType>Exe</OutputType> <OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework> <TargetFramework>net8.0</TargetFramework>
<Platforms>AnyCPU;x64;x86</Platforms> <Platforms>AnyCPU;x64;x86</Platforms>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" /> <PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="ZeroLevel" Version="3.4.0.8" /> </ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\ZeroLevel\ZeroLevel.csproj" />
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>

@ -2,7 +2,7 @@
<PropertyGroup> <PropertyGroup>
<OutputType>Exe</OutputType> <OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework> <TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings> <ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable> <Nullable>enable</Nullable>
<Platforms>AnyCPU;x64</Platforms> <Platforms>AnyCPU;x64</Platforms>

@ -2,7 +2,7 @@
<PropertyGroup> <PropertyGroup>
<OutputType>Exe</OutputType> <OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework> <TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings> <ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable> <Nullable>enable</Nullable>
<Platforms>AnyCPU;x64</Platforms> <Platforms>AnyCPU;x64</Platforms>

@ -2,7 +2,7 @@
<PropertyGroup> <PropertyGroup>
<OutputType>Exe</OutputType> <OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework> <TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings> <ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable> <Nullable>enable</Nullable>
<Platforms>AnyCPU;x64</Platforms> <Platforms>AnyCPU;x64</Platforms>

@ -2,7 +2,7 @@
<PropertyGroup> <PropertyGroup>
<OutputType>Exe</OutputType> <OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework> <TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings> <ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable> <Nullable>enable</Nullable>
<Platforms>AnyCPU;x64</Platforms> <Platforms>AnyCPU;x64</Platforms>

@ -152,7 +152,7 @@ namespace ZeroLevel.UnitTests
await storePart.Store(numbers[2], texts[7]); // 2 - 21 await storePart.Store(numbers[2], texts[7]); // 2 - 21
storePart.CompleteAdding(); storePart.CompleteAdding();
await storePart.Compress(); storePart.Compress();
} }
// Assert // Assert

@ -2,7 +2,7 @@
<PropertyGroup> <PropertyGroup>
<OutputType>Exe</OutputType> <OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework> <TargetFramework>net8.0</TargetFramework>
<Platforms>AnyCPU;x64;x86</Platforms> <Platforms>AnyCPU;x64;x86</Platforms>
</PropertyGroup> </PropertyGroup>

@ -1,24 +0,0 @@
using System.Threading;
namespace ZeroLevel.EventServer
{
// Persists events in a local SQLite database file "events.db"
// (table "events": autoincrement id, text key, blob body).
public class EventRepository
    :BaseSqLiteDB
{
    private readonly SQLiteConnection _db;
    // NOTE(review): not used by the visible code — presumably intended to guard
    // future read/write methods; confirm before removing.
    private readonly ReaderWriterLockSlim _rwLock = new ReaderWriterLockSlim();
    private readonly string _tableName;

    // Opens (or creates) the database and ensures the table and key index exist.
    // PrepareDb/Execute come from BaseSqLiteDB (not visible here).
    public EventRepository()
    {
        _tableName = "events";
        var path = PrepareDb($"{_tableName}.db");
        _db = new SQLiteConnection($"Data Source={path};Version=3;");
        _db.Open();

        Execute($"CREATE TABLE IF NOT EXISTS {_tableName} (id INTEGER PRIMARY KEY AUTOINCREMENT, key TEXT, body BLOB)", _db);
        Execute($"CREATE INDEX IF NOT EXISTS key_index ON {_tableName} (key)", _db);
    }
}
}

@ -1,62 +0,0 @@
using ZeroLevel.EventServer.Model;
using ZeroLevel.Network;
using ZeroLevel.Services.Applications;
namespace ZeroLevel.EventServer
{
/// <summary>
/// Network-facing event service: hosts a socket server, auto-registers the
/// attributed inbox handlers below and logs connect/disconnect events.
/// </summary>
public class EventService
    : BaseZeroService
{
    public EventService()
    {
    }

    // Called by the BaseZeroService lifecycle when the service starts.
    protected override void StartAction()
    {
        var host = UseHost();
        // Scans this instance for [ExchangeReplier] methods and wires them to inboxes.
        this.AutoregisterInboxes(host);
        host.OnConnect += Host_OnConnect;
        host.OnDisconnect += Host_OnDisconnect;
    }

    private void Host_OnDisconnect(ISocketClient obj)
    {
        Log.Info($"Client '{obj.Endpoint.Address}:{obj.Endpoint.Port}' disconnected");
    }

    private void Host_OnConnect(IClient obj)
    {
        Log.Info($"Client '{obj.Socket.Endpoint.Address}:{obj.Socket.Endpoint.Port}' connected");
    }

    protected override void StopAction()
    {
    }

    #region Inboxes
    // NOTE(review): all handlers are stubs returning 0 — event scheduling is not implemented yet.
    [ExchangeReplier("onetime")]
    public long OneTimeHandler(ISocketClient client, OneTimeEvent e)
    {
        return 0;
    }

    [ExchangeReplier("periodic")]
    public long PeriodicHandler(ISocketClient client, PeriodicTimeEvent e)
    {
        return 0;
    }

    [ExchangeReplier("eventtrigger")]
    public long AfterEventHandler(ISocketClient client, EventAfterEvent e)
    {
        return 0;
    }

    [ExchangeReplier("eventstrigger")]
    public long AfterEventsHandler(ISocketClient client, EventAfterEvents e)
    {
        return 0;
    }
    #endregion
}
}

@ -1,8 +0,0 @@
namespace ZeroLevel.EventServer.Model
{
/// <summary>
/// Base payload for all event types: identifies the target service and inbox
/// that should be invoked when the event fires.
/// </summary>
public abstract class BaseEvent
{
    public string ServiceKey { get; set; }
    public string Inbox { get; set; }
}
}

@ -1,23 +0,0 @@
namespace ZeroLevel.EventServer.Model
{
public enum Condition
    : int
{
    /// <summary>
    /// Fire unconditionally
    /// </summary>
    None = 0,
    /// <summary>
    /// Fire if at least one event was handled successfully
    /// </summary>
    OneSuccessfull = 1,
    /// <summary>
    /// Fire if all events were handled
    /// </summary>
    AllSuccessfull = 2,
    /// <summary>
    /// Fire if at least one event was not handled
    /// </summary>
    AnyFault = 3
}
}

@ -1,10 +0,0 @@
namespace ZeroLevel.EventServer.Model
{
/// <summary>
/// Event triggered after another single event completes.
/// </summary>
public class EventAfterEvent
    : BaseEvent
{
    public long EventId { get; set; }
    // NOTE(review): "Confition" is a typo for "Condition", but it is a public
    // property name — renaming would break callers/serialized payloads.
    public Condition Confition { get; set; }
}
}

@ -1,12 +0,0 @@
using System.Collections.Generic;
namespace ZeroLevel.EventServer.Model
{
/// <summary>
/// Event triggered after a group of other events, combined per <see cref="Condition"/>.
/// </summary>
public class EventAfterEvents
    : BaseEvent
{
    public IEnumerable<long> EventIds { get; set; }
    // NOTE(review): "Confition" typo kept — public name, renaming breaks callers.
    public Condition Confition { get; set; }
}
}

@ -1,13 +0,0 @@
namespace ZeroLevel.EventServer
{
/// <summary>
/// Routing record for a registered event: where to deliver it.
/// Either ServiceKey (discovery lookup) or ServiceEndpoint (direct address) is used.
/// </summary>
public class EventInfoRecord
{
    public long EventId { get; set; }
    public string ServiceKey { get; set; }
    // OR
    public string ServiceEndpoint { get; set; }

    public string Inbox { get; set; }
}
}

@ -1,10 +0,0 @@
namespace ZeroLevel.EventServer.Model
{
/// <summary>
/// Outcome of one event execution. Public fields (not properties) are kept
/// as-is — they may be relied on by binary serialization elsewhere.
/// </summary>
public class EventResult
{
    public long EventId;
    public EventResultState State;
    // Timestamps; units (ticks vs unix time) are not established by the visible code.
    public long StartTimestamp;
    public long EndTimestamp;
}
}

@ -1,9 +0,0 @@
namespace ZeroLevel.EventServer.Model
{
/// <summary>Execution state of an event (see <see cref="EventResult"/>).</summary>
public enum EventResultState
{
    InProgress,
    Success,
    Unsuccess
}
}

@ -1,11 +0,0 @@
namespace ZeroLevel.EventServer.Model
{
/// <summary>Kind of trigger behind an event.</summary>
public enum EventType
    : int
{
    // NOTE(review): "OneType" is presumably a typo for "OneTime" (cf. OneTimeEvent),
    // but renaming an enum member is a breaking change — confirm before fixing.
    OneType = 0,
    Periodic = 1,
    EventTrigger = 2,
    EventsTrigger = 3
}
}

@ -1,10 +0,0 @@
using System;
namespace ZeroLevel.EventServer.Model
{
/// <summary>
/// Event fired once, after the given delay. (Period here acts as a one-shot
/// delay — assumed from the name; confirm against the scheduler.)
/// </summary>
public class OneTimeEvent
    : BaseEvent
{
    public TimeSpan Period { get; set; }
}
}

@ -1,10 +0,0 @@
using System;
namespace ZeroLevel.EventServer.Model
{
/// <summary>
/// Event fired repeatedly with the given period.
/// </summary>
public class PeriodicTimeEvent
    : BaseEvent
{
    public TimeSpan Period { get; set; }
}
}

@ -1,15 +0,0 @@
namespace ZeroLevel.EventServer
{
// Entry point: boots the EventService via the ZeroLevel bootstrap,
// reading configuration from config.ini, and blocks until the service stops.
class Program
{
    static void Main(string[] args)
    {
        Bootstrap.Startup<EventService>(args, configuration: () => Configuration.ReadOrEmptySetFromIniFile("config.ini"))
            .EnableConsoleLog()
            .UseDiscovery()
            .Run()
            .WaitWhileStatus(ZeroServiceStatus.Running);
        Bootstrap.Shutdown();
    }
}
}

@ -1,12 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<Platforms>AnyCPU;x64;x86</Platforms>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\ZeroLevel\ZeroLevel.csproj" />
</ItemGroup>
</Project>

@ -1,287 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace ZeroLevel.HNSW
{
/// <summary>
/// One histogram bin: its index, count and the [MinBound, MaxBound) value range.
/// Setters are internal — instances are produced by <c>Histogram</c> only.
/// </summary>
public class HistogramValue
{
    public int Index { get; internal set; }
    public int Value { get; internal set; }
    public float MinBound { get; internal set; }
    public float MaxBound { get; internal set; }
}
/// <summary>
/// Histogram over a set of float values (typically link distances), with
/// smoothing, local-maximum detection and a bimodal cut-off search used to
/// split intra-cluster from inter-cluster distances.
/// </summary>
public class Histogram
{
    public HistogramMode Mode { get; }
    public float Min { get; }
    public float Max { get; }
    public float BoundsPeriod { get; }
    // Upper bounds of the first M-1 bins; values >= the last bound are dropped (see ctor).
    public float[] Bounds { get; }
    public int[] Values { get; }

    /// <summary>
    /// Builds the histogram. Bin count M = Sqrt(N) in SQRT mode,
    /// 1 + 3.2*Ln(N) in LOG mode. Values below float.Epsilon are skipped.
    /// NOTE: <paramref name="data"/> is enumerated several times (Min/Max/Count/fill);
    /// pass a materialized collection. Throws on empty input (Min()/Max()).
    /// </summary>
    public Histogram(HistogramMode mode, IEnumerable<float> data)
    {
        Mode = mode;
        Min = data.Min();
        Max = data.Max();
        int count = data.Count();
        int M = mode == HistogramMode.LOG ? (int)(1f + 3.2f * Math.Log(count)) : (int)(Math.Sqrt(count));
        BoundsPeriod = (Max - Min) / M;
        Bounds = new float[M - 1];

        float bound = Min + BoundsPeriod;
        for (int i = 0; i < Bounds.Length; i++)
        {
            Bounds[i] = bound;
            bound += BoundsPeriod;
        }
        // FIX: removed the redundant zero-fill loop — CLR arrays are zero-initialized.
        Values = new int[M];
        foreach (var v in data)
        {
            if (v < float.Epsilon) continue; // ignore (near-)zero distances
            for (int i = 0; i < Bounds.Length; i++)
            {
                if (v < Bounds[i])
                {
                    Values[i]++;
                    break;
                }
            }
        }
    }

    public int Count => Values?.Length ?? 0;

    /// <summary>
    /// Counts how many times the discrete derivative of the bin counts changes sign
    /// (0 means monotone, 1 means unimodal, 2+ means multimodal).
    /// </summary>
    public int CountSignChanges()
    {
        if ((Values?.Length ?? 0) <= 2) return 0;
        int i = 0;
        // FIX: bound the index so an all-zero histogram cannot run past the array end.
        while (i < Values.Length && Values[i] <= float.Epsilon) { i++; }
        if ((Values.Length - i) <= 2) return 0;

        var delta = Values[i + 1] - Values[i];
        int changes = 0;
        i++;
        for (; i < Values.Length - 1; i++)
        {
            var d = Values[i + 1] - Values[i];
            if (Math.Abs(d) <= float.Epsilon)
            {
                continue; // flat segments do not count as a sign change
            }
            if (NumbersHasSameSign(d, delta) == false)
            {
                delta = d;
                changes++;
            }
        }
        return changes;
    }

    /// <summary>
    /// In-place moving-average smoothing, window 5.
    /// The first two and last three bins keep their original values.
    /// </summary>
    public void Smooth()
    {
        var buffer = new int[Values.Length];
        Array.Copy(Values, buffer, buffer.Length);
        for (int i = 2; i < Values.Length - 3; i++)
        {
            Values[i] = (buffer[i - 2] + buffer[i - 1] + buffer[i] + buffer[i + 1] + buffer[i + 2]) / 5;
        }
    }

    /// <summary>
    /// Returns the local maxima of the histogram — bins where the derivative
    /// flips from rising to falling.
    /// </summary>
    public IEnumerable<HistogramValue> GetMaximums()
    {
        var list = new List<HistogramValue>();
        if ((Values?.Length ?? 0) <= 2) return list;
        int i = 0;
        // FIX: same bounds guard as in CountSignChanges.
        while (i < Values.Length && Values[i] <= float.Epsilon) { i++; }
        if ((Values.Length - i) <= 2) return list;

        var delta = Values[i + 1] - Values[i];
        i++;
        for (; i < Values.Length - 1; i++)
        {
            var d = Values[i + 1] - Values[i];
            if (Math.Abs(d) <= float.Epsilon)
            {
                continue;
            }
            if (NumbersHasSameSign(d, delta) == false)
            {
                if (delta > 0) // rising -> falling transition = local maximum
                {
                    list.Add(new HistogramValue
                    {
                        Index = i,
                        Value = Values[i],
                        MinBound = Bounds[i - 1],
                        MaxBound = Bounds[i]
                    });
                }
                delta = d;
            }
        }
        return list;
    }

    #region OTSU "https://en.wikipedia.org/wiki/Otsu's_method"
    // q-value helper for the (currently disabled) Otsu threshold implementation below.
    private float Px(int init, int end)
    {
        int sum = 0;
        int i;
        for (i = init; i < end; i++)
            sum += Values[i];
        return (float)sum;
    }

    // mean (mu) helper for the Otsu implementation below.
    private float Mx(int init, int end)
    {
        int sum = 0;
        int i;
        for (i = init; i < end; i++)
            sum += i * Values[i];
        return (float)sum;
    }
    /*
    public int OTSU()
    {
        float p1, p2, p12;
        int k;
        int threshold = 0;
        float bcv = 0;
        for (k = 0; k < Values.Length; k++)
        {
            p1 = Px(0, k);
            p2 = Px(k + 1, Values.Length);
            p12 = p1 * p2;
            if (p12 == 0)
                p12 = 1;
            float diff = (Mx(0, k) * p2) - (Mx(k + 1, Values.Length) * p1);
            var test = (float)diff * diff / p12;
            if (test > bcv)
            {
                bcv = test;
                threshold = k;
            }
        }
        return threshold;
    }
    */
    /*
     Algorithm sketch (translated from the original Russian note):
     1. Take the discrete gradient V[i] - V[i-1].
     2. Rescan with a growing window (1, 2, ...) until the histogram collapses
        to a bimodal shape (exactly two sign changes of the windowed gradient).
     3. The cut-off is the minimum bin between the two remaining peaks.
    */
    /// <summary>
    /// Finds the cut-off bin separating two modes of the histogram,
    /// or -1 when the histogram cannot be reduced to exactly two modes.
    /// </summary>
    public int CuttOff()
    {
        if (Values.Length > 1)
        {
            var grad = new int[Values.Length];
            grad[0] = 0;
            grad[1] = 0;
            for (int k = 2; k < Values.Length; k++)
            {
                grad[k - 1] = Values[k] - Values[k - 1];
            }
            var modes = 0;
            var window = 0;
            // NOTE(review): 'sign' deliberately persists across window sizes
            // (it is not reset per iteration) — preserved as-is.
            var sign = 1;
            var sum = 0;
            var max = 0;
            var maxInd = 0;
            var maxes = new List<int>();
            do
            {
                maxes.Clear();
                window++;
                modes = 0;
                sum = 0;
                for (int i = 0; i < grad.Length; i += window)
                {
                    sum = grad[i];
                    max = Values[i];
                    maxInd = i;
                    for (var w = 1; w < window && (i + w) < grad.Length; w++)
                    {
                        sum += grad[i + w];
                        if (Values[i + w] > max)
                        {
                            max = Values[i + w];
                            maxInd = i + w;
                        }
                    }
                    if (sum > 0 && sign < 0)
                    {
                        sign = 1;
                    }
                    else if (sum < 0 && sign > 0)
                    {
                        modes++;
                        maxes.Add(maxInd);
                        sign = -1;
                    }
                }
            } while (modes > 2);
            if (modes == 2)
            {
                // Cut at the lowest bin strictly between the two detected peaks.
                var cutoff = maxes[0];
                var min = Values[cutoff];
                for (int i = maxes[0] + 1; i < maxes[1]; i++)
                {
                    if (Values[i] < min)
                    {
                        cutoff = i;
                        min = Values[i];
                    }
                }
                return cutoff;
            }
        }
        return -1;
    }
    #endregion

    // True when both ints are non-negative or both are negative (zero counts as positive).
    static bool NumbersHasSameSign(int left, int right)
    {
        return left >= 0 && right >= 0 || left < 0 && right < 0;
    }
}
}

@ -1,14 +0,0 @@
namespace ZeroLevel.HNSW
{
public enum HistogramMode
{
    // FIX(docs): the two XML comments were swapped relative to the Histogram
    // constructor, which uses Sqrt(N) for SQRT and 1 + 3.2*Ln(N) for LOG.
    /// <summary>
    /// Bin count = Sqrt(LinksCount)
    /// </summary>
    SQRT,
    /// <summary>
    /// Bin count = 1 + 3.2 * Ln(LinksCount)
    /// </summary>
    LOG
}
}

@ -1,53 +0,0 @@
using System;
namespace ZeroLevel.HNSW
{
/// <summary>
/// Immutable configuration for a small-world (HNSW) graph.
/// </summary>
public sealed class NSWOptions<TItem>
{
    /// <summary>
    /// Max node connections on Layer
    /// </summary>
    public readonly int M;
    /// <summary>
    /// Max search buffer
    /// </summary>
    public readonly int EF;
    /// <summary>
    /// Max search buffer for inserting
    /// </summary>
    public readonly int EFConstruction;

    // NOTE(review): dead stub — always throws. Kept only because removing a
    // public member is a breaking change; callers must not use it.
    public static NSWOptions<float[]> Create(int v1, int v2, int v3, int v4, Func<float[], float[], float> l2Euclidean, object selectionHeuristic)
    {
        throw new NotImplementedException();
    }

    /// <summary>
    /// Distance function beetween vectors
    /// </summary>
    public readonly Func<TItem, TItem, float> Distance;

    public readonly int LayersCount;

    // Private: instances are created via the factory below.
    private NSWOptions(int layersCount,
        int m,
        int ef,
        int ef_construction,
        Func<TItem, TItem, float> distance)
    {
        LayersCount = layersCount;
        M = m;
        EF = ef;
        EFConstruction = ef_construction;
        Distance = distance;
    }

    /// <summary>Factory for a fully-specified options instance.</summary>
    public static NSWOptions<TItem> Create(int layersCount,
        int M,
        int EF,
        int EF_construction,
        Func<TItem, TItem, float> distance) =>
        new NSWOptions<TItem>(layersCount, M, EF, EF_construction, distance);
}
}

@ -1,126 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
namespace ZeroLevel.HNSW
{
/// <summary>
/// Node-filtering mode of a <see cref="SearchContext"/>, derived from which
/// restriction sets were supplied.
/// </summary>
public enum Mode
{
    None,                // no restrictions
    ActiveCheck,         // only nodes from the allowed (active) set may enter the result
    InactiveCheck,       // entry-point nodes are excluded from expansion
    ActiveInactiveCheck  // both restrictions at once
}

/// <summary>
/// Per-query search restrictions: an optional whitelist of result nodes
/// (e.g. after pre-filtering) and an optional set of entry points to start
/// the graph expansion from.
/// </summary>
public sealed class SearchContext
{
    /// <summary>
    /// Ids allowed in the final result when the search runs over a restricted
    /// subset of points (for example, after preliminary filtering).
    /// </summary>
    private HashSet<int> _activeNodes;
    /// <summary>
    /// Ids of the points the graph expansion starts from.
    /// </summary>
    private HashSet<int> _entryNodes;
    /// <summary>
    /// Expansion mode, depending on which of the two sets above were provided.
    /// </summary>
    private Mode _mode;

    public Mode NodeCheckMode => _mode;
    public double PercentInTotal { get; private set; } = 0;
    public long AvaliableNodesCount => _activeNodes?.Count ?? 0;

    public SearchContext()
    {
        _mode = Mode.None;
    }

    /// <summary>
    /// Computes the share (0..1) of points usable in this context relative to
    /// the total point count.
    /// </summary>
    public SearchContext CaclulatePercentage(long total)
    {
        if ((_mode == Mode.ActiveCheck || _mode == Mode.ActiveInactiveCheck) && total > 0)
        {
            // FIX: the original expression was (_activeNodes?.Count ?? 0 * 100d),
            // where "* 100d" bound to the ??-fallback only (precedence), making the
            // result 100x too small. The count must be parenthesized before scaling.
            PercentInTotal = (((_activeNodes?.Count ?? 0) * 100d) / (double)total) / 100.0d;
        }
        return this;
    }

    public SearchContext SetPercentage(double percent)
    {
        PercentInTotal = percent;
        return this;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private bool _isActiveNode(int nodeId) => _activeNodes?.Contains(nodeId) ?? false;

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private bool _isEntryNode(int nodeId) => _entryNodes?.Contains(nodeId) ?? false;

    /// <summary>
    /// Whether the given node may be included in the expansion set under the
    /// current mode.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    internal bool IsActiveNode(int nodeId)
    {
        switch (_mode)
        {
            // A whitelist is defined: the node must belong to it.
            case Mode.ActiveCheck: return _isActiveNode(nodeId);
            // Entry points are defined: the node must not be one of them.
            case Mode.InactiveCheck: return _isEntryNode(nodeId) == false;
            // Both sets defined: whitelist membership AND not an entry point.
            case Mode.ActiveInactiveCheck: return false == _isEntryNode(nodeId) && _isActiveNode(nodeId);
        }
        return nodeId >= 0;
    }

    public IEnumerable<int> EntryPoints => _entryNodes;

    /// <summary>Defines the whitelist of result nodes; may only be set once.</summary>
    public SearchContext SetActiveNodes(IEnumerable<int> activeNodes)
    {
        if (activeNodes != null && activeNodes.Any())
        {
            if (_mode == Mode.ActiveCheck || _mode == Mode.ActiveInactiveCheck)
            {
                throw new InvalidOperationException("Active nodes are already defined");
            }
            _activeNodes = new HashSet<int>(activeNodes);
            if (_mode == Mode.None)
            {
                _mode = Mode.ActiveCheck;
            }
            else if (_mode == Mode.InactiveCheck)
            {
                _mode = Mode.ActiveInactiveCheck;
            }
        }
        return this;
    }

    /// <summary>Defines the entry-point set; may only be set once.</summary>
    public SearchContext SetEntryPointsNodes(IEnumerable<int> entryNodes)
    {
        if (entryNodes != null && entryNodes.Any())
        {
            if (_mode == Mode.InactiveCheck || _mode == Mode.ActiveInactiveCheck)
            {
                // FIX: message typo "Inctive" -> "Inactive".
                throw new InvalidOperationException("Inactive nodes are already defined");
            }
            _entryNodes = new HashSet<int>(entryNodes);
            if (_mode == Mode.None)
            {
                _mode = Mode.InactiveCheck;
            }
            else if (_mode == Mode.ActiveCheck)
            {
                _mode = Mode.ActiveInactiveCheck;
            }
        }
        return this;
    }
}
}

@ -1,61 +0,0 @@
using System;
namespace ZeroLevel.HNSW.PHNSW
{
// One level of the PHNSW hierarchy: routes an inserted node down to whichever
// child (A or B) it is closer to, keyed by a target cosine distance.
// NOTE(review): this class declares Add(Node) while IPHNSWLevel declares
// Add(IPHNSWLevel, Node) — the interface is never actually satisfied as written;
// confirm which signature is intended.
internal class HLevel<TPayload>
    : IPHNSWLevel<TPayload>
{
    // Target distance this level discriminates around.
    private readonly float _distance;
    public HLevel(float distance)
    {
        _distance = distance;
    }
    public Node<TPayload> Node { get; set; } = null;
    public IPHNSWLevel<TPayload> NextLevelA { get; set; }
    public IPHNSWLevel<TPayload> NextLevelB { get; set; }
    // Cached distance between the two child representatives (A, B).
    private float _abDistance = float.MinValue;

    public void Add(Node<TPayload> node)
    {
        // First two inserts become the representatives of children A and B.
        if (NextLevelA.Node == null) { NextLevelA.Node = node; }
        else if (NextLevelB.Node == null)
        {
            NextLevelB.Node = node;
            _abDistance = PHNSWMetric.CosineDistance(NextLevelA.Node.Vector, NextLevelB.Node.Vector);
        }
        else
        {
            // Compare how well (A,B), (A,node) and (B,node) match the target distance.
            var an = PHNSWMetric.CosineDistance(NextLevelA.Node.Vector, node.Vector);
            var bn = PHNSWMetric.CosineDistance(NextLevelB.Node.Vector, node.Vector);
            var abDiff = Math.Abs(_distance - _abDistance);
            var anDiff = Math.Abs(_distance - an);
            var bnDiff = Math.Abs(_distance - bn);
            if (abDiff < anDiff && abDiff < bnDiff)
            {
                // Current representatives are the best pair: push down the closer side.
                if (an < bn)
                {
                    NextLevelA.Add(node);
                }
                else
                {
                    NextLevelB.Add(node);
                }
            }
            else if (anDiff < bnDiff && anDiff < abDiff)
            {
                // NOTE(review): the node replaces the representative AND is pushed
                // into the same subtree — it then gets compared against itself.
                // Looks suspicious; preserved byte-for-byte pending confirmation.
                NextLevelA.Node = node;
                NextLevelA.Add(node);
            }
            else
            {
                NextLevelB.Node = node;
                NextLevelB.Add(node);
            }
        }
    }
}
}

@ -1,8 +0,0 @@
namespace ZeroLevel.HNSW.PHNSW
{
// Contract for one level of the PHNSW hierarchy.
// NOTE(review): the visible implementation (HLevel) defines Add(Node) instead of
// this two-argument overload, and an internal setter on an interface property is
// unusual — both look like work in progress; confirm the intended contract.
public interface IPHNSWLevel<TPayload>
{
    void Add(IPHNSWLevel<TPayload> prevLayer, Node<TPayload> node);
    Node<TPayload> Node { get; internal set; }
}
}

@ -1,12 +0,0 @@
using ZeroLevel.DocumentObjectModel.Flow;
namespace ZeroLevel.HNSW.PHNSW
{
/// <summary>
/// PHNSW graph node: a vector, its payload and the adjacency list.
/// </summary>
public class Node <TPayload>
{
    public float[] Vector { get; set; }
    public TPayload Payload { get; set; }
    // FIX: the get-only property was never assigned anywhere, so Neighbors was
    // always null; initialize it to an empty list.
    public List<Node<TPayload>> Neighbors { get; } = new List<Node<TPayload>>();
}
}

@ -1,35 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ZeroLevel.HNSW.PHNSW
{
public static class PHNSWBuilder
{
    /// <summary>
    /// Builds a complete binary tree of <c>HLevel</c> routers of the given depth,
    /// halving the target distance on each level, and returns the root level.
    /// </summary>
    public static IPHNSWLevel<TPayload> Build<TPayload>(int levels)
    {
        var distance = 0.33f;
        var root = new HLevel<TPayload>(distance);
        var horizontalLayers = new List<HLevel<TPayload>>(new[] { root });
        for (var i = 0; i < levels; i++)
        {
            distance /= 2.0f;
            var nextList = new List<HLevel<TPayload>>();
            foreach (var layer in horizontalLayers)
            {
                var a = new HLevel<TPayload>(distance);
                var b = new HLevel<TPayload>(distance);
                layer.NextLevelA = a;
                layer.NextLevelB = b;
                nextList.Add(a);
                nextList.Add(b);
            }
            horizontalLayers = nextList;
        }
        // FIX: the method declared a return type but had no return statement
        // (it ended with an unused "var uwLevel = new UWLevel<TPayload>();",
        // which did not compile). Return the root of the built hierarchy.
        return root;
    }
}
}

@ -1,28 +0,0 @@
using System;
namespace ZeroLevel.HNSW.PHNSW
{
/// <summary>
/// Vector metrics for PHNSW.
/// </summary>
internal static class PHNSWMetric
{
    /// <summary>
    /// Cosine distance (1 - cosine similarity) between two equal-length vectors.
    /// </summary>
    /// <exception cref="ArgumentException">When the vectors differ in length.</exception>
    internal static float CosineDistance(float[] u, float[] v)
    {
        if (u.Length != v.Length)
        {
            throw new ArgumentException("Vectors have non-matching dimensions");
        }
        float dotProduct = 0.0f;
        float squaredNormU = 0.0f;
        float squaredNormV = 0.0f;
        for (int idx = 0; idx < u.Length; idx++)
        {
            dotProduct += u[idx] * v[idx];
            squaredNormU += u[idx] * u[idx];
            squaredNormV += v[idx] * v[idx];
        }
        // similarity in [-1, 1]; distance in [0, 2]
        var similarity = dotProduct / (float)(Math.Sqrt(squaredNormU) * Math.Sqrt(squaredNormV));
        return 1 - similarity;
    }
}
}

@ -1,150 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
namespace ZeroLevel.HNSW.Services
{
/// <summary>
/// A mutable set of node ids with set-to-set distance helpers
/// (max / min / average linkage).
/// </summary>
public class Cluster
    : IEnumerable<int>
{
    private HashSet<int> _elements = new HashSet<int>();

    /// <summary>Number of ids in the cluster.</summary>
    public int Count => _elements.Count;

    public bool Contains(int id) => _elements.Contains(id);

    public bool Add(int id) => _elements.Add(id);

    public IEnumerator<int> GetEnumerator() => _elements.GetEnumerator();

    IEnumerator IEnumerable.GetEnumerator() => _elements.GetEnumerator();

    /// <summary>Absorbs every id of <paramref name="cluster"/> into this cluster.</summary>
    public void Merge(Cluster cluster)
    {
        foreach (var id in cluster)
        {
            _elements.Add(id);
        }
    }

    /// <summary>Largest pairwise distance between this cluster and <paramref name="other"/> (complete linkage).</summary>
    public float MaxDistance(Func<int, int, float> distance, Cluster other)
    {
        var best = float.MinValue;
        foreach (var a in _elements)
        {
            foreach (var b in other)
            {
                var current = distance(a, b);
                best = current > best ? current : best;
            }
        }
        return best;
    }

    /// <summary>Smallest pairwise distance between this cluster and <paramref name="other"/> (single linkage).</summary>
    public float MinDistance(Func<int, int, float> distance, Cluster other)
    {
        var best = float.MaxValue;
        foreach (var a in _elements)
        {
            foreach (var b in other)
            {
                var current = distance(a, b);
                best = current < best ? current : best;
            }
        }
        return best;
    }

    /// <summary>Mean pairwise distance between this cluster and <paramref name="other"/> (average linkage).</summary>
    public float AvgDistance(Func<int, int, float> distance, Cluster other)
    {
        var all = new List<float>();
        foreach (var a in _elements)
        {
            foreach (var b in other)
            {
                all.Add(distance(a, b));
            }
        }
        return all.Average();
    }
}
/// <summary>
/// Clusters an HNSW graph by thresholding the link-distance histogram and
/// union-growing connected components over the links shorter than the threshold.
/// </summary>
public static class AutomaticGraphClusterer
{
    // One undirected graph link with its precomputed distance.
    private class Link
    {
        public int Id1;
        public int Id2;
        public float Distance;
    }

    public static List<Cluster> DetectClusters<T>(SmallWorld<T> world)
    {
        var distance = world.DistanceFunction;
        var links = world.GetLinks().SelectMany(pair => pair.Value.Select(id => new Link { Id1 = pair.Key, Id2 = id, Distance = distance(pair.Key, id) })).ToList();

        // 1. Find R - bound between intra-cluster distances and out-of-cluster distances
        var histogram = new Histogram(HistogramMode.LOG, links.Select(l => l.Distance));
        int threshold = histogram.CuttOff();
        // NOTE(review): CuttOff() can return -1 (no bimodal split), which would make
        // Bounds[threshold - 1] throw — same unguarded pattern as VectorsDirectCompare.
        var min = histogram.Bounds[threshold - 1];
        var max = histogram.Bounds[threshold];
        var R = (max + min) / 2;

        // 2. Get links with distances less than R
        var resultLinks = new List<Link>();
        foreach (var l in links)
        {
            if (l.Distance < R)
            {
                resultLinks.Add(l);
            }
        }

        // 3. Extract clusters: attach each short link to the first cluster already
        // containing one of its endpoints, otherwise start a new cluster.
        List<Cluster> clusters = new List<Cluster>();
        foreach (var l in resultLinks)
        {
            var id1 = l.Id1;
            var id2 = l.Id2;
            bool found = false;
            foreach (var c in clusters)
            {
                if (c.Contains(id1))
                {
                    c.Add(id2);
                    found = true;
                    break;
                }
                else if (c.Contains(id2))
                {
                    c.Add(id1);
                    found = true;
                    break;
                }
            }
            if (found == false)
            {
                var c = new Cluster();
                c.Add(id1);
                c.Add(id2);
                clusters.Add(c);
            }
        }
        return clusters;
    }
}
}

@ -1,244 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using ZeroLevel.Services.Serialization;
namespace ZeroLevel.HNSW
{
// Thread-safe set of weighted bidirectional links between int node ids.
// Each link (a, b, w) is stored twice in a SortedList under the packed keys
// (a << 32) + b and (b << 32) + a, so all links of a node are contiguous by key.
internal sealed class CompactBiDirectionalLinksSet
    : IBinarySerializable, IDisposable
{
    private readonly ReaderWriterLockSlim _rwLock = new ReaderWriterLockSlim();
    private const int HALF_LONG_BITS = 32;
    private SortedList<long, float> _set = new SortedList<long, float>();
    internal SortedList<long, float> Links => _set;

    // Unpacks the index-th stored key back into its (id1, id2) pair. Not locked.
    internal (int, int) this[int index]
    {
        get
        {
            var k = _set.Keys[index];
            var id1 = (int)(k >> HALF_LONG_BITS);
            var id2 = (int)(k - (((long)id1) << HALF_LONG_BITS));
            return (id1, id2);
        }
    }
    internal int Count => _set.Count;

    // Yields all links touching the given id as (id1, id2, weight).
    // NOTE(review): the read lock is held across 'yield return', so it stays
    // taken until the caller finishes (or disposes) the enumeration — callers
    // must consume promptly and must not re-enter the set while iterating.
    internal IEnumerable<(int, int, float)> FindLinksForId(int id)
    {
        _rwLock.EnterReadLock();
        try
        {
            if (_set.Count == 1)
            {
                var k = _set.Keys[0];
                var v = _set[k];
                var id1 = (int)(k >> HALF_LONG_BITS);
                var id2 = (int)(k - (((long)id1) << HALF_LONG_BITS));
                if (id1 == id) yield return (id, id2, v);
                else if (id2 == id) yield return (id1, id, v);
            }
            else if (_set.Count > 1)
            {
                foreach (var (k, v) in Search(_set, id))
                {
                    var id1 = (int)(k >> HALF_LONG_BITS);
                    var id2 = (int)(k - (((long)id1) << HALF_LONG_BITS));
                    yield return (id1, id2, v);
                }
            }
        }
        finally
        {
            _rwLock.ExitReadLock();
        }
    }

    // Yields every stored (id1, id2, weight) triple; same lazy-lock caveat as above.
    internal IEnumerable<(int, int, float)> Items()
    {
        _rwLock.EnterReadLock();
        try
        {
            foreach (var pair in _set)
            {
                var id1 = (int)(pair.Key >> HALF_LONG_BITS);
                var id2 = (int)(pair.Key - (((long)id1) << HALF_LONG_BITS));
                yield return (id1, id2, pair.Value);
            }
        }
        finally
        {
            _rwLock.ExitReadLock();
        }
    }

    // Removes the link in both directions, if present.
    internal void RemoveIndex(int id1, int id2)
    {
        long k1 = (((long)(id1)) << HALF_LONG_BITS) + id2;
        long k2 = (((long)(id2)) << HALF_LONG_BITS) + id1;
        _rwLock.EnterWriteLock();
        try
        {
            if (_set.ContainsKey(k1))
            {
                _set.Remove(k1);
            }
            if (_set.ContainsKey(k2))
            {
                _set.Remove(k2);
            }
        }
        finally
        {
            _rwLock.ExitWriteLock();
        }
    }

    // Adds the link in both directions (a self-link id1==id2 is stored once).
    // Returns false when the link already exists.
    internal bool Add(int id1, int id2, float distance)
    {
        _rwLock.EnterWriteLock();
        try
        {
            long k1 = (((long)(id1)) << HALF_LONG_BITS) + id2;
            long k2 = (((long)(id2)) << HALF_LONG_BITS) + id1;
            if (_set.ContainsKey(k1) == false)
            {
                _set.Add(k1, distance);
                if (k1 != k2)
                {
                    _set.Add(k2, distance);
                }
                return true;
            }
        }
        finally
        {
            _rwLock.ExitWriteLock();
        }
        return false;
    }
    /*
    Classic binary search (reference pseudocode):
    function binary_search(A, n, T) is
        L := 0
        R := n - 1
        while L <= R do
            m := floor((L + R) / 2)
            if A[m] < T then
                L := m + 1
            else if A[m] > T then
                R := m - 1
            else:
                return m
        return unsuccessful
    */
    // Binary-searches the sorted keys for any entry whose high 32 bits equal
    // 'index', then expands to the full contiguous run via SearchByPosition.
    private static IEnumerable<(long, float)> Search(SortedList<long, float> set, int index)
    {
        long k = ((long)index) << HALF_LONG_BITS; // T
        int left = 0;
        int right = set.Count - 1;
        int mid;
        long test;
        while (left <= right)
        {
            mid = (int)Math.Floor((right + left) / 2d);
            // Mask off the low half so keys compare by their high (id1) part only.
            test = (set.Keys[mid] >> HALF_LONG_BITS) << HALF_LONG_BITS; // A[m]
            if (test < k)
            {
                left = mid + 1;
            }
            else if (test > k)
            {
                right = mid - 1;
            }
            else
            {
                return SearchByPosition(set, k, mid);
            }
        }
        return Enumerable.Empty<(long, float)>();
    }

    // From one matching position, walks left and right to cover the whole run
    // of keys sharing the same high 32 bits, then yields them in order.
    private static IEnumerable<(long, float)> SearchByPosition(SortedList<long, float> set, long k, int position)
    {
        var start = position;
        var end = position;
        do
        {
            position--;
        } while (position >= 0 && ((set.Keys[position] >> HALF_LONG_BITS) << HALF_LONG_BITS) == k);
        start = position + 1;
        position = end + 1;
        while (position < set.Count && ((set.Keys[position] >> HALF_LONG_BITS) << HALF_LONG_BITS) == k)
        {
            position++;
        }
        end = position - 1;
        for (int i = start; i <= end; i++)
        {
            yield return (set.Keys[i], set.Values[i]);
        }
    }

    // Histogram over all stored link weights (no locking — snapshot semantics).
    public Histogram CalculateHistogram(HistogramMode mode)
    {
        return new Histogram(mode, _set.Values);
    }

    // Directed lookup of the weight for (id1 -> id2); float.MaxValue when absent.
    internal float Distance(int id1, int id2)
    {
        long k = (((long)(id1)) << HALF_LONG_BITS) + id2;
        if (_set.ContainsKey(k))
        {
            return _set[k];
        }
        return float.MaxValue;
    }

    public void Dispose()
    {
        _rwLock.Dispose();
        _set.Clear();
        _set = null;
    }

    public void Serialize(IBinaryWriter writer)
    {
        writer.WriteBoolean(true); // true - set with weights
        writer.WriteInt32(_set.Count);
        foreach (var record in _set)
        {
            writer.WriteLong(record.Key);
            writer.WriteFloat(record.Value);
        }
    }

    public void Deserialize(IBinaryReader reader)
    {
        if (reader.ReadBoolean() == false)
        {
            throw new InvalidOperationException("Incompatible data format. The set does not contain weights.");
        }
        // NOTE(review): Clear + null is redundant — the list is replaced just below.
        _set.Clear();
        _set = null;
        var count = reader.ReadInt32();
        _set = new SortedList<long, float>(count + 1);
        for (int i = 0; i < count; i++)
        {
            var key = reader.ReadLong();
            var value = reader.ReadFloat();
            _set.Add(key, value);
        }
    }
}
}

@ -1,84 +0,0 @@
using System.Collections.Generic;
using System.IO;
using ZeroLevel.Services.Serialization;
namespace ZeroLevel.HNSW
{
// object -> vector -> vectorId
// HNSW vectorId + vector
// Map object feature - vectorId
public class HNSWMap<TFeature>
: IBinarySerializable
{
private Dictionary<TFeature, int> _map;
private Dictionary<int, TFeature> _reverse_map;
public int this[TFeature feature] => _map.GetValueOrDefault(feature);
public HNSWMap() { }
public HNSWMap(int capacity = -1)
{
if (capacity > 0)
{
_map = new Dictionary<TFeature, int>(capacity);
_reverse_map = new Dictionary<int, TFeature>(capacity);
}
else
{
_map = new Dictionary<TFeature, int>();
_reverse_map = new Dictionary<int, TFeature>();
}
}
public HNSWMap(Stream stream)
{
using (var reader = new MemoryStreamReader(stream))
{
Deserialize(reader);
}
}
public void Append(TFeature feature, int vectorId)
{
_map[feature] = vectorId;
_reverse_map[vectorId] = feature;
}
public IEnumerable<int> ConvertFeaturesToIds(IEnumerable<TFeature> features)
{
int id;
foreach (var feature in features)
{
if (_map.TryGetValue(feature, out id))
{
yield return id;
}
}
}
public IEnumerable<TFeature> ConvertIdsToFeatures(IEnumerable<int> ids)
{
TFeature feature;
foreach (var id in ids)
{
if (_reverse_map.TryGetValue(id, out feature))
{
yield return feature;
}
}
}
public void Deserialize(IBinaryReader reader)
{
this._map = reader.ReadDictionary<TFeature, int>();
this._reverse_map = reader.ReadDictionary<int, TFeature>();
}
public void Serialize(IBinaryWriter writer)
{
writer.WriteDictionary<TFeature, int>(this._map);
writer.WriteDictionary<int, TFeature>(this._reverse_map);
}
}
}

@ -1,142 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using ZeroLevel.Services.Serialization;
namespace ZeroLevel.HNSW
{
public class HNSWMappers<TFeature>
: IBinarySerializable
{
private IDictionary<int, HNSWMap<TFeature>> _mappers;
private readonly Func<TFeature, int> _bucketFunction;
public HNSWMappers(string filePath, Func<TFeature, int> bucketFunction)
{
_bucketFunction = bucketFunction;
using (var fs = File.OpenRead(filePath))
{
using (var bs = new BufferedStream(fs, 1024 * 1024 * 32))
{
using (var reader = new MemoryStreamReader(bs))
{
Deserialize(reader);
}
}
}
}
public void Save(string filePath)
{
using (var fs = File.OpenWrite(filePath))
{
using (var bs = new BufferedStream(fs, 1024 * 1024 * 32))
{
using (var writer = new MemoryStreamWriter(bs))
{
Serialize(writer);
}
}
}
}
public HNSWMappers(Func<TFeature, int> bucketFunction)
{
_mappers = new Dictionary<int, HNSWMap<TFeature>>();
_bucketFunction = bucketFunction;
}
public void Append(HNSWMap<TFeature> map, int c)
{
_mappers.Add(c, map);
}
public IEnumerable<TFeature> ConvertIdsToFeatures(int c, IEnumerable<int> ids)
{
foreach (var feature in _mappers[c].ConvertIdsToFeatures(ids))
{
yield return feature;
}
}
public IDictionary<int, SearchContext> CreateContext(IEnumerable<TFeature> activeNodes, IEnumerable<TFeature> entryPoints)
{
var actives = new Dictionary<int, List<int>>();
var entries = new Dictionary<int, List<int>>();
if (activeNodes != null)
{
foreach (var node in activeNodes)
{
var c = _bucketFunction(node);
if (_mappers.ContainsKey(c))
{
if (actives.ContainsKey(c) == false)
{
actives.Add(c, new List<int>());
}
actives[c].Add(_mappers[c][node]);
}
else
{
Log.Warning($"Active node {node} is not included in graphs!");
}
}
}
if (entryPoints != null)
{
foreach (var entryPoint in entryPoints)
{
var c = _bucketFunction(entryPoint);
if (_mappers.ContainsKey(c))
{
if (entries.ContainsKey(c) == false)
{
entries.Add(c, new List<int>());
}
entries[c].Add(_mappers[c][entryPoint]);
}
else
{
Log.Warning($"Entry point {entryPoint} is not included in graphs!");
}
}
}
var result = new Dictionary<int, SearchContext>();
foreach (var pair in _mappers)
{
var active = actives.GetValueOrDefault(pair.Key);
var entry = entries.GetValueOrDefault(pair.Key);
result.Add(pair.Key, new SearchContext().SetActiveNodes(active).SetEntryPointsNodes(entry));
}
var total = result.Values.Sum(v => v.AvaliableNodesCount);
if (total > 0)
{
foreach (var pair in result)
{
pair.Value.CaclulatePercentage(total);
}
}
else
{
//total = result.Values.Sum(v => v.EntryPoints.Count());
foreach (var pair in result)
{
//var p = (double)pair.Value.EntryPoints.Count() / (double)total;
pair.Value.SetPercentage(0.2d);
}
}
return result;
}
public void Deserialize(IBinaryReader reader)
{
this._mappers = reader.ReadDictionary<int, HNSWMap<TFeature>>();
}
public void Serialize(IBinaryWriter writer)
{
writer.WriteDictionary<int, HNSWMap<TFeature>>(this._mappers);
}
}
}

@ -1,120 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using ZeroLevel.Services.Serialization;
namespace ZeroLevel.HNSW
{
public class LALGraph
: IBinarySerializable
{
private readonly LALLinks _links = new LALLinks();
public LALGraph() { }
public static LALGraph FromLALGraph(Stream stream)
{
var l = new LALGraph();
l.Deserialize(stream);
return l;
}
public static LALGraph FromHNSWGraph<TItem>(Stream stream)
{
var l = new LALGraph();
l.DeserializeFromHNSW<TItem>(stream);
return l;
}
public IEnumerable<int> KNearest(int k, SearchContext context)
{
var v = new VisitedBitSet(_links.Count, 1);
var C = new Queue<int>();
var W = new HashSet<int>();
var entryPoints = context.EntryPoints;
var nextEntry = new HashSet<int>();
do
{
foreach (var ep in entryPoints)
{
var neighboursIds = _links.FindNeighbors(ep);
for (int i = 0; i < neighboursIds.Length; ++i)
{
if (v.Contains(neighboursIds[i]) == false)
{
C.Enqueue(neighboursIds[i]);
nextEntry.Add(neighboursIds[i]);
}
}
v.Add(ep);
}
// run bfs
while (C.Count > 0)
{
// get next candidate to check and expand
var toExpand = C.Dequeue();
if (context.IsActiveNode(toExpand))
{
if (W.Count < k)
{
W.Add(toExpand);
if (W.Count > k)
{
W.Remove(W.First());
}
}
}
}
entryPoints = nextEntry.Select(id => id).ToList();
nextEntry.Clear();
}
while (W.Count < k && entryPoints.Any());
C.Clear();
v.Clear();
return W;
}
public void Deserialize(Stream stream)
{
using (var reader = new MemoryStreamReader(stream))
{
_links.Deserialize(reader);
}
}
public void DeserializeFromHNSW<TItem>(Stream stream)
{
using (var reader = new MemoryStreamReader(stream))
{
reader.ReadInt32(); // EntryPoint
reader.ReadInt32(); // MaxLayer
int count = reader.ReadInt32(); // Vectors count
for (int i = 0; i < count; i++)
{
var v = reader.ReadCompatible<TItem[]>(); // Vector
}
var lc = reader.ReadInt32(); // countLayers
_links.Deserialize(reader); // deserialize only base layer and skip another
}
}
public void Serialize(Stream stream)
{
using (var writer = new MemoryStreamWriter(stream))
{
_links.Serialize(writer);
}
}
public void Serialize(IBinaryWriter writer)
{
_links.Serialize(writer);
}
public void Deserialize(IBinaryReader reader)
{
_links.Deserialize(reader);
}
}
}

@ -1,81 +0,0 @@
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using ZeroLevel.Services.Serialization;
namespace ZeroLevel.HNSW
{
internal class LALLinks
: IBinarySerializable
{
private ConcurrentDictionary<int, int[]> _set = new ConcurrentDictionary<int, int[]>();
internal IDictionary<int, int[]> Links => _set;
private readonly int[] _empty = new int[0];
internal int Count => _set.Count;
public LALLinks()
{
}
internal IEnumerable<(int, int)> FindLinksForId(int id)
{
if (_set.ContainsKey(id))
{
return _set[id].Select(v => (id, v));
}
return Enumerable.Empty<(int, int)>();
}
internal int[] FindNeighbors(int id)
{
if (_set.ContainsKey(id))
{
return _set[id];
}
return _empty;
}
internal IEnumerable<(int, int)> Items()
{
return _set
.SelectMany(pair => _set[pair.Key]
.Select(v => (pair.Key, v)));
}
public void Dispose()
{
_set.Clear();
_set = null;
}
public void Serialize(IBinaryWriter writer)
{
writer.WriteInt32(_set.Count);
foreach (var record in _set)
{
writer.WriteInt32(record.Key);
writer.WriteCollection(record.Value);
}
}
public void Deserialize(IBinaryReader reader)
{
_set.Clear();
_set = null;
var count = reader.ReadInt32();
_set = new ConcurrentDictionary<int, int[]>(1, count);
for (int i = 0; i < count; i++)
{
var id = reader.ReadInt32();
var links_count = reader.ReadInt32();
_set[id] = new int[links_count];
for (int l = 0; l < links_count; l++)
{
_set[id][l] = reader.ReadInt32();
}
}
}
}
}

@ -1,79 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using ZeroLevel.Services.Serialization;
namespace ZeroLevel.HNSW
{
public class SplittedLALGraph
: IBinarySerializable
{
private IDictionary<int, LALGraph> _graphs;
public SplittedLALGraph()
{
_graphs = new Dictionary<int, LALGraph>();
}
public SplittedLALGraph(string filePath)
{
using (var fs = File.OpenRead(filePath))
{
using (var bs = new BufferedStream(fs, 1024 * 1024 * 32))
{
using (var reader = new MemoryStreamReader(bs))
{
Deserialize(reader);
}
}
}
}
public void Save(string filePath)
{
using (var fs = File.OpenWrite(filePath))
{
using (var bs = new BufferedStream(fs, 1024 * 1024 * 32))
{
using (var writer = new MemoryStreamWriter(bs))
{
Serialize(writer);
}
}
}
}
public void Append(LALGraph graph, int c)
{
_graphs.Add(c, graph);
}
public IDictionary<int, List<int>> KNearest(int k, IDictionary<int, SearchContext> contexts)
{
var result = new Dictionary<int, List<int>>();
int step = 1;
foreach (var graph in _graphs)
{
result.Add(graph.Key, new List<int>());
var context = contexts[graph.Key];
if (context.EntryPoints != null)
{
var partial_k = 1 + (int)(context.PercentInTotal * k);
var r = graph.Value.KNearest(partial_k, context) as HashSet<int>;
result[graph.Key].AddRange(r);
}
step++;
}
return result;
}
public void Serialize(IBinaryWriter writer)
{
writer.WriteDictionary<int, LALGraph>(this._graphs);
}
public void Deserialize(IBinaryReader reader)
{
this._graphs = reader.ReadDictionary<int, LALGraph>();
}
}
}

@ -1,470 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using ZeroLevel.HNSW.Services;
using ZeroLevel.Services.Serialization;
namespace ZeroLevel.HNSW
{
    /// <summary>
    /// NSW graph: one layer of the hierarchical small-world index. Holds only the
    /// link structure; the vectors themselves live in the shared VectorSet.
    /// </summary>
    internal sealed class Layer<TItem>
        : IBinarySerializable
    {
        private readonly NSWOptions<TItem> _options;
        private readonly VectorSet<TItem> _vectors;
        private readonly LinksSet _links;
        // Maximum number of connections per node on this layer (2*M on the base layer).
        public readonly int M;
        // Scratch map reused by Push: accepted neighbour id -> its distance to q.
        private readonly Dictionary<int, float> connections;
        internal IDictionary<int, HashSet<int>> Links => _links.Links;
        /// <summary>
        /// There are links in the layer
        /// </summary>
        internal bool HasLinks => (_links.Count > 0);
        // Neighbours of the given vector id on this layer.
        internal IEnumerable<int> this[int vector_index] => _links.FindNeighbors(vector_index);
        /// <summary>
        /// HNSW layer
        /// <remarks>
        /// Article: Section 4.1:
        /// "Selection of the Mmax0 (the maximum number of connections that an element can have in the zero layer) also
        /// has a strong influence on the search performance, especially in case of high quality(high recall) search.
        /// Simulations show that setting Mmax0 to M(this corresponds to kNN graphs on each layer if the neighbors
        /// selection heuristic is not used) leads to a very strong performance penalty at high recall.
        /// Simulations also suggest that 2∙M is a good choice for Mmax0;
        /// setting the parameter higher leads to performance degradation and excessive memory usage."
        /// </remarks>
        /// </summary>
        /// <param name="options">HNSW graph options</param>
        /// <param name="vectors">General vector set</param>
        internal Layer(NSWOptions<TItem> options, VectorSet<TItem> vectors, bool nswLayer)
        {
            _options = options;
            _vectors = vectors;
            // Base (zero) layer allows 2*M links per node, upper layers allow M.
            M = nswLayer ? 2 * _options.M : _options.M;
            _links = new LinksSet(M);
            connections = new Dictionary<int, float>(M + 1);
        }
        // Returns the linked node closest to the target (per targetCosts), skipping
        // zero distances (i.e. the target itself); -1 if no candidate qualifies.
        internal int FindEntryPointAtLayer(Func<int, float> targetCosts)
        {
            if (_links.Count == 0) return EntryPoint;
            var set = new HashSet<int>(_links.Items().Select(p => p.Item1));
            int minId = -1;
            float minDist = float.MaxValue;
            foreach (var id in set)
            {
                var d = targetCosts(id);
                if (d < minDist && Math.Abs(d) > float.Epsilon)
                {
                    minDist = d;
                    minId = id;
                }
            }
            return minId;
        }
        // Inserts element q into this layer: finds up to M nearest neighbours starting
        // from entry point ep and wires bidirectional links, evicting weaker links of
        // saturated neighbours when needed (heuristic neighbour selection).
        internal void Push(int q, int ep, MinHeap W, Func<int, float> distance)
        {
            if (HasLinks == false)
            {
                // First element of the layer becomes its entry point (self-link path).
                AddBidirectionallConnections(q, q);
            }
            else
            {
                // W ← SEARCH - LAYER(q, ep, efConstruction, lc)
                foreach (var i in KNearestAtLayer(ep, distance, _options.EFConstruction))
                {
                    W.Push(i);
                }
                int count = 0;
                connections.Clear();
                while (count < M && W.Count > 0)
                {
                    var nearest = W.Pop();
                    var nearest_nearest = GetNeighbors(nearest.Item1).ToArray();
                    if (nearest_nearest.Length < M)
                    {
                        // Neighbour has spare capacity: link directly.
                        if (AddBidirectionallConnections(q, nearest.Item1))
                        {
                            connections.Add(nearest.Item1, nearest.Item2);
                            count++;
                        }
                    }
                    else
                    {
                        // Neighbour is saturated: make room by dropping the weakest of
                        // q's accepted links and the neighbour's farthest link.
                        if ((M - count) < 2)
                        {
                            // remove link q - max_q
                            var max = connections.OrderBy(pair => pair.Value).First();
                            RemoveBidirectionallConnections(q, max.Key);
                            connections.Remove(max.Key);
                        }
                        // get nearest_nearest candidate
                        var mn_id = -1;
                        var mn_d = float.MinValue;
                        for (int i = 0; i < nearest_nearest.Length; i++)
                        {
                            var d = _options.Distance(_vectors[nearest.Item1], _vectors[nearest_nearest[i]]);
                            if (q != nearest_nearest[i] && connections.ContainsKey(nearest_nearest[i]) == false)
                            {
                                if (mn_id == -1 || d > mn_d)
                                {
                                    mn_d = d;
                                    mn_id = nearest_nearest[i];
                                }
                            }
                        }
                        // remove link neareset - nearest_nearest
                        RemoveBidirectionallConnections(nearest.Item1, mn_id);
                        // add link q - neareset
                        if (AddBidirectionallConnections(q, nearest.Item1))
                        {
                            connections.Add(nearest.Item1, nearest.Item2);
                            count++;
                        }
                        // add link q - max_nearest_nearest
                        if (AddBidirectionallConnections(q, mn_id))
                        {
                            connections.Add(mn_id, mn_d);
                            count++;
                        }
                    }
                }
            }
        }
        // Removes the link q <-> p in both directions.
        internal void RemoveBidirectionallConnections(int q, int p)
        {
            _links.RemoveIndex(q, p);
        }
        // Adds the link q <-> p; a self-link (q == p) instead links q to the current
        // entry point, or makes q the entry point if none exists yet.
        // Returns true when a new link was actually added.
        internal bool AddBidirectionallConnections(int q, int p)
        {
            if (q == p)
            {
                if (EntryPoint >= 0)
                {
                    return _links.Add(q, EntryPoint);
                }
                else
                {
                    EntryPoint = q;
                }
            }
            else
            {
                return _links.Add(q, p);
            }
            return false;
        }
        private int EntryPoint = -1;
        #region Implementation of https://arxiv.org/ftp/arxiv/papers/1603/1603.09320.pdf
        /// <summary>
        /// Algorithm 2
        /// </summary>
        /// <param name="q">query element</param>
        /// <param name="ep">enter points ep</param>
        /// <returns>Output: ef closest neighbors to q</returns>
        internal IEnumerable<(int, float)> KNearestAtLayer(int entryPointId, Func<int, float> targetCosts, int ef)
        {
            /*
            * v ep // set of visited elements
            * C ep // set of candidates
            * W ep // dynamic list of found nearest neighbors
            * while C > 0
            * c extract nearest element from C to q
            * f get furthest element from W to q
            * if distance(c, q) > distance(f, q)
            * break // all elements in W are evaluated
            * for each e neighbourhood(c) at layer lc // update C and W
            * if e v
            * v v e
            * f get furthest element from W to q
            * if distance(e, q) < distance(f, q) or W < ef
            * C C e
            * W W e
            * if W > ef
            * remove furthest element from W to q
            * return W
            */
            int farthestId;
            float farthestDistance;
            var d = targetCosts(entryPointId);
            var v = new VisitedBitSet(_vectors.Count, _options.M);
            // * v ← ep // set of visited elements
            v.Add(entryPointId);
            // * C ← ep // set of candidates
            var C = new MinHeap(ef);
            C.Push((entryPointId, d));
            // * W ← ep // dynamic list of found nearest neighbors
            var W = new MaxHeap(ef + 1);
            W.Push((entryPointId, d));
            // * while │C│ > 0
            while (C.Count > 0)
            {
                // * c ← extract nearest element from C to q
                var c = C.Pop();
                // * f ← get furthest element from W to q
                // * if distance(c, q) > distance(f, q)
                if (W.TryPeek(out _, out farthestDistance) && c.Item2 > farthestDistance)
                {
                    // * break // all elements in W are evaluated
                    break;
                }
                // * for each e ∈ neighbourhood(c) at layer lc // update C and W
                foreach (var e in GetNeighbors(c.Item1))
                {
                    // * if e ∉ v
                    if (!v.Contains(e))
                    {
                        // * v ← v e
                        v.Add(e);
                        // * f ← get furthest element from W to q
                        W.TryPeek(out farthestId, out farthestDistance);
                        var eDistance = targetCosts(e);
                        // * if distance(e, q) < distance(f, q) or │W│ < ef
                        if (W.Count < ef || (farthestId >= 0 && eDistance < farthestDistance))
                        {
                            // * C ← C e
                            C.Push((e, eDistance));
                            // * W ← W e
                            W.Push((e, eDistance));
                            // * if │W│ > ef
                            if (W.Count > ef)
                            {
                                // * remove furthest element from W to q
                                W.Pop();
                            }
                        }
                    }
                }
            }
            C.Clear();
            v.Clear();
            return W;
        }
        // Algorithm 2 variant: identical to the overload above, but only nodes accepted
        // by the SearchContext are allowed into the result set W (candidates C still
        // expand through inactive nodes).
        internal IEnumerable<(int, float)> KNearestAtLayer(int entryPointId, Func<int, float> targetCosts, int ef, SearchContext context)
        {
            int farthestId;
            float farthestDistance;
            var d = targetCosts(entryPointId);
            var v = new VisitedBitSet(_vectors.Count, _options.M);
            // * v ← ep // set of visited elements
            v.Add(entryPointId);
            // * C ← ep // set of candidates
            var C = new MinHeap(ef);
            C.Push((entryPointId, d));
            // * W ← ep // dynamic list of found nearest neighbors
            var W = new MaxHeap(ef + 1);
            if (context.IsActiveNode(entryPointId))
            {
                W.Push((entryPointId, d));
            }
            // * while │C│ > 0
            while (C.Count > 0)
            {
                // * c ← extract nearest element from C to q
                var c = C.Pop();
                // * f ← get furthest element from W to q
                // * if distance(c, q) > distance(f, q)
                if (W.TryPeek(out _, out farthestDistance) && c.Item2 > farthestDistance)
                {
                    // * break // all elements in W are evaluated
                    break;
                }
                // * for each e ∈ neighbourhood(c) at layer lc // update C and W
                foreach (var e in GetNeighbors(c.Item1))
                {
                    // * if e ∉ v
                    if (!v.Contains(e))
                    {
                        // * v ← v e
                        v.Add(e);
                        // * f ← get furthest element from W to q
                        W.TryPeek(out farthestId, out farthestDistance);
                        var eDistance = targetCosts(e);
                        // * if distance(e, q) < distance(f, q) or │W│ < ef
                        if (W.Count < ef || (farthestId >= 0 && eDistance < farthestDistance))
                        {
                            // * C ← C e
                            C.Push((e, eDistance));
                            // * W ← W e
                            if (context.IsActiveNode(e))
                            {
                                W.Push((e, eDistance));
                                if (W.Count > ef)
                                {
                                    W.Pop();
                                }
                            }
                        }
                    }
                }
            }
            C.Clear();
            v.Clear();
            return W;
        }
        /// <summary>
        /// Algorithm 2
        /// </summary>
        /// <param name="q">query element</param>
        /// <param name="ep">enter points ep</param>
        /// <returns>Output: ef closest neighbors to q</returns>
        // NOTE(review): the method name contains a Cyrillic 'в' ("KNearestAвtLayer") —
        // almost certainly a typo for "KNearestAtLayer", kept as-is because renaming
        // would break any caller outside this view. Verify usages before renaming.
        internal IEnumerable<(int, float)> KNearestAвtLayer(int entryPointId, Func<int, float> targetCosts, int ef, SearchContext context)
        {
            int farthestId;
            float farthestDistance;
            var d = targetCosts(entryPointId);
            var v = new VisitedBitSet(_vectors.Count, _options.M);
            // v ← ep // set of visited elements
            v.Add(entryPointId);
            // C ← ep // set of candidates
            var C = new MinHeap(ef);
            C.Push((entryPointId, d));
            // W ← ep // dynamic list of found nearest neighbors
            var W = new MaxHeap(ef + 1);
            // W ← ep // dynamic list of found nearest neighbors
            if (context.IsActiveNode(entryPointId))
            {
                W.Push((entryPointId, d));
            }
            // run bfs
            while (C.Count > 0)
            {
                // get next candidate to check and expand
                var toExpand = C.Pop();
                if (W.TryPeek(out _, out farthestDistance) && toExpand.Item2 > farthestDistance)
                {
                    // the closest candidate is farther than farthest result
                    break;
                }
                // expand candidate
                var neighboursIds = GetNeighbors(toExpand.Item1).ToArray();
                for (int i = 0; i < neighboursIds.Length; ++i)
                {
                    int neighbourId = neighboursIds[i];
                    if (!v.Contains(neighbourId))
                    {
                        W.TryPeek(out farthestId, out farthestDistance);
                        // enqueue perspective neighbours to expansion list
                        var neighbourDistance = targetCosts(neighbourId);
                        if (context.IsActiveNode(neighbourId))
                        {
                            if (W.Count < ef || (farthestId >= 0 && neighbourDistance < farthestDistance))
                            {
                                W.Push((neighbourId, neighbourDistance));
                                if (W.Count > ef)
                                {
                                    W.Pop();
                                }
                            }
                        }
                        if (W.TryPeek(out _, out farthestDistance) && neighbourDistance < farthestDistance)
                        {
                            C.Push((neighbourId, neighbourDistance));
                        }
                        v.Add(neighbourId);
                    }
                }
            }
            C.Clear();
            v.Clear();
            return W;
        }
        /// <summary>
        /// Algorithm 2, modified for LookAlike
        /// </summary>
        /// <param name="q">query element</param>
        /// <param name="ep">enter points ep</param>
        /// <returns>Output: ef closest neighbors to q</returns>
        // NOTE(review): the outer do/while repeats until W reaches ef elements; if the
        // reachable active set is smaller than ef this may not terminate — confirm
        // callers guarantee enough active nodes.
        internal IEnumerable<(int, float)> KNearestAtLayer(int ef, SearchContext context)
        {
            var distance = new Func<int, int, float>((id1, id2) => _options.Distance(_vectors[id1], _vectors[id2]));
            // v ← ep // set of visited elements
            var v = new VisitedBitSet(_vectors.Count, _options.M);
            // C ← ep // set of candidates
            var C = new MinHeap(ef);
            float dist;
            var W = new MaxHeap(ef + 1);
            var entryPoints = context.EntryPoints;
            do
            {
                foreach (var ep in entryPoints)
                {
                    var neighboursIds = GetNeighbors(ep).ToArray();
                    for (int i = 0; i < neighboursIds.Length; ++i)
                    {
                        C.Push((ep, distance(ep, neighboursIds[i])));
                    }
                    v.Add(ep);
                }
                // run bfs
                while (C.Count > 0)
                {
                    // get next candidate to check and expand
                    var toExpand = C.Pop();
                    if (W.TryPeek(out _, out dist) && toExpand.Item2 > dist)
                    {
                        // the closest candidate is farther than farthest result
                        break;
                    }
                    if (context.IsActiveNode(toExpand.Item1))
                    {
                        if (W.Count < ef || W.Count == 0 || (W.TryPeek(out _, out dist) && toExpand.Item2 < dist))
                        {
                            W.Push((toExpand.Item1, toExpand.Item2));
                            if (W.Count > ef)
                            {
                                W.Pop();
                            }
                        }
                    }
                }
                // Next wave starts from the current result set.
                entryPoints = W.Select(p => p.Item1);
            }
            while (W.Count < ef);
            C.Clear();
            v.Clear();
            return W;
        }
        #endregion
        internal IEnumerable<int> GetNeighbors(int id) => _links.FindNeighbors(id);
        public void Serialize(IBinaryWriter writer)
        {
            _links.Serialize(writer);
        }
        public void Deserialize(IBinaryReader reader)
        {
            _links.Deserialize(reader);
        }
        // internal Histogram GetHistogram(HistogramMode mode) => _links.CalculateHistogram(mode);
    }
}

@ -1,103 +0,0 @@
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using ZeroLevel.Services.Serialization;
namespace ZeroLevel.HNSW
{
public class LinksSet
{
private ConcurrentDictionary<int, HashSet<int>> _set = new ConcurrentDictionary<int, HashSet<int>>();
internal IDictionary<int, HashSet<int>> Links => _set;
internal int Count => _set.Count;
private readonly int _M;
public LinksSet(int M)
{
_M = M;
}
internal IEnumerable<(int, int)> FindLinksForId(int id)
{
if (_set.ContainsKey(id))
{
return _set[id].Select(v => (id, v));
}
return Enumerable.Empty<(int, int)>();
}
internal IEnumerable<int> FindNeighbors(int id)
{
if (_set.ContainsKey(id))
{
return _set[id];
}
return Enumerable.Empty<int>();
}
internal IEnumerable<(int, int)> Items()
{
return _set
.SelectMany(pair => _set[pair.Key]
.Select(v => (pair.Key, v)));
}
internal void RemoveIndex(int id1, int id2)
{
_set[id1].Remove(id2);
_set[id2].Remove(id1);
}
internal bool Add(int id1, int id2)
{
if (!_set.ContainsKey(id1))
{
_set[id1] = new HashSet<int>(_M + 1);
}
if (!_set.ContainsKey(id2))
{
_set[id2] = new HashSet<int>(_M + 1);
}
var r1 = _set[id1].Add(id2);
var r2 = _set[id2].Add(id1);
return r1 || r2;
}
public void Dispose()
{
_set.Clear();
_set = null;
}
public void Serialize(IBinaryWriter writer)
{
writer.WriteInt32(_set.Count);
foreach (var record in _set)
{
writer.WriteInt32(record.Key);
writer.WriteCollection(record.Value);
}
}
public void Deserialize(IBinaryReader reader)
{
/*if (reader.ReadBoolean() != false)
{
throw new InvalidOperationException("Incompatible format");
}*/
_set.Clear();
_set = null;
var count = reader.ReadInt32();
_set = new ConcurrentDictionary<int, HashSet<int>>();
for (int i = 0; i < count; i++)
{
var id = reader.ReadInt32();
var links_count = reader.ReadInt32();
_set[id] = new HashSet<int>(links_count);
for (var l = 0; l < links_count; l++)
{
_set[id].Add(reader.ReadInt32());
}
}
}
}
}

@ -1,130 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
namespace ZeroLevel.HNSW.Services
{
/// <summary>
/// Max element always on top
/// </summary>
public class MaxHeap :
IEnumerable<(int, float)>
{
private readonly List<(int, float)> _elements;
public MaxHeap(int size = -1)
{
if (size > 0)
_elements = new List<(int, float)>(size);
else
_elements = new List<(int, float)>();
}
private int GetLeftChildIndex(int elementIndex) => 2 * elementIndex + 1;
private int GetRightChildIndex(int elementIndex) => 2 * elementIndex + 2;
private int GetParentIndex(int elementIndex) => (elementIndex - 1) / 2;
private bool HasLeftChild(int elementIndex) => GetLeftChildIndex(elementIndex) < _elements.Count;
private bool HasRightChild(int elementIndex) => GetRightChildIndex(elementIndex) < _elements.Count;
private bool IsRoot(int elementIndex) => elementIndex == 0;
private (int, float) GetLeftChild(int elementIndex) => _elements[GetLeftChildIndex(elementIndex)];
private (int, float) GetRightChild(int elementIndex) => _elements[GetRightChildIndex(elementIndex)];
private (int, float) GetParent(int elementIndex) => _elements[GetParentIndex(elementIndex)];
public int Count => _elements.Count;
public void Clear()
{
_elements.Clear();
}
private void Swap(int firstIndex, int secondIndex)
{
var temp = _elements[firstIndex];
_elements[firstIndex] = _elements[secondIndex];
_elements[secondIndex] = temp;
}
public bool IsEmpty()
{
return _elements.Count == 0;
}
public bool TryPeek(out int id, out float value)
{
if (_elements.Count == 0)
{
id = -1;
value = 0;
return false;
}
id = _elements[0].Item1;
value = _elements[0].Item2;
return true;
}
public (int, float) Pop()
{
if (_elements.Count == 0)
throw new IndexOutOfRangeException();
var result = _elements[0];
_elements[0] = _elements[_elements.Count - 1];
_elements.RemoveAt(_elements.Count - 1);
ReCalculateDown();
return result;
}
public void Push((int, float) element)
{
_elements.Add(element);
ReCalculateUp();
}
private void ReCalculateDown()
{
int index = 0;
while (HasLeftChild(index))
{
var biggerIndex = GetLeftChildIndex(index);
if (HasRightChild(index) && GetRightChild(index).Item2 > GetLeftChild(index).Item2)
{
biggerIndex = GetRightChildIndex(index);
}
if (_elements[biggerIndex].Item2 < _elements[index].Item2)
{
break;
}
Swap(biggerIndex, index);
index = biggerIndex;
}
}
private void ReCalculateUp()
{
var index = _elements.Count - 1;
while (!IsRoot(index) && _elements[index].Item2 > GetParent(index).Item2)
{
var parentIndex = GetParentIndex(index);
Swap(parentIndex, index);
index = parentIndex;
}
}
public IEnumerator<(int, float)> GetEnumerator()
{
return _elements.GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return _elements.GetEnumerator();
}
}
}

@ -1,130 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
namespace ZeroLevel.HNSW.Services
{
/// <summary>
/// Min element always on top
/// </summary>
public class MinHeap :
IEnumerable<(int, float)>
{
private readonly List<(int, float)> _elements;
public MinHeap(int size = -1)
{
if (size > 0)
_elements = new List<(int, float)>(size);
else
_elements = new List<(int, float)>();
}
private int GetLeftChildIndex(int elementIndex) => 2 * elementIndex + 1;
private int GetRightChildIndex(int elementIndex) => 2 * elementIndex + 2;
private int GetParentIndex(int elementIndex) => (elementIndex - 1) / 2;
private bool HasLeftChild(int elementIndex) => GetLeftChildIndex(elementIndex) < _elements.Count;
private bool HasRightChild(int elementIndex) => GetRightChildIndex(elementIndex) < _elements.Count;
private bool IsRoot(int elementIndex) => elementIndex == 0;
private (int, float) GetLeftChild(int elementIndex) => _elements[GetLeftChildIndex(elementIndex)];
private (int, float) GetRightChild(int elementIndex) => _elements[GetRightChildIndex(elementIndex)];
private (int, float) GetParent(int elementIndex) => _elements[GetParentIndex(elementIndex)];
public int Count => _elements.Count;
public void Clear()
{
_elements.Clear();
}
private void Swap(int firstIndex, int secondIndex)
{
var temp = _elements[firstIndex];
_elements[firstIndex] = _elements[secondIndex];
_elements[secondIndex] = temp;
}
public bool IsEmpty()
{
return _elements.Count == 0;
}
public bool TryPeek(out int id, out float value)
{
if (_elements.Count == 0)
{
id = -1;
value = 0;
return false;
}
id = _elements[0].Item1;
value = _elements[0].Item2;
return true;
}
public (int, float) Pop()
{
if (_elements.Count == 0)
throw new IndexOutOfRangeException();
var result = _elements[0];
_elements[0] = _elements[_elements.Count - 1];
_elements.RemoveAt(_elements.Count - 1);
ReCalculateDown();
return result;
}
public void Push((int, float) element)
{
_elements.Add(element);
ReCalculateUp();
}
private void ReCalculateDown()
{
int index = 0;
while (HasLeftChild(index))
{
var smallerIndex = GetLeftChildIndex(index);
if (HasRightChild(index) && GetRightChild(index).Item2 < GetLeftChild(index).Item2)
{
smallerIndex = GetRightChildIndex(index);
}
if (_elements[smallerIndex].Item2 >= _elements[index].Item2)
{
break;
}
Swap(smallerIndex, index);
index = smallerIndex;
}
}
private void ReCalculateUp()
{
var index = _elements.Count - 1;
while (!IsRoot(index) && _elements[index].Item2 < GetParent(index).Item2)
{
var parentIndex = GetParentIndex(index);
Swap(parentIndex, index);
index = parentIndex;
}
}
public IEnumerator<(int, float)> GetEnumerator()
{
return _elements.GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return _elements.GetEnumerator();
}
}
}

@ -1,108 +0,0 @@
using System;
namespace ZeroLevel.HNSW.Services
{
public class Quantizator
{
private readonly float _min;
private readonly float _max;
private readonly float _diff;
public Quantizator(float min, float max)
{
_min = min;
_max = max;
_diff = _max - _min;
}
public byte[] Quantize(float[] v)
{
var result = new byte[v.Length];
for (int i = 0; i < v.Length; i++)
{
result[i] = _quantizeInRange(v[i]);
}
return result;
}
public int[] QuantizeToInt(float[] v)
{
var diff = v.Length % 4;
int count = (v.Length - diff) / 4;
var result = new int[((diff == 0) ? 0 : 1) + (v.Length / 4)];
byte[] buf = new byte[4];
int i = 0;
for (; i < count * 4; i += 4)
{
buf[0] = _quantizeInRange(v[i]);
buf[1] = _quantizeInRange(v[i + 1]);
buf[2] = _quantizeInRange(v[i + 2]);
buf[3] = _quantizeInRange(v[i + 3]);
result[(i >> 2)] = BitConverter.ToInt32(buf);
}
if (diff != 0)
{
for (var j = 0; j < diff; j++)
{
buf[j] = _quantizeInRange(v[i + j]);
}
for (var j = diff; j < 4; j++)
{
buf[j] = 0;
}
result[(i >> 2)] = BitConverter.ToInt32(buf);
}
return result;
}
public long[] QuantizeToLong(float[] v)
{
var diff = v.Length % 8;
int count = (v.Length - diff) / 8;
var result = new long[((diff == 0) ? 0 : 1) + (v.Length / 8)];
byte[] buf = new byte[8];
int i = 0;
for (; i < count * 8; i += 8)
{
buf[0] = _quantizeInRange(v[i + 0]);
buf[1] = _quantizeInRange(v[i + 1]);
buf[2] = _quantizeInRange(v[i + 2]);
buf[3] = _quantizeInRange(v[i + 3]);
buf[4] = _quantizeInRange(v[i + 4]);
buf[5] = _quantizeInRange(v[i + 5]);
buf[6] = _quantizeInRange(v[i + 6]);
buf[7] = _quantizeInRange(v[i + 7]);
result[(i >> 3)] = BitConverter.ToInt64(buf);
}
if (diff != 0)
{
for (var j = 0; j < diff; j++)
{
buf[j] = _quantizeInRange(v[i + j]);
}
for (var j = diff; j < 8; j++)
{
buf[j] = 0;
}
result[(i >> 3)] = BitConverter.ToInt64(buf);
}
return result;
}
//Map x in [0,1] to {0, 1, ..., 255}
private byte _quantize(float x)
{
x = (int)Math.Floor(256 * x);
if (x < 0) return 0;
else if (x > 255) return 255;
else return (byte)x;
}
//Map x in [min,max] to {0, 1, ..., 255}
private byte _quantizeInRange(float x)
{
return _quantize((x - _min) / (_diff));
}
}
}

@ -1,88 +0,0 @@
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using ZeroLevel.Services.Serialization;
namespace ZeroLevel.HNSW
{
internal sealed class VectorSet<T>
: IEnumerable<T>, IBinarySerializable
{
private List<T> _set = new List<T>();
private SpinLock _lock = new SpinLock();
internal T this[int index] => _set[index];
internal int Count => _set.Count;
internal int Append(T vector)
{
bool gotLock = false;
gotLock = false;
try
{
_lock.Enter(ref gotLock);
_set.Add(vector);
return _set.Count - 1;
}
finally
{
// Only give up the lock if you actually acquired it
if (gotLock) _lock.Exit();
}
}
internal int[] Append(IEnumerable<T> vectors)
{
bool gotLock = false;
int startIndex, endIndex;
gotLock = false;
try
{
_lock.Enter(ref gotLock);
startIndex = _set.Count;
_set.AddRange(vectors);
endIndex = _set.Count;
}
finally
{
// Only give up the lock if you actually acquired it
if (gotLock) _lock.Exit();
}
var ids = new int[endIndex - startIndex];
for (int i = startIndex, j = 0; i < endIndex; i++, j++)
{
ids[j] = i;
}
return ids;
}
public void Deserialize(IBinaryReader reader)
{
int count = reader.ReadInt32();
_set = new List<T>(count + 1);
for (int i = 0; i < count; i++)
{
_set.Add(reader.ReadCompatible<T>());
}
}
public void Serialize(IBinaryWriter writer)
{
writer.WriteInt32(_set.Count);
foreach (var r in _set)
{
writer.WriteCompatible<T>(r);
}
}
public IEnumerator<T> GetEnumerator()
{
return _set.GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return _set.GetEnumerator();
}
}
}

@ -1,375 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using ZeroLevel.HNSW.Services;
using ZeroLevel.Services.Serialization;
namespace ZeroLevel.HNSW
{
/// <summary>
/// Hierarchical Navigable Small World (HNSW) graph over a vector set.
/// Insertion follows Algorithm 1 and search follows Algorithm 5 of the paper
/// referenced in the region below. AddItems takes the write lock; the
/// KNearest variants take the read lock.
/// </summary>
public class SmallWorld<TItem>
{
// Construction/search options (layer count, M, EFConstruction, distance metric).
private readonly NSWOptions<TItem> _options;
// Append-only vector storage; a vector id is its index in this set.
private VectorSet<TItem> _vectors;
// _layers[0] is the bottom (most connected) layer; higher indices are sparser.
private Layer<TItem>[] _layers;
// Current global entry point node id and the top layer of the graph.
private int EntryPoint = 0;
private int MaxLayer = 0;
private readonly ProbabilityLayerNumberGenerator _layerLevelGenerator;
private ReaderWriterLockSlim _lockGraph = new ReaderWriterLockSlim();
// Distance between two already-stored vectors, addressed by id.
public readonly Func<int, int, float> DistanceFunction;
public TItem GetVector(int id) => _vectors[id];
public IDictionary<int, HashSet<int>> GetLinks() => _layers[0].Links;
/// <summary>Creates an empty world with the given options.</summary>
public SmallWorld(NSWOptions<TItem> options)
{
_options = options;
_vectors = new VectorSet<TItem>();
_layers = new Layer<TItem>[_options.LayersCount];
_layerLevelGenerator = new ProbabilityLayerNumberGenerator(_options.LayersCount, _options.M);
DistanceFunction = new Func<int, int, float>((id1, id2) => _options.Distance(_vectors[id1], _vectors[id2]));
for (int i = 0; i < _options.LayersCount; i++)
{
_layers[i] = new Layer<TItem>(_options, _vectors, i == 0);
}
}
/// <summary>Restores a world previously written by <see cref="Serialize"/>.</summary>
public SmallWorld(NSWOptions<TItem> options, Stream stream)
{
_options = options;
_layerLevelGenerator = new ProbabilityLayerNumberGenerator(_options.LayersCount, _options.M);
DistanceFunction = new Func<int, int, float>((id1, id2) => _options.Distance(_vectors[id1], _vectors[id2]));
Deserialize(stream);
}
/// <summary>
/// Search in the graph K for vectors closest to a given vector
/// </summary>
/// <param name="vector">Given vector</param>
/// <param name="k">Count of elements for search</param>
/// <returns>Tuples of (vector id, vector, distance to the query)</returns>
public IEnumerable<(int, TItem, float)> Search(TItem vector, int k)
{
foreach (var pair in KNearest(vector, k))
{
yield return (pair.Item1, _vectors[pair.Item1], pair.Item2);
}
}
// Same as Search(vector, k), but restricts candidates via the context when one is given.
public IEnumerable<(int, TItem, float)> Search(TItem vector, int k, SearchContext context)
{
if (context == null)
{
foreach (var pair in KNearest(vector, k))
{
yield return (pair.Item1, _vectors[pair.Item1], pair.Item2);
}
}
else
{
foreach (var pair in KNearest(vector, k, context))
{
yield return (pair.Item1, _vectors[pair.Item1], pair.Item2);
}
}
}
// Context-only search (no query vector); the context supplies the candidate set.
public IEnumerable<(int, TItem, float)> Search(int k, SearchContext context)
{
if (context == null)
{
throw new ArgumentNullException(nameof(context));
}
else
{
foreach (var pair in KNearest(k, context))
{
yield return (pair.Item1, _vectors[pair.Item1], pair.Item2);
}
}
}
/// <summary>
/// Adding vectors batch
/// </summary>
/// <param name="vectors">Vectors</param>
/// <returns>Vector identifiers in a graph</returns>
public int[] AddItems(IEnumerable<TItem> vectors)
{
_lockGraph.EnterWriteLock();
try
{
// Ids are assigned first; each new node is then wired into the graph.
var ids = _vectors.Append(vectors);
for (int i = 0; i < ids.Length; i++)
{
INSERT(ids[i]);
}
return ids;
}
finally
{
_lockGraph.ExitWriteLock();
}
}
#region https://arxiv.org/ftp/arxiv/papers/1603/1603.09320.pdf
/// <summary>
/// Algorithm 1
/// </summary>
// Inserts node q: descends from the top layer to q's randomly drawn level,
// then links q into every layer from min(L, l) down to 0.
private void INSERT(int q)
{
var distance = new Func<int, float>(candidate => _options.Distance(_vectors[q], _vectors[candidate]));
// W ← ∅ // list for the currently found nearest elements
var W = new MinHeap(_options.EFConstruction + 1);
// ep ← get enter point for hnsw
var ep = _layers[MaxLayer].FindEntryPointAtLayer(distance);
if (ep == -1)
ep = EntryPoint;
// NOTE(review): epDist is assigned here and below but never read — candidate for removal.
var epDist = distance(ep);
// L ← level of ep // top layer for hnsw
var L = MaxLayer;
// l ← ⌊-ln(unif(0..1))∙mL⌋ // new elements level
int l = _layerLevelGenerator.GetRandomLayer();
// Проход с верхнего уровня до уровня где появляется элемент, для нахождения точки входа
// (Greedy descent from the top layer down to the element's level to find the entry point.)
int id;
float value;
// for lc ← L … l+1
for (int lc = L; lc > l; --lc)
{
// W ← SEARCH-LAYER(q, ep, ef = 1, lc)
foreach (var i in _layers[lc].KNearestAtLayer(ep, distance, 1))
{
W.Push(i);
}
// ep ← get the nearest element from W to q
if (W.TryPeek(out id, out value))
{
ep = id;
epDist = value;
}
W.Clear();
}
//for lc ← min(L, l) … 0
// connecting new node to the small world
for (int lc = Math.Min(L, l); lc >= 0; --lc)
{
_layers[lc].Push(q, ep, W, distance);
// ep ← W
if (W.TryPeek(out id, out value))
{
ep = id;
epDist = value;
}
W.Clear();
}
// if l > L
if (l > L)
{
// set enter point for hnsw to q
L = l;
MaxLayer = l;
EntryPoint = ep;
}
}
// Diagnostic: checks per-node link counts on layer 0 and that a BFS from
// node 0 reaches every stored vector (i.e. the bottom layer is connected).
public void TestWorld()
{
for (var v = 0; v < _vectors.Count; v++)
{
var nearest = _layers[0][v].ToArray();
if (nearest.Length > _layers[0].M)
{
Console.WriteLine($"V{v}. Count of links ({nearest.Length}) more than max ({_layers[0].M})");
}
}
// coverage test
var ep = 0;
var visited = new HashSet<int>();
var next = new Stack<int>();
next.Push(ep);
while (next.Count > 0)
{
ep = next.Pop();
visited.Add(ep);
foreach (var n in _layers[0].GetNeighbors(ep))
{
if (visited.Contains(n) == false)
{
next.Push(n);
}
}
}
// NOTE(review): the message says "less than" but the condition flags any mismatch.
if (visited.Count != _vectors.Count)
{
Console.Write($"Vectors count ({_vectors.Count}) less than BFS visited nodes count ({visited.Count})");
}
}
/// <summary>
/// Algorithm 5
/// </summary>
// K-nearest search: greedy descent with ef = 1 through the upper layers,
// then a full ef = k search on the bottom layer.
private IEnumerable<(int, float)> KNearest(TItem q, int k)
{
_lockGraph.EnterReadLock();
try
{
if (_vectors.Count == 0)
{
return Enumerable.Empty<(int, float)>();
}
int id;
float value;
var distance = new Func<int, float>(candidate => _options.Distance(q, _vectors[candidate]));
// W ← ∅ // set for the current nearest elements
var W = new MinHeap(k + 1);
// ep ← get enter point for hnsw
var ep = EntryPoint;
// L ← level of ep // top layer for hnsw
var L = MaxLayer;
// for lc ← L … 1
for (int layer = L; layer > 0; --layer)
{
// W ← SEARCH-LAYER(q, ep, ef = 1, lc)
foreach (var i in _layers[layer].KNearestAtLayer(ep, distance, 1))
{
W.Push(i);
}
// ep ← get nearest element from W to q
if (W.TryPeek(out id, out value))
{
ep = id;
}
W.Clear();
}
// W ← SEARCH-LAYER(q, ep, ef, lc =0)
foreach (var i in _layers[0].KNearestAtLayer(ep, distance, k))
{
W.Push(i);
}
// return K nearest elements from W to q
return W;
}
finally
{
_lockGraph.ExitReadLock();
}
}
// Same as KNearest(q, k) but the bottom-layer search is filtered by the context.
private IEnumerable<(int, float)> KNearest(TItem q, int k, SearchContext context)
{
_lockGraph.EnterReadLock();
try
{
if (_vectors.Count == 0)
{
return Enumerable.Empty<(int, float)>();
}
int id;
float value;
var distance = new Func<int, float>(candidate => _options.Distance(q, _vectors[candidate]));
// W ← ∅ // set for the current nearest elements
var W = new MinHeap(k + 1);
// ep ← get enter point for hnsw
var ep = EntryPoint;
// L ← level of ep // top layer for hnsw
var L = MaxLayer;
// for lc ← L … 1
for (int layer = L; layer > 0; --layer)
{
// W ← SEARCH-LAYER(q, ep, ef = 1, lc)
foreach (var i in _layers[layer].KNearestAtLayer(ep, distance, 1))
{
W.Push(i);
}
// ep ← get nearest element from W to q
if (W.TryPeek(out id, out value))
{
ep = id;
}
W.Clear();
}
// W ← SEARCH-LAYER(q, ep, ef, lc =0)
foreach (var i in _layers[0].KNearestAtLayer(ep, distance, k, context))
{
W.Push(i);
}
// return K nearest elements from W to q
return W;
}
finally
{
_lockGraph.ExitReadLock();
}
}
// Context-only variant: no query vector, only the bottom layer is consulted.
private IEnumerable<(int, float)> KNearest(int k, SearchContext context)
{
_lockGraph.EnterReadLock();
try
{
if (_vectors.Count == 0)
{
return Enumerable.Empty<(int, float)>();
}
// W ← ∅ // set for the current nearest elements
var W = new MinHeap(k + 1);
// W ← SEARCH-LAYER(q, ep, ef, lc =0)
foreach (var i in _layers[0].KNearestAtLayer(k, context))
{
W.Push(i);
}
// return K nearest elements from W to q
return W;
}
finally
{
_lockGraph.ExitReadLock();
}
}
#endregion
/// <summary>Writes entry point, top layer, vectors and all layers to the stream.</summary>
public void Serialize(Stream stream)
{
using (var writer = new MemoryStreamWriter(stream))
{
writer.WriteInt32(EntryPoint);
writer.WriteInt32(MaxLayer);
_vectors.Serialize(writer);
writer.WriteInt32(_layers.Length);
foreach (var l in _layers)
{
l.Serialize(writer);
}
}
}
/// <summary>Reads state in the same order <see cref="Serialize"/> wrote it.</summary>
public void Deserialize(Stream stream)
{
using (var reader = new MemoryStreamReader(stream))
{
this.EntryPoint = reader.ReadInt32();
this.MaxLayer = reader.ReadInt32();
_vectors = new VectorSet<TItem>();
_vectors.Deserialize(reader);
var countLayers = reader.ReadInt32();
_layers = new Layer<TItem>[countLayers];
for (int i = 0; i < countLayers; i++)
{
_layers[i] = new Layer<TItem>(_options, _vectors, i == 0);
_layers[i].Deserialize(reader);
}
}
}
}
}

@ -1,12 +0,0 @@
using System.IO;
namespace ZeroLevel.HNSW
{
/// <summary>
/// Factory helpers for <see cref="SmallWorld{TItem}"/>.
/// </summary>
public static class SmallWorld
{
    /// <summary>Creates an empty small-world graph with the given options.</summary>
    public static SmallWorld<TItem> CreateWorld<TItem>(NSWOptions<TItem> options)
    {
        return new SmallWorld<TItem>(options);
    }

    /// <summary>Restores a small-world graph previously serialized to a stream.</summary>
    public static SmallWorld<TItem> CreateWorldFrom<TItem>(NSWOptions<TItem> options, Stream stream)
    {
        return new SmallWorld<TItem>(options, stream);
    }
}

@ -1,158 +0,0 @@
using System;
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace ZeroLevel.HNSW
{
/// <summary>
/// Calculates cosine similarity.
/// </summary>
/// <remarks>
/// Intuition behind selecting float as a carrier.
///
/// 1. In practice we work with vectors of dimensionality 100 and each component has value in range [-1; 1]
/// There certainly is a possibility of underflow.
/// But we assume that such cases are rare and we can rely on such underflow losses.
///
/// 2. According to the article http://www.ti3.tuhh.de/paper/rump/JeaRu13.pdf
/// the floating point rounding error is less then 100 * 2^-24 * sqrt(100) * sqrt(100) &lt; 0.0005960
/// We deem such precision is satisfactory for our needs.
/// </remarks>
public static class CosineDistance
{
    /// <summary>
    /// Calculates cosine distance with assumption that u and v are unit vectors.
    /// </summary>
    /// <param name="u">Left vector.</param>
    /// <param name="v">Right vector.</param>
    /// <returns>Cosine distance between u and v.</returns>
    /// <exception cref="ArgumentException">When the vectors differ in length.</exception>
    public static float ForUnits(float[] u, float[] v)
    {
        if (u.Length != v.Length)
        {
            throw new ArgumentException("Vectors have non-matching dimensions");
        }
        // For unit vectors the cosine similarity is just the dot product.
        float dot = 0;
        for (int i = 0; i < u.Length; ++i)
        {
            dot += u[i] * v[i];
        }
        return 1 - dot;
    }

    /// <summary>
    /// Calculates cosine distance optimized using SIMD instructions.
    /// </summary>
    /// <param name="u">Left vector.</param>
    /// <param name="v">Right vector.</param>
    /// <returns>Cosine distance between u and v.</returns>
    /// <exception cref="NotSupportedException">When SIMD is not hardware accelerated.</exception>
    /// <exception cref="ArgumentException">When the vectors differ in length.</exception>
    public static float SIMD(float[] u, float[] v)
    {
        if (!Vector.IsHardwareAccelerated)
        {
            throw new NotSupportedException($"SIMD version of {nameof(CosineDistance)} is not supported");
        }
        if (u.Length != v.Length)
        {
            throw new ArgumentException("Vectors have non-matching dimensions");
        }
        float dot = 0;
        // Vector2 is (ab)used as a pair of accumulators: X = |u|^2, Y = |v|^2.
        var norm = default(Vector2);
        int step = Vector<float>.Count;
        int i, to = u.Length - step;
        for (i = 0; i <= to; i += step)
        {
            var ui = new Vector<float>(u, i);
            var vi = new Vector<float>(v, i);
            dot += Vector.Dot(ui, vi);
            norm.X += Vector.Dot(ui, ui);
            norm.Y += Vector.Dot(vi, vi);
        }
        // Scalar tail for the elements that don't fill a full SIMD lane.
        for (; i < u.Length; ++i)
        {
            dot += u[i] * v[i];
            norm.X += u[i] * u[i];
            norm.Y += v[i] * v[i];
        }
        norm = Vector2.SquareRoot(norm);
        float n = (norm.X * norm.Y);
        // Degenerate case: at least one zero-magnitude vector — treat as maximally distant.
        if (n == 0)
        {
            return 1f;
        }
        var similarity = dot / n;
        return 1f - similarity;
    }

    /// <summary>
    /// Calculates cosine distance with assumption that u and v are unit vectors using SIMD instructions.
    /// </summary>
    /// <param name="u">Left vector.</param>
    /// <param name="v">Right vector.</param>
    /// <returns>Cosine distance between u and v.</returns>
    public static float SIMDForUnits(float[] u, float[] v)
    {
        return 1f - DotProduct(u, v);
    }

    // Precomputed multiples of the SIMD lane width used for loop unrolling.
    private static readonly int _vs1 = Vector<float>.Count;
    private static readonly int _vs2 = 2 * Vector<float>.Count;
    private static readonly int _vs3 = 3 * Vector<float>.Count;
    private static readonly int _vs4 = 4 * Vector<float>.Count;

    /// <summary>
    /// 4x-unrolled SIMD dot product with a scalar tail.
    /// Assumes lhs and rhs have equal length (callers guarantee this).
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private static float DotProduct(float[] lhs, float[] rhs)
    {
        // Fix: the original declared 'ref float[]' parameters. Arrays are
        // reference types and are never reassigned here, so 'ref' only
        // misled readers; plain parameters are behaviorally identical.
        float result = 0f;
        var count = lhs.Length;
        var offset = 0;
        while (count >= _vs4)
        {
            result += Vector.Dot(new Vector<float>(lhs, offset), new Vector<float>(rhs, offset));
            result += Vector.Dot(new Vector<float>(lhs, offset + _vs1), new Vector<float>(rhs, offset + _vs1));
            result += Vector.Dot(new Vector<float>(lhs, offset + _vs2), new Vector<float>(rhs, offset + _vs2));
            result += Vector.Dot(new Vector<float>(lhs, offset + _vs3), new Vector<float>(rhs, offset + _vs3));
            if (count == _vs4) return result;
            count -= _vs4;
            offset += _vs4;
        }
        if (count >= _vs2)
        {
            result += Vector.Dot(new Vector<float>(lhs, offset), new Vector<float>(rhs, offset));
            result += Vector.Dot(new Vector<float>(lhs, offset + _vs1), new Vector<float>(rhs, offset + _vs1));
            if (count == _vs2) return result;
            count -= _vs2;
            offset += _vs2;
        }
        if (count >= _vs1)
        {
            result += Vector.Dot(new Vector<float>(lhs, offset), new Vector<float>(rhs, offset));
            if (count == _vs1) return result;
            count -= _vs1;
            offset += _vs1;
        }
        // Scalar remainder shorter than one SIMD lane.
        while (count > 0)
        {
            result += lhs[offset] * rhs[offset];
            offset++; count--;
        }
        return result;
    }
}
}

@ -1,507 +0,0 @@
using System;
using System.Runtime.CompilerServices;
namespace ZeroLevel.HNSW
{
/// <summary>
/// Facade over <see cref="ThreadSafeFastRandom"/> that also advertises whether
/// callers may use it from multiple threads in parallel.
/// </summary>
public sealed class DefaultRandomGenerator
{
    /// <summary>
    /// This is the default configuration (it supports the optimization process to be executed on multiple threads)
    /// </summary>
    public static DefaultRandomGenerator Instance { get; } = new DefaultRandomGenerator(allowParallel: true);

    /// <summary>
    /// This uses the same random number generator but forces the optimization process to run on a single thread (which may be desirable if multiple requests may be processed concurrently
    /// or if it is otherwise not desirable to let a single request access all of the CPUs)
    /// </summary>
    public static DefaultRandomGenerator DisableThreading { get; } = new DefaultRandomGenerator(allowParallel: false);

    private DefaultRandomGenerator(bool allowParallel)
    {
        IsThreadSafe = allowParallel;
    }

    /// <summary>True when the consumer is allowed to parallelize work using this generator.</summary>
    public bool IsThreadSafe { get; }

    /// <summary>Random integer in [minValue, maxValue).</summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public int Next(int minValue, int maxValue)
    {
        return ThreadSafeFastRandom.Next(minValue, maxValue);
    }

    /// <summary>Random float in [0, 1).</summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public float NextFloat()
    {
        return ThreadSafeFastRandom.NextFloat();
    }

    /// <summary>Fills the buffer with random floats in [0, 1).</summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public void NextFloats(Span<float> buffer)
    {
        ThreadSafeFastRandom.NextFloats(buffer);
    }
}
/// <summary>
/// Thread-safe wrapper around <see cref="FastRandom"/>: each thread gets its
/// own lazily-created instance, seeded from a single lock-protected global
/// <see cref="Random"/> so thread seeds don't collide.
/// </summary>
internal static class ThreadSafeFastRandom
{
    private static readonly Random _global = new Random();
    [ThreadStatic]
    private static FastRandom _local;

    /// <summary>Draws a distinct seed from the shared generator under a lock.</summary>
    private static int GetGlobalSeed()
    {
        int seed;
        lock (_global)
        {
            seed = _global.Next();
        }
        return seed;
    }

    /// <summary>
    /// Returns the calling thread's generator, creating it on first use.
    /// Replaces the identical lazy-init boilerplate previously copied into
    /// every public method.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private static FastRandom GetLocal()
    {
        var inst = _local;
        if (inst == null)
        {
            _local = inst = new FastRandom(GetGlobalSeed());
        }
        return inst;
    }

    /// <summary>
    /// Returns a non-negative random integer.
    /// </summary>
    /// <returns>A 32-bit signed integer that is greater than or equal to 0 and less than System.Int32.MaxValue.</returns>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static int Next()
    {
        return GetLocal().Next();
    }

    /// <summary>
    /// Returns a non-negative random integer that is less than the specified maximum.
    /// </summary>
    /// <param name="maxValue">The exclusive upper bound of the random number to be generated. maxValue must be greater than or equal to 0.</param>
    /// <returns>A 32-bit signed integer that is greater than or equal to 0, and less than maxValue. If maxValue equals 0, 0 is returned.</returns>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static int Next(int maxValue)
    {
        // Fix: FastRandom.Next(maxValue) already returns values strictly below
        // maxValue for maxValue > 0, so the old "retry while ans == maxValue"
        // do/while never actually retried — except when maxValue == 0, where
        // the only possible result (0) equals maxValue and the loop spun forever.
        return GetLocal().Next(maxValue);
    }

    /// <summary>
    /// Returns a random integer that is within a specified range.
    /// </summary>
    /// <param name="minValue">The inclusive lower bound of the random number returned.</param>
    /// <param name="maxValue">The exclusive upper bound of the random number returned. maxValue must be greater than or equal to minValue.</param>
    /// <returns>A 32-bit signed integer greater than or equal to minValue and less than maxValue; that is, the range of return values includes minValue but not maxValue. If minValue
    /// equals maxValue, minValue is returned.</returns>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static int Next(int minValue, int maxValue)
    {
        return GetLocal().Next(minValue, maxValue);
    }

    /// <summary>
    /// Generates a random float. Values returned are from 0.0 up to but not including 1.0.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static float NextFloat()
    {
        return GetLocal().NextFloat();
    }

    /// <summary>
    /// Fills the elements of a specified span with random floats.
    /// </summary>
    /// <param name="buffer">A span of floats to contain random numbers.</param>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static void NextFloats(Span<float> buffer)
    {
        GetLocal().NextFloats(buffer);
    }
}
/// <summary>
/// A fast random number generator for .NET, from https://www.codeproject.com/Articles/9187/A-fast-equivalent-for-System-Random
/// Colin Green, January 2005
///
/// September 4th 2005
/// Added NextBytesUnsafe() - commented out by default.
/// Fixed bug in Reinitialise() - y,z and w variables were not being reset.
///
/// Key points:
/// 1) Based on a simple and fast xor-shift pseudo random number generator (RNG) specified in:
/// Marsaglia, George. (2003). Xorshift RNGs.
/// http://www.jstatsoft.org/v08/i14/xorshift.pdf
///
/// This particular implementation of xorshift has a period of 2^128-1. See the above paper to see
/// how this can be easily extended if you need a longer period. At the time of writing I could find no
/// information on the period of System.Random for comparison.
///
/// 2) Faster than System.Random. Up to 8x faster, depending on which methods are called.
///
/// 3) Direct replacement for System.Random. This class implements all of the methods that System.Random
/// does plus some additional methods. The like named methods are functionally equivalent.
///
/// 4) Allows fast re-initialisation with a seed, unlike System.Random which accepts a seed at construction
/// time which then executes a relatively expensive initialisation routine. This provides a vast speed improvement
/// if you need to reset the pseudo-random number sequence many times, e.g. if you want to re-generate the same
/// sequence many times. An alternative might be to cache random numbers in an array, but that approach is limited
/// by memory capacity and the fact that you may also want a large number of different sequences cached. Each sequence
/// can each be represented by a single seed value (int) when using FastRandom.
///
/// Notes.
/// A further performance improvement can be obtained by declaring local variables as static, thus avoiding
/// re-allocation of variables on each call. However care should be taken if multiple instances of
/// FastRandom are in use or if being used in a multi-threaded environment.
///
/// NOTE: not thread-safe; wrap per-thread (see ThreadSafeFastRandom) for concurrent use.
/// </summary>
internal class FastRandom
{
// The +1 ensures NextDouble doesn't generate 1.0
const float FLOAT_UNIT_INT = 1.0f / ((float)int.MaxValue + 1.0f);
const double REAL_UNIT_INT = 1.0 / ((double)int.MaxValue + 1.0);
const double REAL_UNIT_UINT = 1.0 / ((double)uint.MaxValue + 1.0);
const uint Y = 842502087, Z = 3579807591, W = 273326509;
// The xorshift128 state; only x varies with the seed (see Reinitialise).
uint x, y, z, w;
/// <summary>
/// Initialises a new instance using time dependent seed.
/// </summary>
public FastRandom()
{
// Initialise using the system tick count.
Reinitialise(Environment.TickCount);
}
/// <summary>
/// Initialises a new instance using an int value as seed.
/// This constructor signature is provided to maintain compatibility with
/// System.Random
/// </summary>
public FastRandom(int seed)
{
Reinitialise(seed);
}
/// <summary>
/// Reinitialises using an int value as a seed.
/// </summary>
public void Reinitialise(int seed)
{
// The only stipulation stated for the xorshift RNG is that at least one of
// the seeds x,y,z,w is non-zero. We fulfill that requirement by only allowing
// resetting of the x seed
x = (uint)seed;
y = Y;
z = Z;
w = W;
}
/// <summary>
/// Generates a random int over the range 0 to int.MaxValue-1.
/// MaxValue is not generated in order to remain functionally equivalent to System.Random.Next().
/// This does slightly eat into some of the performance gain over System.Random, but not much.
/// For better performance see:
///
/// Call NextInt() for an int over the range 0 to int.MaxValue.
///
/// Call NextUInt() and cast the result to an int to generate an int over the full Int32 value range
/// including negative values.
/// </summary>
public int Next()
{
uint t = (x ^ (x << 11));
x = y; y = z; z = w;
w = (w ^ (w >> 19)) ^ (t ^ (t >> 8));
// Handle the special case where the value int.MaxValue is generated. This is outside of
// the range of permitted values, so we therefore call Next() to try again.
uint rtn = w & 0x7FFFFFFF;
if (rtn == 0x7FFFFFFF)
return Next();
return (int)rtn;
}
/// <summary>
/// Generates a random int over the range 0 to upperBound-1, and not including upperBound.
/// Note: upperBound == 0 yields 0.
/// </summary>
public int Next(int upperBound)
{
if (upperBound < 0)
throw new ArgumentOutOfRangeException("upperBound", upperBound, "upperBound must be >=0");
uint t = (x ^ (x << 11));
x = y; y = z; z = w;
// The explicit int cast before the first multiplication gives better performance.
// See comments in NextDouble.
return (int)((REAL_UNIT_INT * (int)(0x7FFFFFFF & (w = (w ^ (w >> 19)) ^ (t ^ (t >> 8))))) * upperBound);
}
/// <summary>
/// Generates a random int over the range lowerBound to upperBound-1, and not including upperBound.
/// upperBound must be >= lowerBound. lowerBound may be negative.
/// </summary>
public int Next(int lowerBound, int upperBound)
{
if (lowerBound > upperBound)
throw new ArgumentOutOfRangeException("upperBound", upperBound, "upperBound must be >=lowerBound");
uint t = (x ^ (x << 11));
x = y; y = z; z = w;
// The explicit int cast before the first multiplication gives better performance.
// See comments in NextDouble.
int range = upperBound - lowerBound;
if (range < 0)
{ // If range is <0 then an overflow has occurred and must resort to using long integer arithmetic instead (slower).
// We also must use all 32 bits of precision, instead of the normal 31, which again is slower.
return lowerBound + (int)((REAL_UNIT_UINT * (double)(w = (w ^ (w >> 19)) ^ (t ^ (t >> 8)))) * (double)((long)upperBound - (long)lowerBound));
}
// 31 bits of precision will suffice if range<=int.MaxValue. This allows us to cast to an int and gain
// a little more performance.
return lowerBound + (int)((REAL_UNIT_INT * (double)(int)(0x7FFFFFFF & (w = (w ^ (w >> 19)) ^ (t ^ (t >> 8))))) * (double)range);
}
/// <summary>
/// Generates a random double. Values returned are from 0.0 up to but not including 1.0.
/// </summary>
public double NextDouble()
{
uint t = (x ^ (x << 11));
x = y; y = z; z = w;
// Here we can gain a 2x speed improvement by generating a value that can be cast to
// an int instead of the more easily available uint. If we then explicitly cast to an
// int the compiler will then cast the int to a double to perform the multiplication,
// this final cast is a lot faster than casting from a uint to a double. The extra cast
// to an int is very fast (the allocated bits remain the same) and so the overall effect
// of the extra cast is a significant performance improvement.
//
// Also note that the loss of one bit of precision is equivalent to what occurs within
// System.Random.
return (REAL_UNIT_INT * (int)(0x7FFFFFFF & (w = (w ^ (w >> 19)) ^ (t ^ (t >> 8)))));
}
/// <summary>
/// Generates a random float. Values returned are from 0.0 up to but not including 1.0.
/// </summary>
public float NextFloat()
{
// State is copied to locals, advanced once, and written back.
uint x = this.x, y = this.y, z = this.z, w = this.w;
uint t = (x ^ (x << 11));
x = y; y = z; z = w;
w = (w ^ (w >> 19)) ^ (t ^ (t >> 8));
var value = FLOAT_UNIT_INT * (int)(0x7FFFFFFF & w);
this.x = x; this.y = y; this.z = z; this.w = w;
return value;
}
/// <summary>
/// Fills the provided span with random floats in [0, 1).
/// </summary>
public void NextFloats(Span<float> buffer)
{
// State is kept in locals for the whole loop and written back once at the end.
uint x = this.x, y = this.y, z = this.z, w = this.w;
int i = 0;
uint t;
for (int bound = buffer.Length; i < bound;)
{
t = (x ^ (x << 11));
x = y; y = z; z = w;
w = (w ^ (w >> 19)) ^ (t ^ (t >> 8));
buffer[i++] = FLOAT_UNIT_INT * (int)(0x7FFFFFFF & w);
}
this.x = x; this.y = y; this.z = z; this.w = w;
}
/// <summary>
/// Fills the provided byte array with random bytes.
/// This method is functionally equivalent to System.Random.NextBytes().
/// </summary>
public void NextBytes(byte[] buffer)
{
// Fill up the bulk of the buffer in chunks of 4 bytes at a time.
uint x = this.x, y = this.y, z = this.z, w = this.w;
int i = 0;
uint t;
for (int bound = buffer.Length - 3; i < bound;)
{
// Generate 4 bytes.
// Increased performance is achieved by generating 4 random bytes per loop.
// Also note that no mask needs to be applied to zero out the higher order bytes before
// casting because the cast ignores those bytes. Thanks to Stefan Troschütz for pointing this out.
t = (x ^ (x << 11));
x = y; y = z; z = w;
w = (w ^ (w >> 19)) ^ (t ^ (t >> 8));
buffer[i++] = (byte)w;
buffer[i++] = (byte)(w >> 8);
buffer[i++] = (byte)(w >> 16);
buffer[i++] = (byte)(w >> 24);
}
// Fill up any remaining bytes in the buffer.
if (i < buffer.Length)
{
// Generate 4 bytes.
t = (x ^ (x << 11));
x = y; y = z; z = w;
w = (w ^ (w >> 19)) ^ (t ^ (t >> 8));
buffer[i++] = (byte)w;
if (i < buffer.Length)
{
buffer[i++] = (byte)(w >> 8);
if (i < buffer.Length)
{
buffer[i++] = (byte)(w >> 16);
if (i < buffer.Length)
{
buffer[i] = (byte)(w >> 24);
}
}
}
}
this.x = x; this.y = y; this.z = z; this.w = w;
}
/// <summary>
/// Fills the provided span with random bytes.
/// This method is functionally equivalent to System.Random.NextBytes().
/// </summary>
public void NextBytes(Span<byte> buffer)
{
// Fill up the bulk of the buffer in chunks of 4 bytes at a time.
uint x = this.x, y = this.y, z = this.z, w = this.w;
int i = 0;
uint t;
for (int bound = buffer.Length - 3; i < bound;)
{
// Generate 4 bytes.
// Increased performance is achieved by generating 4 random bytes per loop.
// Also note that no mask needs to be applied to zero out the higher order bytes before
// casting because the cast ignores those bytes. Thanks to Stefan Troschütz for pointing this out.
t = (x ^ (x << 11));
x = y; y = z; z = w;
w = (w ^ (w >> 19)) ^ (t ^ (t >> 8));
buffer[i++] = (byte)w;
buffer[i++] = (byte)(w >> 8);
buffer[i++] = (byte)(w >> 16);
buffer[i++] = (byte)(w >> 24);
}
// Fill up any remaining bytes in the buffer.
if (i < buffer.Length)
{
// Generate 4 bytes.
t = (x ^ (x << 11));
x = y; y = z; z = w;
w = (w ^ (w >> 19)) ^ (t ^ (t >> 8));
buffer[i++] = (byte)w;
if (i < buffer.Length)
{
buffer[i++] = (byte)(w >> 8);
if (i < buffer.Length)
{
buffer[i++] = (byte)(w >> 16);
if (i < buffer.Length)
{
buffer[i] = (byte)(w >> 24);
}
}
}
}
this.x = x; this.y = y; this.z = z; this.w = w;
}
/// <summary>
/// Generates a uint. Values returned are over the full range of a uint,
/// uint.MinValue to uint.MaxValue, inclusive.
///
/// This is the fastest method for generating a single random number because the underlying
/// random number generator algorithm generates 32 random bits that can be cast directly to
/// a uint.
/// </summary>
public uint NextUInt()
{
uint t = (x ^ (x << 11));
x = y; y = z; z = w;
return (w = (w ^ (w >> 19)) ^ (t ^ (t >> 8)));
}
/// <summary>
/// Generates a random int over the range 0 to int.MaxValue, inclusive.
/// This method differs from Next() only in that the range is 0 to int.MaxValue
/// and not 0 to int.MaxValue-1.
///
/// The slight difference in range means this method is slightly faster than Next()
/// but is not functionally equivalent to System.Random.Next().
/// </summary>
public int NextInt()
{
uint t = (x ^ (x << 11));
x = y; y = z; z = w;
return (int)(0x7FFFFFFF & (w = (w ^ (w >> 19)) ^ (t ^ (t >> 8))));
}
// Buffer 32 bits in bitBuffer, return 1 at a time, keep track of how many have been returned
// with bitBufferIdx.
uint bitBuffer;
uint bitMask = 1;
/// <summary>
/// Generates a single random bit.
/// This method's performance is improved by generating 32 bits in one operation and storing them
/// ready for future calls.
/// </summary>
public bool NextBool()
{
if (bitMask == 1)
{
// Generate 32 more bits.
uint t = (x ^ (x << 11));
x = y; y = z; z = w;
bitBuffer = w = (w ^ (w >> 19)) ^ (t ^ (t >> 8));
// Reset the bitMask that tells us which bit to read next.
// The refill call consumes the top bit; later calls walk down via bitMask >>= 1.
bitMask = 0x80000000;
return (bitBuffer & bitMask) == 0;
}
return (bitBuffer & (bitMask >>= 1)) == 0;
}
}
}

@ -1,30 +0,0 @@
using System;
namespace ZeroLevel.HNSW.Services
{
/// <summary>
/// Draws the layer number for a new HNSW node: a uniform sample is compared
/// against per-layer thresholds precomputed from M and the layer count.
/// </summary>
internal sealed class ProbabilityLayerNumberGenerator
{
    // Threshold per layer index; computed once in the constructor.
    private readonly float[] _probabilities;

    internal ProbabilityLayerNumberGenerator(int maxLayers, int M)
    {
        _probabilities = new float[maxLayers];
        // mL normalization constant from the HNSW paper: 1 / ln(M).
        var invLogM = 1.0f / Math.Log(M);
        for (int i = 0; i < maxLayers; i++)
        {
            _probabilities[i] = (float)(Math.Exp(-i / invLogM) * (1 - Math.Exp(-1 / invLogM)));
        }
    }

    /// <summary>Returns the first layer whose threshold is below the drawn sample (0 if none is).</summary>
    internal int GetRandomLayer()
    {
        var sample = DefaultRandomGenerator.Instance.NextFloat();
        for (var layer = 0; layer < _probabilities.Length; layer++)
        {
            if (sample > _probabilities[layer])
            {
                return layer;
            }
        }
        return 0;
    }
}
}

@ -1,91 +0,0 @@
using System;
using System.Collections.Generic;
using System.Numerics;
namespace ZeroLevel.HNSW
{
/// <summary>
/// Helpers for generating and normalizing float vectors, with plain and
/// SIMD-accelerated magnitude/normalization variants.
/// </summary>
public static class VectorUtils
{
    /// <summary>
    /// Generates <paramref name="vectorsCount"/> random vectors of the given
    /// size, each normalized to unit length.
    /// </summary>
    public static List<float[]> RandomVectors(int vectorSize, int vectorsCount)
    {
        var vectors = new List<float[]>();
        for (int i = 0; i < vectorsCount; i++)
        {
            var vector = new float[vectorSize];
            DefaultRandomGenerator.Instance.NextFloats(vector);
            VectorUtils.NormalizeSIMD(vector);
            vectors.Add(vector);
        }
        return vectors;
    }

    /// <summary>Euclidean (L2) magnitude of the vector.</summary>
    public static float Magnitude(IList<float> vector)
    {
        float magnitude = 0.0f;
        for (int i = 0; i < vector.Count; ++i)
        {
            magnitude += vector[i] * vector[i];
        }
        return (float)Math.Sqrt(magnitude);
    }

    /// <summary>
    /// Scales the vector in place to unit length.
    /// NOTE(review): a zero vector produces an infinite normFactor and NaN/Inf
    /// components — callers are presumed to pass non-zero vectors; verify.
    /// </summary>
    public static void Normalize(IList<float> vector)
    {
        float normFactor = 1f / Magnitude(vector);
        for (int i = 0; i < vector.Count; ++i)
        {
            vector[i] *= normFactor;
        }
    }

    /// <summary>SIMD-accelerated Euclidean magnitude.</summary>
    /// <exception cref="NotSupportedException">When SIMD is not hardware accelerated.</exception>
    public static float MagnitudeSIMD(float[] vector)
    {
        if (!Vector.IsHardwareAccelerated)
        {
            // Fix: the original message named NormalizeSIMD here, which made the
            // thrown error misattribute the unsupported member.
            throw new NotSupportedException($"{nameof(VectorUtils.MagnitudeSIMD)} is not supported");
        }
        float magnitude = 0.0f;
        int step = Vector<float>.Count;
        int i, to = vector.Length - step;
        // Consistency: advance by the precomputed step (same value the loop bound uses).
        for (i = 0; i <= to; i += step)
        {
            var vi = new Vector<float>(vector, i);
            magnitude += Vector.Dot(vi, vi);
        }
        // Scalar tail for elements that don't fill a full SIMD lane.
        for (; i < vector.Length; ++i)
        {
            magnitude += vector[i] * vector[i];
        }
        return (float)Math.Sqrt(magnitude);
    }

    /// <summary>
    /// SIMD-accelerated in-place normalization to unit length.
    /// </summary>
    /// <exception cref="NotSupportedException">When SIMD is not hardware accelerated.</exception>
    public static void NormalizeSIMD(float[] vector)
    {
        if (!Vector.IsHardwareAccelerated)
        {
            throw new NotSupportedException($"{nameof(VectorUtils.NormalizeSIMD)} is not supported");
        }
        float normFactor = 1f / MagnitudeSIMD(vector);
        int step = Vector<float>.Count;
        int i, to = vector.Length - step;
        for (i = 0; i <= to; i += step)
        {
            var vi = new Vector<float>(vector, i);
            vi = Vector.Multiply(normFactor, vi);
            vi.CopyTo(vector, i);
        }
        for (; i < vector.Length; ++i)
        {
            vector[i] *= normFactor;
        }
    }
}
}

@ -1,32 +0,0 @@
using System;
namespace ZeroLevel.HNSW
{
/// <summary>
/// Compact visited-node marker: one bit per node id, packed 32 ids per int.
/// </summary>
public class VisitedBitSet
{
    // bit map: word index = nodeId >> 5, bit index = nodeId & 31
    private int[] _bits;

    public VisitedBitSet(int nodesCount, int M)
    {
        _bits = new int[(nodesCount >> 5) + M + 1];
    }

    /// <summary>True if the node id has been marked.</summary>
    public bool Contains(int nodeId) => (_bits[nodeId >> 5] & (1 << (nodeId & 31))) != 0;

    /// <summary>Marks the node id as visited.</summary>
    public void Add(int nodeId) => _bits[nodeId >> 5] |= 1 << (nodeId & 31);

    /// <summary>Resets every mark without reallocating the backing array.</summary>
    public void Clear() => Array.Clear(_bits, 0, _bits.Length);
}
}

@ -1,51 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<Platforms>AnyCPU;x64</Platforms>
<PlatformTarget>x64</PlatformTarget>
<DebugType>full</DebugType>
<Version>1.0.0.5</Version>
<Company>ogoun</Company>
<Authors>Ogoun</Authors>
<Copyright>Copyright Ogoun 2022</Copyright>
<PackageProjectUrl>https://github.com/ogoun/Zero/wiki</PackageProjectUrl>
<PackageIcon>zero.png</PackageIcon>
<RepositoryUrl>https://github.com/ogoun/Zero</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageReleaseNotes></PackageReleaseNotes>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<Optimize>False</Optimize>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<Optimize>False</Optimize>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
<Optimize>True</Optimize>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<Optimize>True</Optimize>
</PropertyGroup>
<ItemGroup>
<None Include="..\zero.png">
<Pack>True</Pack>
<PackagePath>\</PackagePath>
</None>
</ItemGroup>
<ItemGroup>
<PackageReference Include="System.Numerics.Vectors" Version="4.5.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\ZeroLevel\ZeroLevel.csproj" />
</ItemGroup>
</Project>

@ -2,7 +2,7 @@
<PropertyGroup> <PropertyGroup>
<OutputType>Exe</OutputType> <OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework> <TargetFramework>net8.0</TargetFramework>
<Platforms>AnyCPU;x64;x86</Platforms> <Platforms>AnyCPU;x64;x86</Platforms>
</PropertyGroup> </PropertyGroup>

@ -0,0 +1,30 @@
using System.Collections.Generic;
namespace ZeroLevel.ML
{
public static class CameraPixelSizes
{
/// <summary>
/// В микрометрах
/// </summary>
private static Dictionary<string, double> _pixels = new Dictionary<string, double>
{
{ "ZenmuseP1", 4.4d },
{ "M3E", 3.35821d },
{ "L1D-20c", 2.41d },
{ "F230", 1.55d },
{ "FC3411", 2.4d },
{ "XT702", 2.4d },
{ "FC7303", 1.334d},
};
public static double GetPixelSizeByModel(string model)
{
if (_pixels.ContainsKey(model))
{
return _pixels[model];
}
return 3.3d;
}
}
}

@ -0,0 +1,50 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace ZeroLevel.ML
{
public class FeatureCluster<T>
{
private readonly List<T> _features = new List<T>();
private readonly Func<T, float[]> _vectorExtractor;
public float[] CenterOfMass => _centerOfMass;
private float[] _centerOfMass;
public FeatureCluster(Func<T, float[]> vectorExtractor)
{
_vectorExtractor = vectorExtractor;
}
public IList<T> Features => _features;
internal void Append(T item)
{
_features.Add(item);
_centerOfMass = _vectorExtractor.Invoke(_features[0]);
if (_features.Count > 1)
{
foreach (var f in _features.Skip(1))
{
var f_vector = _vectorExtractor(f);
for (int i = 0; i < f_vector.Length; i++)
{
_centerOfMass[i] += f_vector[i];
}
}
for (int i = 0; i < _centerOfMass.Length; i++)
{
_centerOfMass[i] /= _features.Count;
}
}
}
public bool IsNeighbor(T feature, Func<float[], float[], double> similarityFunction, float threshold)
{
if (_features.Count == 0) return true;
var similarity = similarityFunction(_vectorExtractor(feature), _centerOfMass);
return similarity <= threshold;
}
}
}

@ -0,0 +1,33 @@
using System;
using System.Collections.Generic;
namespace ZeroLevel.ML
{
public class FeatureClusterBulder
{
public FeatureClusterCollection<T> Build<T>(IEnumerable<T> items, Func<T, float[]> vectorExtractor, Func<float[], float[], double> similarityFunction, float threshold)
{
var collection = new FeatureClusterCollection<T>();
foreach (var item in items)
{
bool isAdded = false;
foreach (var cluster in collection.Clusters)
{
if (cluster.Value.IsNeighbor(item, similarityFunction, threshold))
{
cluster.Value.Append(item);
isAdded = true;
break;
}
}
if (false == isAdded)
{
var cluster = new FeatureCluster<T>(vectorExtractor);
cluster.Append(item);
collection.Add(cluster);
}
}
return collection;
}
}
}

@ -0,0 +1,67 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
namespace ZeroLevel.ML
{
public class FeatureClusterCollection<T>
{
private int _clusterKey = 0;
private IDictionary<int, FeatureCluster<T>> _clusters = new Dictionary<int, FeatureCluster<T>>();
public IDictionary<int, FeatureCluster<T>> Clusters => _clusters;
internal void Add(FeatureCluster<T> cluster)
{
_clusters.Add(Interlocked.Increment(ref _clusterKey), cluster);
}
public void RemoveByDistance(Func<float[], float[], double> similarityFunction, Func<FeatureCluster<T>, double> winnerValue, double distance)
{
bool removed = false;
do
{
removed = false;
var keys = _clusters.Keys.ToArray();
var to_remove = new HashSet<int>();
for (int i = 0; i < keys.Length - 1; i++)
{
for (int j = i + 1; j < keys.Length; j++)
{
if (to_remove.Contains(j)) continue;
if(i == j) continue;
var ki = keys[i];
var kj = keys[j];
var sim = similarityFunction.Invoke(_clusters[ki].CenterOfMass, _clusters[kj].CenterOfMass);
if (sim < distance)
{
var scorei = winnerValue(_clusters[ki]);
var scorej = winnerValue(_clusters[kj]);
if (scorei < scorej)
{
to_remove.Add(ki);
}
else
{
to_remove.Add(kj);
}
}
}
}
if (to_remove.Any())
{
removed = true;
foreach (var k in to_remove)
{
_clusters.Remove(k);
}
}
} while (removed == true);
}
}
}

@ -0,0 +1,103 @@
using Microsoft.ML.OnnxRuntime.Tensors;
using ZeroLevel.ML.DNN.Models;
using System.Runtime.CompilerServices;
using System.Collections.Generic;
using System.Linq;
using System;
namespace ZeroLevel.ML.DNN.Classify
{
public sealed class EfficientnetLiteClassifier
: SSDNN, IClassifier
{
public int InputSize => 224;
public float[] MEAN_RGB = new float[3] { 0.498f, 0.498f, 0.498f };
public float[] STDDEV_RGB = new float[3] { 0.502f, 0.502f, 0.502f };
public float RNorm(float x) => ImageConverter.MeanStdNormilize(x, MEAN_RGB[0], STDDEV_RGB[0]);
public float GNorm(float x) => ImageConverter.MeanStdNormilize(x, MEAN_RGB[1], STDDEV_RGB[1]);
public float BNorm(float x) => ImageConverter.MeanStdNormilize(x, MEAN_RGB[2], STDDEV_RGB[2]);
public EfficientnetLiteClassifier(string modelPath, int deviceId = 0)
: base(modelPath, deviceId)
{
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static unsafe float[] Softmax(float[] input)
{
var sum = 0f;
var dst = new float[input.Length];
for (var i = 0; i < input.Length; ++i)
{
var e = (float)Math.Exp(input[i]);
dst[i] = e;
sum += e;
}
var sumInv = 1f / sum;
for (var i = 0; i < input.Length; ++i)
dst[i] *= sumInv;
return dst;
}
public List<float[]> Predict(FastTensorPool inputs)
{
var result = new List<float[]>();
Extract(new Dictionary<string, Tensor<float>> { { "input", inputs.Tensor } }, d =>
{
Tensor<float> output;
if (d.ContainsKey("output"))
{
output = d["output"];
}
else
{
output = d.First().Value;
}
if (output != null && output != null)
{
for (int tensorIndex = 0; tensorIndex < inputs.TensorSize; tensorIndex++)
{
var scores = new float[output.Dimensions[1]];
for (int objclass = 0; objclass < output.Dimensions[1]; objclass++)
{
scores[objclass] = output[tensorIndex, objclass];
}
var probs = Softmax(scores);
result.Add(probs);
}
}
});
return result;
}
public List<(int, float)> DetectClass(FastTensorPool inputs)
{
var classes = new List<(int, float)>();
var scores = Predict(inputs);
foreach (var score in scores)
{
if (score.Length > 0)
{
int index = 0;
float max = score[0];
for (int i = 1; i < score.Length; i++)
{
if (score[i] > max)
{
max = score[i];
index = i;
}
}
classes.Add((index, max));
}
else
{
classes.Add((-1, 0f));
}
}
return classes.OrderByDescending(x => x.Item2).ToList();
}
}
}

@ -0,0 +1,17 @@
using System;
using System.Collections.Generic;
using ZeroLevel.ML.DNN.Models;
namespace ZeroLevel.ML.DNN.Classify
{
    /// <summary>
    /// Image classifier: per-channel pixel normalization plus batched class prediction.
    /// </summary>
    public interface IClassifier
        : IDisposable
    {
        /// <summary>Red-channel normalization applied to each pixel before inference.</summary>
        float RNorm(float x);
        /// <summary>Green-channel normalization applied to each pixel before inference.</summary>
        float GNorm(float x);
        /// <summary>Blue-channel normalization applied to each pixel before inference.</summary>
        float BNorm(float x);
        /// <summary>Model input side length in pixels.</summary>
        int InputSize { get; }
        /// <summary>Per-image class score vectors.</summary>
        List<float[]> Predict(FastTensorPool inputs);
        /// <summary>Per-image (top class index, top score) pairs.</summary>
        List<(int, float)> DetectClass(FastTensorPool inputs);
    }
}

@ -0,0 +1,79 @@
using Microsoft.ML.OnnxRuntime.Tensors;
using System.Collections.Generic;
using System.Linq;
using ZeroLevel.ML.DNN.Models;
namespace ZeroLevel.ML.DNN.Classify
{
public class Yolov8Classifier
: SSDNN, IClassifier
{
public int InputSize => 224;
public float BNorm(float x) => ImageConverter.StandartNormalizator(x);
public float GNorm(float x) => ImageConverter.StandartNormalizator(x);
public float RNorm(float x) => ImageConverter.StandartNormalizator(x);
public Yolov8Classifier(string modelPath, int deviceId = 0)
: base(modelPath, deviceId)
{
}
public List<float[]> Predict(FastTensorPool inputs)
{
var result = new List<float[]>();
Extract(new Dictionary<string, Tensor<float>> { { "images", inputs.Tensor } }, d =>
{
Tensor<float> output;
if (d.ContainsKey("output0"))
{
output = d["output0"];
}
else
{
output = d.First().Value;
}
if (output != null && output != null)
{
for (int tensorIndex = 0; tensorIndex < inputs.TensorSize; tensorIndex++)
{
var scores = new float[output.Dimensions[1]];
for (int objclass = 0; objclass < output.Dimensions[1]; objclass++)
{
scores[objclass] = output[tensorIndex, objclass];
}
result.Add(scores);
}
}
});
return result;
}
public List<(int, float)> DetectClass(FastTensorPool inputs)
{
var classes = new List<(int, float)>();
var scores = Predict(inputs);
foreach (var score in scores)
{
if (score.Length > 0)
{
int index = 0;
float max = score[0];
for (int i = 1; i < score.Length; i++)
{
if (score[i] > max)
{
max = score[i];
index = i;
}
}
classes.Add((index, max));
}
else
{
classes.Add((-1, 0f));
}
}
return classes;
}
}
}

@ -0,0 +1,70 @@
using Microsoft.ML.OnnxRuntime.Tensors;
using System.Collections.Generic;
using ZeroLevel.ML.DNN.Models;
namespace ZeroLevel.ML.DNN.Detectors
{
    /// <summary>
    /// Object detector over a DAMO-YOLO ONNX model.
    /// Feeds the tensor pool as input "images" and reads outputs "scores" and "boxes".
    /// </summary>
    public class DamoYoloDetector
        : SSDNN, IObjectDetector
    {
        // Identity channel normalization: the model consumes raw pixel values
        // (the StandartNormalizator calls were intentionally disabled).
        public float BNorm(float x) => x;// ImageConverter.StandartNormalizator(x);
        public float GNorm(float x) => x;// ImageConverter.StandartNormalizator(x);
        public float RNorm(float x) => x;// ImageConverter.StandartNormalizator(x);
        public DamoYoloDetector(string modelPath, int deviceId = 0)
            : base(modelPath, deviceId)
        {
        }
        /// <summary>
        /// Runs detection over every tile of the pool and returns NMS-filtered
        /// predictions with coordinates relative to the full image (0..1).
        /// </summary>
        public List<YoloPrediction> Predict(FastTensorPool inputs, float threshold)
        {
            var result = new List<YoloPrediction>();
            // Factors converting absolute pixel coordinates to relative (0..1) ones.
            var relative_koef_x = 1.0f / inputs.Width;
            var relative_koef_y = 1.0f / inputs.Height;
            Extract(new Dictionary<string, Tensor<float>> { { "images", inputs.Tensor } }, d =>
            {
                Tensor<float> scores = d["scores"];
                Tensor<float> boxes = d["boxes"];
                for (int tensorIndex = 0; tensorIndex < inputs.TensorSize; tensorIndex++)
                {
                    var tensor = inputs.GetTensor(tensorIndex);
                    for (int box = 0; box < scores.Dimensions[1]; box++)
                    {
                        var conf = scores[tensorIndex, box, 0]; // confidence that any object is present
                        if (conf > threshold)
                        {
                            // Corner coordinates relative to the model input.
                            // NOTE(review): x reads indices 1/3 and y reads 0/2 - axes look
                            // deliberately swapped (consistent with the sibling detectors);
                            // confirm against the model export layout.
                            var x1 = boxes[tensorIndex, box, 1];
                            var y1 = boxes[tensorIndex, box, 0];
                            var x2 = boxes[tensorIndex, box, 3];
                            var y2 = boxes[tensorIndex, box, 2];
                            var cx = (x1 + x2) / 2;
                            var cy = (y1 + y2) / 2;
                            var w = x2 - x1;
                            var h = y2 - y1;
                            // Shift into full-image coordinates using the tile offset.
                            cx += tensor.StartX;
                            cy += tensor.StartY;
                            result.Add(new YoloPrediction
                            {
                                Cx = cx * relative_koef_x,
                                Cy = cy * relative_koef_y,
                                W = w * relative_koef_x,
                                H = h * relative_koef_y,
                                Class = 0,
                                Label = "0",
                                Score = conf
                            });
                        }
                    }
                }
            });
            // Drop overlapping duplicate detections.
            NMS.Apply(result);
            return result;
        }
    }
}

@ -0,0 +1,142 @@
using Microsoft.ML.OnnxRuntime.Tensors;
using System;
using System.Collections.Generic;
using ZeroLevel.ML.DNN.Models;
namespace ZeroLevel.ML.DNN.Detectors
{
/// <summary>
/// DamoYolo and FastestDet models combination
/// </summary>
public class DamodetDetector
: SSDNN, IObjectDetector
{
private const float SIZE = 640;
public DamodetDetector(string modelPath, int deviceId)
: base(modelPath, deviceId)
{
}
public float RNorm(float x) => x;
public float BNorm(float x) => x;
public float GNorm(float x) => x;
#region FastestDet
private static double sigmoid(double x)
{
return 1d / (1d + Math.Exp(-x));
}
private static double tanh(double x)
{
return 2d / (1d + Math.Exp(-2d * x)) - 1d;
}
private void FastestDetPostprocess(FastTensorPool inputs, Tensor<float> output, List<YoloPrediction> result, float threshold)
{
var relative_koef_x = 1.0f / inputs.Width;
var relative_koef_y = 1.0f / inputs.Height;
var feature_map_height = output.Dimensions[2];
var feature_map_width = output.Dimensions[3];
for (int tensorIndex = 0; tensorIndex < inputs.TensorSize; tensorIndex++)
{
var tensor = inputs.GetTensor(tensorIndex);
for (int h = 0; h < feature_map_height; h++)
{
for (int w = 0; w < feature_map_width; w++)
{
var obj_score = output[tensorIndex, 0, h, w];
var cls_score = output[tensorIndex, 5, h, w];
var score = Math.Pow(obj_score, 0.6) * Math.Pow(cls_score, 0.4);
if (score > threshold)
{
var x_offset = tanh(output[tensorIndex, 1, h, w]);
var y_offset = tanh(output[tensorIndex, 2, h, w]);
var box_width = sigmoid(output[tensorIndex, 3, h, w]) * SIZE;
var box_height = sigmoid(output[tensorIndex, 4, h, w]) * SIZE;
var box_cx = ((w + x_offset) / feature_map_width) * SIZE + tensor.StartX;
var box_cy = ((h + y_offset) / feature_map_height) * SIZE + tensor.StartY;
result.Add(new YoloPrediction
{
Cx = (float)box_cx * relative_koef_x,
Cy = (float)box_cy * relative_koef_y,
W = (float)box_width * relative_koef_x,
H = (float)box_height * relative_koef_y,
Class = 0,
Score = (float)score
});
}
}
}
}
}
#endregion
#region DamoYolo
private void DamoYoloPostprocess(FastTensorPool inputs, Tensor<float> scores, Tensor<float> boxes, List<YoloPrediction> result, float threshold)
{
var relative_koef_x = 1.0f / inputs.Width;
var relative_koef_y = 1.0f / inputs.Height;
for (int tensorIndex = 0; tensorIndex < inputs.TensorSize; tensorIndex++)
{
var tensor = inputs.GetTensor(tensorIndex);
for (int box = 0; box < scores.Dimensions[1]; box++)
{
var conf = scores[tensorIndex, box, 0]; // уверенность в наличии любого объекта
if (conf > threshold)
{
// Перевод относительно входа модели в относительные координаты
var x1 = boxes[tensorIndex, box, 1];
var y1 = boxes[tensorIndex, box, 0];
var x2 = boxes[tensorIndex, box, 3];
var y2 = boxes[tensorIndex, box, 2];
var cx = (x1 + x2) / 2;
var cy = (y1 + y2) / 2;
var w = x2 - x1;
var h = y2 - y1;
// Перевод в координаты отнисительно текущего смещения
cx += tensor.StartX;
cy += tensor.StartY;
result.Add(new YoloPrediction
{
Cx = cx * relative_koef_x,
Cy = cy * relative_koef_y,
W = w * relative_koef_x,
H = h * relative_koef_y,
Class = 0,
Label = "0",
Score = conf
});
}
}
}
}
#endregion
private static float _fastest_threshold = 0.932f;
public List<YoloPrediction> Predict(FastTensorPool inputs, float threshold)
{
var result = new List<YoloPrediction>();
var relative_koef_x = 1.0f / inputs.Width;
var relative_koef_y = 1.0f / inputs.Height;
Extract(new Dictionary<string, Tensor<float>> { { "images", inputs.Tensor } }, d =>
{
Tensor<float> damo_scores = d["scores"];
Tensor<float> damo_boxes = d["boxes"];
Tensor<float> fastest_output = d["output"];
DamoYoloPostprocess(inputs, damo_scores, damo_boxes, result, threshold);
FastestDetPostprocess(inputs, fastest_output, result, _fastest_threshold);
});
NMS.Apply(result);
return result;
}
}
}

@ -0,0 +1,76 @@
extern alias CoreDrawing;
using System.Collections.Generic;
using System.IO;
using ZeroLevel.ML.DNN.Models;
namespace ZeroLevel.ML.DNN.Detectors
{
internal sealed class Detector
: IDetector
{
private readonly IImageConverter _imageConverter;
private readonly IObjectDetector _model;
private readonly float _threshold;
private readonly bool _invertAxes = false;
internal Detector(IObjectDetector model,
float threshold,
ImageToTensorConversionOptions imageConverterOptions,
bool invertAxes = false)
{
_imageConverter = new ImageConverter(imageConverterOptions);
_model = model;
_threshold = threshold;
_invertAxes = invertAxes;
}
public FastTensorPool CreateInput(string filePath)
{
FastTensorPool input;
if (_invertAxes)
input = _imageConverter.ImageToFastTensorsV2Inverted(filePath);
else
input = _imageConverter.ImageToFastTensorsV2(filePath);
input.Name = Path.GetFileNameWithoutExtension(filePath);
input.Path = filePath;
return input;
}
public FastTensorPool CreateInput(CoreDrawing.System.Drawing.Bitmap image, string filePath = null!)
{
var input = _imageConverter.ImageToFastTensorsV2(image);
if (string.IsNullOrWhiteSpace(filePath) == false)
{
input.Name = Path.GetFileNameWithoutExtension(filePath);
input.Path = filePath;
}
return input;
}
public List<YoloPrediction> Detect(string filePath)
{
var input = _imageConverter.ImageToFastTensorsV2(filePath);
input.Name = Path.GetFileNameWithoutExtension(filePath);
input.Path = filePath;
return _model.Predict(input, _threshold);
}
public List<YoloPrediction> Detect(CoreDrawing.System.Drawing.Bitmap image, string filePath = null!)
{
var input = _imageConverter.ImageToFastTensorsV2(image);
if (string.IsNullOrWhiteSpace(filePath) == false)
{
input.Name = Path.GetFileNameWithoutExtension(filePath);
input.Path = filePath;
}
return _model.Predict(input, _threshold);
}
public List<YoloPrediction> Detect(FastTensorPool input) => _model.Predict(input, _threshold);
public void Dispose()
{
_model.Dispose();
}
}
}

@ -0,0 +1,78 @@
using System;
namespace ZeroLevel.ML.DNN.Detectors
{
public static class DetectorFactory
{
public static IDetector Create(IObjectDetector model, float threshold, ImageToTensorConversionOptions imageConverterOptions)
{
return new Detector(model, threshold, imageConverterOptions);
}
public static ObjectDetectionModels GetDetectorModel(string depectorPath)
{
var detectorType = ObjectDetectionModels.YoloV7;
if (depectorPath.Contains("nanodet", StringComparison.OrdinalIgnoreCase))
{
detectorType = ObjectDetectionModels.Nanodet;
}
else if (depectorPath.Contains("yolov8", StringComparison.OrdinalIgnoreCase))
{
detectorType = ObjectDetectionModels.YoloV8;
}
else if (depectorPath.Contains("yolov6", StringComparison.OrdinalIgnoreCase))
{
detectorType = ObjectDetectionModels.YoloV6;
}
else if (depectorPath.Contains("yolov5", StringComparison.OrdinalIgnoreCase))
{
detectorType = ObjectDetectionModels.YoloV5;
}
else if (depectorPath.Contains("mmyolo", StringComparison.OrdinalIgnoreCase))
{
detectorType = ObjectDetectionModels.MMYolo;
}
else if (depectorPath.Contains("damoyolo", StringComparison.OrdinalIgnoreCase))
{
detectorType = ObjectDetectionModels.DamoYolo;
}
else if (depectorPath.Contains("edgeyolo", StringComparison.OrdinalIgnoreCase))
{
detectorType = ObjectDetectionModels.EdgeYolo;
}
else if (depectorPath.Contains("fastestdet", StringComparison.OrdinalIgnoreCase))
{
detectorType = ObjectDetectionModels.FastestDet;
}
else if (depectorPath.Contains("damodet", StringComparison.OrdinalIgnoreCase))
{
detectorType = ObjectDetectionModels.DamoDet;
}
return detectorType;
}
public static IDetector Create(ObjectDetectionModels modelType, float threshold, string modelPath, ImageToTensorConversionOptions imageConverterOptions, int deviceId = 0)
{
IObjectDetector model;
bool invertAxes = false;
switch (modelType)
{
case ObjectDetectionModels.YoloV5: { model = new Yolov5Detector(modelPath, deviceId); break; }
case ObjectDetectionModels.YoloV6: { model = new Yolov6Detector(modelPath, deviceId); break; }
case ObjectDetectionModels.YoloV7: { model = new Yolov7Detector(modelPath, deviceId); break; }
case ObjectDetectionModels.YoloV8: { model = new Yolov8Detector(modelPath, deviceId); break; }
case ObjectDetectionModels.MMYolo: { model = new MMYoloDetector(modelPath, deviceId); break; }
case ObjectDetectionModels.Nanodet: { model = new NanodetDetector(modelPath, deviceId); break; }
case ObjectDetectionModels.DamoYolo: { model = new DamoYoloDetector(modelPath, deviceId); break; }
case ObjectDetectionModels.EdgeYolo: { model = new EdgeYoloDetector(modelPath, deviceId); break; }
case ObjectDetectionModels.DamoDet: { model = new DamodetDetector(modelPath, deviceId); break; }
case ObjectDetectionModels.FastestDet: { model = new FastestDetDetector(modelPath, deviceId); invertAxes = modelPath.Contains("modified", StringComparison.OrdinalIgnoreCase) == false; break; }
default:
throw new Exception($"Model type '{modelType}' not implemented yet");
}
return new Detector(model, threshold, imageConverterOptions, invertAxes);
}
}
}

@ -0,0 +1,68 @@
using Microsoft.ML.OnnxRuntime.Tensors;
using System.Collections.Generic;
using ZeroLevel.ML.DNN.Models;
namespace ZeroLevel.ML.DNN.Detectors
{
    /// <summary>
    /// Object detector over an EdgeYOLO ONNX model.
    /// Feeds the tensor pool as input "images" and reads the single output "output".
    /// </summary>
    public class EdgeYoloDetector
        : SSDNN, IObjectDetector
    {
        public EdgeYoloDetector(string modelPath, int deviceId)
            : base(modelPath, deviceId)
        {
        }
        // Standard 0..1 channel normalization.
        public float BNorm(float x) => ImageConverter.StandartNormalizator(x);
        public float GNorm(float x) => ImageConverter.StandartNormalizator(x);
        public float RNorm(float x) => ImageConverter.StandartNormalizator(x);
        /// <summary>
        /// Runs detection over every tile of the pool and returns NMS-filtered
        /// predictions with coordinates relative to the full image (0..1).
        /// </summary>
        public List<YoloPrediction> Predict(FastTensorPool inputs, float threshold)
        {
            var result = new List<YoloPrediction>();
            // Factors converting absolute pixel coordinates to relative (0..1) ones.
            var relative_koef_x = 1.0f / inputs.Width;
            var relative_koef_y = 1.0f / inputs.Height;
            Extract(new Dictionary<string, Tensor<float>> { { "images", inputs.Tensor } }, d =>
            {
                Tensor<float> output = d["output"];
                if (output != null)
                {
                    for (int tensorIndex = 0; tensorIndex < inputs.TensorSize; tensorIndex++)
                    {
                        var tensor = inputs.GetTensor(tensorIndex);
                        for (int box = 0; box < output.Dimensions[1]; box++)
                        {
                            var conf = output[tensorIndex, box, 4]; // confidence that any object is present
                            if (conf > threshold)
                            {
                                // The single class score must also clear the threshold.
                                var class_score = output[tensorIndex, box, 5];
                                if (class_score > threshold)
                                {
                                    // Center/size relative to the model input.
                                    // NOTE(review): cx reads index 1 and cy index 0 (w<->3, h<->2) -
                                    // axes look deliberately swapped (matches sibling detectors);
                                    // confirm against the export layout.
                                    var cx = output[tensorIndex, box, 1];
                                    var cy = output[tensorIndex, box, 0];
                                    var w = output[tensorIndex, box, 3];
                                    var h = output[tensorIndex, box, 2];
                                    // Shift into full-image coordinates using the tile offset.
                                    cx += tensor.StartX;
                                    cy += tensor.StartY;
                                    result.Add(new YoloPrediction
                                    {
                                        Cx = cx * relative_koef_x,
                                        Cy = cy * relative_koef_y,
                                        W = w * relative_koef_x,
                                        H = h * relative_koef_y,
                                        Class = 0,
                                        Label = "0",
                                        Score = conf
                                    });
                                }
                            }
                        }
                    }
                }
            });
            // Drop overlapping duplicate detections.
            NMS.Apply(result);
            return result;
        }
    }
}

@ -0,0 +1,87 @@
using Microsoft.ML.OnnxRuntime.Tensors;
using System;
using System.Collections.Generic;
using System.Linq;
using ZeroLevel.ML.DNN.Models;
namespace ZeroLevel.ML.DNN.Detectors
{
    /// <summary>
    /// Object detector over a FastestDet ONNX model.
    /// Feeds the tensor pool as input "images" and decodes the first output tensor
    /// (layout [batch, 6, H, W]: objectness, x, y, w, h, class score).
    /// </summary>
    public class FastestDetDetector
        : SSDNN, IObjectDetector
    {
        // Model input side length in pixels; cell offsets are scaled against it.
        private const float SIZE = 640;
        public FastestDetDetector(string modelPath, int deviceId)
            : base(modelPath, deviceId)
        {
        }
        // Standard 0..1 channel normalization.
        public float RNorm(float x) => ImageConverter.StandartNormalizator(x);
        public float BNorm(float x) => ImageConverter.StandartNormalizator(x);
        public float GNorm(float x) => ImageConverter.StandartNormalizator(x);
        private static double sigmoid(double x)
        {
            return 1d / (1d + Math.Exp(-x));
        }
        private static double tanh(double x)
        {
            return 2d / (1d + Math.Exp(-2d * x)) - 1d;
        }
        /// <summary>
        /// Runs detection over every tile of the pool and returns NMS-filtered
        /// predictions with coordinates relative to the full image (0..1).
        /// </summary>
        public List<YoloPrediction> Predict(FastTensorPool inputs, float threshold)
        {
            var result = new List<YoloPrediction>();
            // Factors converting absolute pixel coordinates to relative (0..1) ones.
            var relative_koef_x = 1.0f / inputs.Width;
            var relative_koef_y = 1.0f / inputs.Height;
            Extract(new Dictionary<string, Tensor<float>> { { "images", inputs.Tensor } }, d =>
            {
                var output = d.First().Value;
                var feature_map_height = output.Dimensions[2];
                var feature_map_width = output.Dimensions[3];
                for (int tensorIndex = 0; tensorIndex < inputs.TensorSize; tensorIndex++)
                {
                    var tensor = inputs.GetTensor(tensorIndex);
                    for (int h = 0; h < feature_map_height; h++)
                    {
                        for (int w = 0; w < feature_map_width; w++)
                        {
                            var obj_score = output[tensorIndex, 0, h, w];
                            var cls_score = output[tensorIndex, 5, h, w];
                            // Geometric blend of objectness and class confidence.
                            var score = Math.Pow(obj_score, 0.6) * Math.Pow(cls_score, 0.4);
                            if (score > threshold)
                            {
                                // Offsets within the cell (tanh -> [-1,1]); sizes via sigmoid, scaled to SIZE.
                                var x_offset = tanh(output[tensorIndex, 1, h, w]);
                                var y_offset = tanh(output[tensorIndex, 2, h, w]);
                                var box_width = sigmoid(output[tensorIndex, 3, h, w]) * SIZE;
                                var box_height = sigmoid(output[tensorIndex, 4, h, w]) * SIZE;
                                // Cell coordinates -> model input pixels, shifted by the tile offset.
                                var box_cx = ((w + x_offset) / feature_map_width) * SIZE + tensor.StartX;
                                var box_cy = ((h + y_offset) / feature_map_height) * SIZE + tensor.StartY;
                                result.Add(new YoloPrediction
                                {
                                    Cx = (float)box_cx * relative_koef_x,
                                    Cy = (float)box_cy * relative_koef_y,
                                    W = (float)box_width * relative_koef_x,
                                    H = (float)box_height * relative_koef_y,
                                    Class = 0,
                                    Score = (float)score
                                });
                            }
                        }
                    }
                }
            });
            // Drop overlapping duplicate detections.
            NMS.Apply(result);
            return result;
        }
    }
}

@ -0,0 +1,18 @@
extern alias CoreDrawing;
using System;
using System.Collections.Generic;
using ZeroLevel.ML.DNN.Models;
namespace ZeroLevel.ML.DNN.Detectors
{
    /// <summary>
    /// High-level detection pipeline: converts images to tensor pools and runs a detector model.
    /// </summary>
    public interface IDetector
        : IDisposable
    {
        /// <summary>Builds a tensor pool from an image file; Name/Path are taken from the file path.</summary>
        FastTensorPool CreateInput(string filePath);
        /// <summary>Builds a tensor pool from an in-memory bitmap; Name/Path set only when filePath is given.</summary>
        FastTensorPool CreateInput(CoreDrawing.System.Drawing.Bitmap image, string filePath = null!);
        /// <summary>Runs detection over a prepared tensor pool.</summary>
        List<YoloPrediction> Detect(FastTensorPool input);
        /// <summary>Loads the image file and runs detection.</summary>
        List<YoloPrediction> Detect(string filePath);
        /// <summary>Runs detection over an in-memory bitmap.</summary>
        List<YoloPrediction> Detect(CoreDrawing.System.Drawing.Bitmap image, string filePath = null!);
    }
}

@ -0,0 +1,15 @@
using System;
using System.Collections.Generic;
using ZeroLevel.ML.DNN.Models;
namespace ZeroLevel.ML.DNN.Detectors
{
    /// <summary>
    /// Low-level ONNX detector: per-channel pixel normalization plus batched prediction.
    /// </summary>
    public interface IObjectDetector
        : IDisposable
    {
        /// <summary>Red-channel normalization applied to each pixel before inference.</summary>
        float RNorm(float x);
        /// <summary>Green-channel normalization applied to each pixel before inference.</summary>
        float GNorm(float x);
        /// <summary>Blue-channel normalization applied to each pixel before inference.</summary>
        float BNorm(float x);
        /// <summary>Runs the model over the tensor pool, keeping predictions above the threshold.</summary>
        List<YoloPrediction> Predict(FastTensorPool inputs, float threshold);
    }
}

@ -0,0 +1,72 @@
using Microsoft.ML.OnnxRuntime.Tensors;
using System.Collections.Generic;
using ZeroLevel.ML.DNN.Models;
namespace ZeroLevel.ML.DNN.Detectors
{
    /// <summary>
    /// Object detector over an MMYOLO-exported ONNX model.
    /// Feeds the tensor pool as input "images" and reads outputs "boxes" and "scores".
    /// </summary>
    public sealed class MMYoloDetector
        : SSDNN, IObjectDetector
    {
        public MMYoloDetector(string modelPath, int deviceId = 0)
            : base(modelPath, deviceId)
        {
        }
        // Standard 0..1 channel normalization.
        public float BNorm(float x) => ImageConverter.StandartNormalizator(x);
        public float GNorm(float x) => ImageConverter.StandartNormalizator(x);
        public float RNorm(float x) => ImageConverter.StandartNormalizator(x);
        /// <summary>
        /// Runs detection over every tile of the pool and returns NMS-filtered
        /// predictions with coordinates relative to the full image (0..1).
        /// </summary>
        public List<YoloPrediction> Predict(FastTensorPool inputs, float threshold)
        {
            var result = new List<YoloPrediction>();
            // Factors converting absolute pixel coordinates to relative (0..1) ones.
            var relative_koef_x = 1.0f / inputs.Width;
            var relative_koef_y = 1.0f / inputs.Height;
            Extract(new Dictionary<string, Tensor<float>> { { "images", inputs.Tensor } }, d =>
            {
                Tensor<float> boxes = d["boxes"];
                Tensor<float> scores = d["scores"];
                if (boxes != null && scores != null)
                {
                    for (int tensorIndex = 0; tensorIndex < inputs.TensorSize; tensorIndex++)
                    {
                        var tensor = inputs.GetTensor(tensorIndex);
                        for (int box = 0; box < scores.Dimensions[1]; box++)
                        {
                            var conf = scores[tensorIndex, box]; // confidence that any object is present
                            if (conf > threshold)
                            {
                                // Corner coordinates relative to the model input.
                                // NOTE(review): x reads indices 1/3 and y reads 0/2 - axes look
                                // deliberately swapped (matches sibling detectors); confirm against
                                // the export layout.
                                var tlx = boxes[tensorIndex, box, 1];
                                var tly = boxes[tensorIndex, box, 0];
                                var brx = boxes[tensorIndex, box, 3];
                                var bry = boxes[tensorIndex, box, 2];
                                var cx = (tlx + brx) * 0.5f;
                                var cy = (tly + bry) * 0.5f;
                                var w = brx - tlx;
                                var h = bry - tly;
                                // Shift into full-image coordinates using the tile offset.
                                cx += tensor.StartX;
                                cy += tensor.StartY;
                                result.Add(new YoloPrediction
                                {
                                    Cx = cx * relative_koef_x,
                                    Cy = cy * relative_koef_y,
                                    W = w * relative_koef_x,
                                    H = h * relative_koef_y,
                                    Class = 0,
                                    Label = "0",
                                    Score = conf
                                });
                            }
                        }
                    }
                }
            });
            // Drop overlapping duplicate detections.
            NMS.Apply(result);
            return result;
        }
    }
}

@ -4,10 +4,9 @@ using System.Linq;
using System.Text; using System.Text;
using System.Threading.Tasks; using System.Threading.Tasks;
namespace ZeroLevel.HNSW.PHNSW namespace ZeroLevel.ML.DNN.Detectors
{ {
public class UWLevel<TPayload> internal class NanodetDamoyoloComposeDetector
:IPHNSWLevel<TPayload>
{ {
} }
} }

@ -0,0 +1,421 @@
using Microsoft.ML.OnnxRuntime.Tensors;
using ZeroLevel.ML.DNN.Models;
using System.Runtime.CompilerServices;
using System;
using System.Collections.Generic;
namespace ZeroLevel.ML.DNN.Detectors
{
public class NanodetDetector
: SSDNN, IObjectDetector
{
private static float[] mean_vals = new float[3] { 103.53f, 116.28f, 123.675f };
private static float[] norm_vals = new float[3] { 0.017429f, 0.017507f, 0.017125f };
private static int[] strides = new int[4] { 8, 16, 32, 64 };
private static int reg_max = 7;
private static int input_s = 640;
public float RNorm(float x) => (x + mean_vals[0]) * norm_vals[0];
public float GNorm(float x) => (x + mean_vals[1]) * norm_vals[1];
public float BNorm(float x) => (x + mean_vals[2]) * norm_vals[2];
public NanodetDetector(string modelPath, int deviceId = 0)
: base(modelPath, deviceId)
{
}
#region Fastexp dict
static double[] ExpAdjustment = new double[256] {
1.040389835,
1.039159306,
1.037945888,
1.036749401,
1.035569671,
1.034406528,
1.033259801,
1.032129324,
1.031014933,
1.029916467,
1.028833767,
1.027766676,
1.02671504,
1.025678708,
1.02465753,
1.023651359,
1.022660049,
1.021683458,
1.020721446,
1.019773873,
1.018840604,
1.017921503,
1.017016438,
1.016125279,
1.015247897,
1.014384165,
1.013533958,
1.012697153,
1.011873629,
1.011063266,
1.010265947,
1.009481555,
1.008709975,
1.007951096,
1.007204805,
1.006470993,
1.005749552,
1.005040376,
1.004343358,
1.003658397,
1.002985389,
1.002324233,
1.001674831,
1.001037085,
1.000410897,
0.999796173,
0.999192819,
0.998600742,
0.998019851,
0.997450055,
0.996891266,
0.996343396,
0.995806358,
0.995280068,
0.99476444,
0.994259393,
0.993764844,
0.993280711,
0.992806917,
0.992343381,
0.991890026,
0.991446776,
0.991013555,
0.990590289,
0.990176903,
0.989773325,
0.989379484,
0.988995309,
0.988620729,
0.988255677,
0.987900083,
0.987553882,
0.987217006,
0.98688939,
0.98657097,
0.986261682,
0.985961463,
0.985670251,
0.985387985,
0.985114604,
0.984850048,
0.984594259,
0.984347178,
0.984108748,
0.983878911,
0.983657613,
0.983444797,
0.983240409,
0.983044394,
0.982856701,
0.982677276,
0.982506066,
0.982343022,
0.982188091,
0.982041225,
0.981902373,
0.981771487,
0.981648519,
0.981533421,
0.981426146,
0.981326648,
0.98123488,
0.981150798,
0.981074356,
0.981005511,
0.980944219,
0.980890437,
0.980844122,
0.980805232,
0.980773726,
0.980749562,
0.9807327,
0.9807231,
0.980720722,
0.980725528,
0.980737478,
0.980756534,
0.98078266,
0.980815817,
0.980855968,
0.980903079,
0.980955475,
0.981017942,
0.981085714,
0.981160303,
0.981241675,
0.981329796,
0.981424634,
0.981526154,
0.981634325,
0.981749114,
0.981870489,
0.981998419,
0.982132873,
0.98227382,
0.982421229,
0.982575072,
0.982735318,
0.982901937,
0.983074902,
0.983254183,
0.983439752,
0.983631582,
0.983829644,
0.984033912,
0.984244358,
0.984460956,
0.984683681,
0.984912505,
0.985147403,
0.985388349,
0.98563532,
0.98588829,
0.986147234,
0.986412128,
0.986682949,
0.986959673,
0.987242277,
0.987530737,
0.987825031,
0.988125136,
0.98843103,
0.988742691,
0.989060098,
0.989383229,
0.989712063,
0.990046579,
0.990386756,
0.990732574,
0.991084012,
0.991441052,
0.991803672,
0.992171854,
0.992545578,
0.992924825,
0.993309578,
0.993699816,
0.994095522,
0.994496677,
0.994903265,
0.995315266,
0.995732665,
0.996155442,
0.996583582,
0.997017068,
0.997455883,
0.99790001,
0.998349434,
0.998804138,
0.999264107,
0.999729325,
1.000199776,
1.000675446,
1.001156319,
1.001642381,
1.002133617,
1.002630011,
1.003131551,
1.003638222,
1.00415001,
1.004666901,
1.005188881,
1.005715938,
1.006248058,
1.006785227,
1.007327434,
1.007874665,
1.008426907,
1.008984149,
1.009546377,
1.010113581,
1.010685747,
1.011262865,
1.011844922,
1.012431907,
1.013023808,
1.013620615,
1.014222317,
1.014828902,
1.01544036,
1.016056681,
1.016677853,
1.017303866,
1.017934711,
1.018570378,
1.019210855,
1.019856135,
1.020506206,
1.02116106,
1.021820687,
1.022485078,
1.023154224,
1.023828116,
1.024506745,
1.025190103,
1.02587818,
1.026570969,
1.027268461,
1.027970647,
1.02867752,
1.029389072,
1.030114973,
1.030826088,
1.03155163,
1.032281819,
1.03301665,
1.033756114,
1.034500204,
1.035248913,
1.036002235,
1.036760162,
1.037522688,
1.038289806,
1.039061509,
1.039837792,
1.040618648
};
#endregion
        // Fast approximate e^x via the Schraudolph-style bit trick: a linear transform
        // of x is written directly into the exponent bits of an IEEE-754 double, then
        // the result is refined with the precomputed ExpAdjustment correction table
        // (indexed by 8 bits of the intermediate integer).
        static double FastExp(double x)
        {
            var tmp = (long)(1512775 * x + 1072632447);
            int index = (int)(tmp >> 12) & 0xFF;
            return BitConverter.Int64BitsToDouble(tmp << 32) * ExpAdjustment[index];
        }
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float Sigmoid(double value)
{
float k = (float)Math.Exp(value);
return k / (1.0f + k);
}
/// <summary>
/// Computes softmax over dst.Length consecutive elements of
/// <paramref name="input"/> starting at <paramref name="start"/>, writing
/// the normalized probabilities into <paramref name="dst"/>.
/// </summary>
/// <param name="input">Flat logits buffer.</param>
/// <param name="start">Index of the first logit of the slice.</param>
/// <param name="dst">Receives the probabilities; its length defines the slice size.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static void Softmax(float[] input, int start, float[] dst)
{
    // Fixes: the 'unsafe' modifier was dropped (the body contained no
    // pointer code), and the hard-coded element count 8 is now dst.Length,
    // so the helper stays correct for any reg_max, not only reg_max = 7.
    var sum = 0f;
    for (var i = 0; i < dst.Length; ++i)
    {
        var e = (float)Math.Exp(input[start + i]);
        dst[i] = e;
        sum += e;
    }
    // Single division, then multiply through — same as the original.
    var sumInv = 1f / sum;
    for (var i = 0; i < dst.Length; ++i)
        dst[i] *= sumInv;
}
/// <summary>
/// Copies one prediction row output[b, s, *] into a flat array.
/// </summary>
/// <param name="output">Model output tensor of shape [batch, cells, channels].</param>
/// <param name="b">Batch index.</param>
/// <param name="s">Flattened cell index.</param>
/// <returns>The row's channel values as a new float array.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private float[] GetCurrentRow(Tensor<float> output, int b, int s)
{
    // Row length taken from the tensor's last dimension instead of the
    // previous hard-coded 33 (= 1 score + 4 * (reg_max + 1) with
    // reg_max = 7), so a changed head layout cannot silently truncate.
    float[] row = new float[output.Dimensions[2]];
    for (int i = 0; i < row.Length; i++)
    {
        row[i] = output[b, s, i];
    }
    return row;
}
/// <summary>
/// Decodes one batch element of a GFL/NanoDet-style head into candidate
/// boxes. For each stride level the feature map is scanned cell by cell;
/// a cell's row is [score, 4 * (reg_max + 1) distribution logits]. The
/// distance to each box side is the softmax-weighted expectation of the
/// discrete distribution, scaled by the stride.
/// </summary>
/// <param name="batchIndex">Index into the output tensor's batch dimension.</param>
/// <param name="output">Raw model output of shape [batch, cells, channels].</param>
/// <param name="threshold">Minimum score for a cell to produce a box.</param>
/// <returns>Unfiltered candidate boxes in input-image pixel coordinates (no NMS).</returns>
private List<YoloPrediction> Decoder(int batchIndex, Tensor<float> output, float threshold)
{
    var result = new List<YoloPrediction>();
    // Running offset of the current stride level's first cell within the
    // flattened cell dimension; levels are concatenated in stride order.
    var proceedLength = 0;
    for (int i = 0; i < strides.Length; i++)
    {
        var stride = strides[i];
        // Feature-map side length for this stride (input_s is the square input size).
        int feature_size = (int)Math.Ceiling((float)input_s / stride);
        for (int y = 0; y < feature_size; y++)
        {
            for (int x = 0; x < feature_size; x++)
            {
                // Row-major flattening of (y, x) within this level.
                var startIndex = proceedLength + y * feature_size + x;
                var row = GetCurrentRow(output, batchIndex, startIndex);
                // Channel 0 is the (single-class) objectness/confidence score.
                var score = row[0];
                if (score > threshold)
                {
                    // NOTE(review): ct_y is derived from x and ct_x from y,
                    // and further below h is taken from the x-extent while w
                    // comes from the y-extent — the two swaps partially
                    // cancel. Presumably this compensates for the tensor's
                    // row/column order; confirm against the exporting model
                    // before "fixing" either swap in isolation.
                    float ct_y = (x + 0.5f) * stride;
                    float ct_x = (y + 0.5f) * stride;
                    float[] dis_pred = new float[4];
                    // Four sides: left, top, right, bottom — each encoded as a
                    // (reg_max + 1)-bin categorical distribution over distances.
                    for (int di = 0; di < 4; di++)
                    {
                        float dis = 0;
                        float[] dis_after_sm = new float[reg_max + 1];
                        // Softmax over this side's slice of the row (channel 0 is the score).
                        Softmax(row, 1 + (reg_max + 1) * di, dis_after_sm);
                        // Expected value of the distribution = sum(bin * probability).
                        for (int j = 0; j < reg_max + 1; j++)
                        {
                            dis += j * dis_after_sm[j];
                        }
                        // Distances are in feature-map units; scale to input pixels.
                        dis *= stride;
                        dis_pred[di] = dis;
                    }
                    // Clamp the box to the input square [0, input_s].
                    float xmin = Math.Max(ct_x - dis_pred[0], .0f);
                    float ymin = Math.Max(ct_y - dis_pred[1], .0f);
                    float xmax = Math.Min(ct_x + dis_pred[2], (float)(input_s));
                    float ymax = Math.Min(ct_y + dis_pred[3], (float)(input_s));
                    // Convert corners to center + size.
                    var cx = (xmin + xmax) * 0.5f;
                    var cy = (ymin + ymax) * 0.5f;
                    // NOTE(review): h is the x-extent and w the y-extent here —
                    // see the coordinate-swap note above.
                    var h = (xmax - xmin);
                    var w = (ymax - ymin);
                    // Single-class model: Class/Label are fixed to 0 / "0".
                    result.Add(new YoloPrediction
                    {
                        Cx = cx,
                        Cy = cy,
                        W = w,
                        H = h,
                        Class = 0,
                        Label = "0",
                        Score = score
                    });
                }
            }
        }
        proceedLength += feature_size * feature_size;
    }
    return result;
}
/// <summary>
/// Runs the model over every tile in <paramref name="inputs"/>, decodes
/// per-tile detections above <paramref name="threshold"/>, shifts them from
/// tile-local to full-image coordinates, normalizes to [0..1], and applies
/// non-maximum suppression in place before returning.
/// </summary>
/// <param name="inputs">Prepared tile tensors plus full-image dimensions.</param>
/// <param name="threshold">Minimum confidence for a detection to be kept.</param>
/// <returns>NMS-filtered detections in normalized image coordinates.</returns>
public List<YoloPrediction> Predict(FastTensorPool inputs, float threshold)
{
    var predictions = new List<YoloPrediction>();
    float invWidth = 1.0f / inputs.Width;
    float invHeight = 1.0f / inputs.Height;
    var feed = new Dictionary<string, Tensor<float>> { { "data", inputs.Tensor } };
    Extract(feed, outputs =>
    {
        Tensor<float> output = outputs["output"];
        if (output == null)
        {
            return;
        }
        for (int index = 0; index < inputs.TensorSize; index++)
        {
            var tile = inputs.GetTensor(index);
            foreach (var box in Decoder(index, output, threshold))
            {
                // Tile-local pixels -> absolute pixels -> normalized [0..1].
                box.Cx = (box.Cx + tile.StartX) * invWidth;
                box.Cy = (box.Cy + tile.StartY) * invHeight;
                box.W *= invWidth;
                box.H *= invHeight;
                predictions.Add(box);
            }
        }
    });
    NMS.Apply(predictions);
    return predictions;
}
}
}

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save

Powered by TurnKey Linux.