diff --git a/AutoLoader/AppContainer/AppContainer.csproj b/AutoLoader/AppContainer/AppContainer.csproj
deleted file mode 100644
index 132c02c..0000000
--- a/AutoLoader/AppContainer/AppContainer.csproj
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
- net6.0
- enable
- enable
-
-
-
diff --git a/AutoLoader/AppContainer/Class1.cs b/AutoLoader/AppContainer/Class1.cs
deleted file mode 100644
index 9b0c6de..0000000
--- a/AutoLoader/AppContainer/Class1.cs
+++ /dev/null
@@ -1,51 +0,0 @@
-namespace AppContainer
-{
-
- public class Endpoint
- {
- public string VersionEndpoint { get; set; }
- public string AppEndpoint { get; set; }
- public string PingEndpoint { get; set; }
- public string Token { get; set; }
- }
- public class HostSettings
- {
- public IEnumerable Endpoints { get; set; }
- public string EntryClass { get; set; }
- public string EntryMethod { get; set; }
- }
-
- public class Host
- {
- private readonly HostSettings _settings;
- public Host(HostSettings settings)
- {
- if (settings == null) throw new ArgumentNullException(nameof(settings));
- _settings = settings;
- }
-
- public async Task HasUpdate()
- {
- if (_settings.Endpoints != null)
- {
- foreach (var ep in _settings.Endpoints)
- {
- try
- {
- using (var client = new HttpClient())
- {
- client.DefaultRequestHeaders.Add("token", ep.Token);
- var response = await client.GetAsync(ep.PingEndpoint);
- response.EnsureSuccessStatusCode();
-
- }
- }
- catch (Exception ex)
- {
- }
- }
- }
- return false;
- }
- }
-}
\ No newline at end of file
diff --git a/ConnectionTest/Client/Client.csproj b/Tests/ConnectionTest/Client/Client.csproj
similarity index 79%
rename from ConnectionTest/Client/Client.csproj
rename to Tests/ConnectionTest/Client/Client.csproj
index fc81762..f18c94b 100644
--- a/ConnectionTest/Client/Client.csproj
+++ b/Tests/ConnectionTest/Client/Client.csproj
@@ -8,7 +8,7 @@
-
+
diff --git a/ConnectionTest/Client/Program.cs b/Tests/ConnectionTest/Client/Program.cs
similarity index 100%
rename from ConnectionTest/Client/Program.cs
rename to Tests/ConnectionTest/Client/Program.cs
diff --git a/ConnectionTest/Server/Program.cs b/Tests/ConnectionTest/Server/Program.cs
similarity index 100%
rename from ConnectionTest/Server/Program.cs
rename to Tests/ConnectionTest/Server/Program.cs
diff --git a/ConnectionTest/Server/Properties/PublishProfiles/FolderProfile.pubxml b/Tests/ConnectionTest/Server/Properties/PublishProfiles/FolderProfile.pubxml
similarity index 100%
rename from ConnectionTest/Server/Properties/PublishProfiles/FolderProfile.pubxml
rename to Tests/ConnectionTest/Server/Properties/PublishProfiles/FolderProfile.pubxml
diff --git a/ConnectionTest/Server/Server.csproj b/Tests/ConnectionTest/Server/Server.csproj
similarity index 77%
rename from ConnectionTest/Server/Server.csproj
rename to Tests/ConnectionTest/Server/Server.csproj
index 992ba2f..9c71c66 100644
--- a/ConnectionTest/Server/Server.csproj
+++ b/Tests/ConnectionTest/Server/Server.csproj
@@ -7,7 +7,7 @@
-
+
diff --git a/FileTransferTest/FileTransferClient/App.xaml b/Tests/FileTransferTest/FileTransferClient/App.xaml
similarity index 100%
rename from FileTransferTest/FileTransferClient/App.xaml
rename to Tests/FileTransferTest/FileTransferClient/App.xaml
diff --git a/FileTransferTest/FileTransferClient/App.xaml.cs b/Tests/FileTransferTest/FileTransferClient/App.xaml.cs
similarity index 100%
rename from FileTransferTest/FileTransferClient/App.xaml.cs
rename to Tests/FileTransferTest/FileTransferClient/App.xaml.cs
diff --git a/FileTransferTest/FileTransferClient/AssemblyInfo.cs b/Tests/FileTransferTest/FileTransferClient/AssemblyInfo.cs
similarity index 100%
rename from FileTransferTest/FileTransferClient/AssemblyInfo.cs
rename to Tests/FileTransferTest/FileTransferClient/AssemblyInfo.cs
diff --git a/FileTransferTest/FileTransferClient/FileTransferClient.csproj b/Tests/FileTransferTest/FileTransferClient/FileTransferClient.csproj
similarity index 88%
rename from FileTransferTest/FileTransferClient/FileTransferClient.csproj
rename to Tests/FileTransferTest/FileTransferClient/FileTransferClient.csproj
index 4e76b0c..51f959c 100644
--- a/FileTransferTest/FileTransferClient/FileTransferClient.csproj
+++ b/Tests/FileTransferTest/FileTransferClient/FileTransferClient.csproj
@@ -9,7 +9,7 @@
-
+
diff --git a/FileTransferTest/FileTransferClient/MainWindow.xaml b/Tests/FileTransferTest/FileTransferClient/MainWindow.xaml
similarity index 100%
rename from FileTransferTest/FileTransferClient/MainWindow.xaml
rename to Tests/FileTransferTest/FileTransferClient/MainWindow.xaml
diff --git a/FileTransferTest/FileTransferClient/MainWindow.xaml.cs b/Tests/FileTransferTest/FileTransferClient/MainWindow.xaml.cs
similarity index 100%
rename from FileTransferTest/FileTransferClient/MainWindow.xaml.cs
rename to Tests/FileTransferTest/FileTransferClient/MainWindow.xaml.cs
diff --git a/FileTransferTest/FileTransferServer/App.xaml b/Tests/FileTransferTest/FileTransferServer/App.xaml
similarity index 100%
rename from FileTransferTest/FileTransferServer/App.xaml
rename to Tests/FileTransferTest/FileTransferServer/App.xaml
diff --git a/FileTransferTest/FileTransferServer/App.xaml.cs b/Tests/FileTransferTest/FileTransferServer/App.xaml.cs
similarity index 100%
rename from FileTransferTest/FileTransferServer/App.xaml.cs
rename to Tests/FileTransferTest/FileTransferServer/App.xaml.cs
diff --git a/FileTransferTest/FileTransferServer/AssemblyInfo.cs b/Tests/FileTransferTest/FileTransferServer/AssemblyInfo.cs
similarity index 100%
rename from FileTransferTest/FileTransferServer/AssemblyInfo.cs
rename to Tests/FileTransferTest/FileTransferServer/AssemblyInfo.cs
diff --git a/FileTransferTest/FileTransferServer/FileTransferServer.csproj b/Tests/FileTransferTest/FileTransferServer/FileTransferServer.csproj
similarity index 88%
rename from FileTransferTest/FileTransferServer/FileTransferServer.csproj
rename to Tests/FileTransferTest/FileTransferServer/FileTransferServer.csproj
index 4e76b0c..51f959c 100644
--- a/FileTransferTest/FileTransferServer/FileTransferServer.csproj
+++ b/Tests/FileTransferTest/FileTransferServer/FileTransferServer.csproj
@@ -9,7 +9,7 @@
-
+
diff --git a/FileTransferTest/FileTransferServer/MainWindow.xaml b/Tests/FileTransferTest/FileTransferServer/MainWindow.xaml
similarity index 100%
rename from FileTransferTest/FileTransferServer/MainWindow.xaml
rename to Tests/FileTransferTest/FileTransferServer/MainWindow.xaml
diff --git a/FileTransferTest/FileTransferServer/MainWindow.xaml.cs b/Tests/FileTransferTest/FileTransferServer/MainWindow.xaml.cs
similarity index 93%
rename from FileTransferTest/FileTransferServer/MainWindow.xaml.cs
rename to Tests/FileTransferTest/FileTransferServer/MainWindow.xaml.cs
index 9f67960..99cff4e 100644
--- a/FileTransferTest/FileTransferServer/MainWindow.xaml.cs
+++ b/Tests/FileTransferTest/FileTransferServer/MainWindow.xaml.cs
@@ -11,15 +11,15 @@ namespace FileTransferServer
///
public partial class MainWindow : Window
{
+ private FileReceiver _server = null!;
+ private readonly IExchange _exchange;
+
public MainWindow()
{
InitializeComponent();
_exchange = Bootstrap.CreateExchange();
}
- private FileReceiver _server;
- private IExchange _exchange;
-
private void Button_Click(object sender, RoutedEventArgs e)
{
int port = -1;
diff --git a/TestHNSW/HNSWDemo/HNSWDemo.csproj b/Tests/HNSWDemo/HNSWDemo.csproj
similarity index 100%
rename from TestHNSW/HNSWDemo/HNSWDemo.csproj
rename to Tests/HNSWDemo/HNSWDemo.csproj
diff --git a/TestHNSW/HNSWDemo/Model/Gender.cs b/Tests/HNSWDemo/Model/Gender.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Model/Gender.cs
rename to Tests/HNSWDemo/Model/Gender.cs
diff --git a/TestHNSW/HNSWDemo/Model/Person.cs b/Tests/HNSWDemo/Model/Person.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Model/Person.cs
rename to Tests/HNSWDemo/Model/Person.cs
diff --git a/TestHNSW/HNSWDemo/Program.cs b/Tests/HNSWDemo/Program.cs
similarity index 84%
rename from TestHNSW/HNSWDemo/Program.cs
rename to Tests/HNSWDemo/Program.cs
index 74ffc2f..6ede5ff 100644
--- a/TestHNSW/HNSWDemo/Program.cs
+++ b/Tests/HNSWDemo/Program.cs
@@ -9,9 +9,9 @@ namespace HNSWDemo
{
static void Main(string[] args)
{
- new QuantizatorTest().Run();
- // new AutoClusteringMNISTTest().Run();
- // new AccuracityTest().Run();
+ //new QuantizatorTest().Run();
+ //new AutoClusteringMNISTTest().Run();
+ new AccuracityTest().Run();
Console.WriteLine("Completed");
Console.ReadKey();
}
diff --git a/TestHNSW/HNSWDemo/Properties/launchSettings.json b/Tests/HNSWDemo/Properties/launchSettings.json
similarity index 100%
rename from TestHNSW/HNSWDemo/Properties/launchSettings.json
rename to Tests/HNSWDemo/Properties/launchSettings.json
diff --git a/TestHNSW/HNSWDemo/Tests/AccuracityTest.cs b/Tests/HNSWDemo/Tests/AccuracityTest.cs
similarity index 97%
rename from TestHNSW/HNSWDemo/Tests/AccuracityTest.cs
rename to Tests/HNSWDemo/Tests/AccuracityTest.cs
index cbd4fc5..e04ec64 100644
--- a/TestHNSW/HNSWDemo/Tests/AccuracityTest.cs
+++ b/Tests/HNSWDemo/Tests/AccuracityTest.cs
@@ -27,7 +27,7 @@ namespace HNSWDemo.Tests
var sw = new Stopwatch();
var test = new VectorsDirectCompare(samples, Metrics.CosineDistance);
- var world = new SmallWorld(NSWOptions.Create(8, 12, 100, 100, Metrics.CosineDistance));
+ var world = new SmallWorld(NSWOptions.Create(8, 16, 100, 100, (a, b) => (float)Metrics.DotProductDistance(a, b)));
sw.Start();
var ids = world.AddItems(samples.ToArray());
diff --git a/TestHNSW/HNSWDemo/Tests/AutoClusteringMNISTTest.cs b/Tests/HNSWDemo/Tests/AutoClusteringMNISTTest.cs
similarity index 99%
rename from TestHNSW/HNSWDemo/Tests/AutoClusteringMNISTTest.cs
rename to Tests/HNSWDemo/Tests/AutoClusteringMNISTTest.cs
index faa8c91..f8b470a 100644
--- a/TestHNSW/HNSWDemo/Tests/AutoClusteringMNISTTest.cs
+++ b/Tests/HNSWDemo/Tests/AutoClusteringMNISTTest.cs
@@ -83,7 +83,7 @@ namespace HNSWDemo.Tests
var exists = links.Where(n => n > 0).ToArray();
var histogram = new Histogram(HistogramMode.LOG, links);
- DrawHistogram(histogram, @"D:\Mnist\histogram.jpg");
+ DrawHistogram(histogram, @"D:\histogram.jpg");
var clusters = AutomaticGraphClusterer.DetectClusters(world);
Console.WriteLine($"Found {clusters.Count} clusters");
diff --git a/TestHNSW/HNSWDemo/Tests/AutoClusteringTest.cs b/Tests/HNSWDemo/Tests/AutoClusteringTest.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Tests/AutoClusteringTest.cs
rename to Tests/HNSWDemo/Tests/AutoClusteringTest.cs
diff --git a/TestHNSW/HNSWDemo/Tests/FilterTest.cs b/Tests/HNSWDemo/Tests/FilterTest.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Tests/FilterTest.cs
rename to Tests/HNSWDemo/Tests/FilterTest.cs
diff --git a/TestHNSW/HNSWDemo/Tests/HistogramTest.cs b/Tests/HNSWDemo/Tests/HistogramTest.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Tests/HistogramTest.cs
rename to Tests/HNSWDemo/Tests/HistogramTest.cs
diff --git a/TestHNSW/HNSWDemo/Tests/ITest.cs b/Tests/HNSWDemo/Tests/ITest.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Tests/ITest.cs
rename to Tests/HNSWDemo/Tests/ITest.cs
diff --git a/TestHNSW/HNSWDemo/Tests/InsertTimeExplosionTest.cs b/Tests/HNSWDemo/Tests/InsertTimeExplosionTest.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Tests/InsertTimeExplosionTest.cs
rename to Tests/HNSWDemo/Tests/InsertTimeExplosionTest.cs
diff --git a/TestHNSW/HNSWDemo/Tests/LALTest.cs b/Tests/HNSWDemo/Tests/LALTest.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Tests/LALTest.cs
rename to Tests/HNSWDemo/Tests/LALTest.cs
diff --git a/TestHNSW/HNSWDemo/Tests/QuantizatorTest.cs b/Tests/HNSWDemo/Tests/QuantizatorTest.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Tests/QuantizatorTest.cs
rename to Tests/HNSWDemo/Tests/QuantizatorTest.cs
diff --git a/TestHNSW/HNSWDemo/Tests/QuantizeAccuracityTest.cs b/Tests/HNSWDemo/Tests/QuantizeAccuracityTest.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Tests/QuantizeAccuracityTest.cs
rename to Tests/HNSWDemo/Tests/QuantizeAccuracityTest.cs
diff --git a/TestHNSW/HNSWDemo/Tests/QuantizeHistogramTest.cs b/Tests/HNSWDemo/Tests/QuantizeHistogramTest.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Tests/QuantizeHistogramTest.cs
rename to Tests/HNSWDemo/Tests/QuantizeHistogramTest.cs
diff --git a/TestHNSW/HNSWDemo/Tests/QuantizeInsertTimeExplosionTest.cs b/Tests/HNSWDemo/Tests/QuantizeInsertTimeExplosionTest.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Tests/QuantizeInsertTimeExplosionTest.cs
rename to Tests/HNSWDemo/Tests/QuantizeInsertTimeExplosionTest.cs
diff --git a/TestHNSW/HNSWDemo/Tests/SaveRestoreTest.cs b/Tests/HNSWDemo/Tests/SaveRestoreTest.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Tests/SaveRestoreTest.cs
rename to Tests/HNSWDemo/Tests/SaveRestoreTest.cs
diff --git a/TestHNSW/HNSWDemo/Utils/QLVectorsDirectCompare.cs b/Tests/HNSWDemo/Utils/QLVectorsDirectCompare.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Utils/QLVectorsDirectCompare.cs
rename to Tests/HNSWDemo/Utils/QLVectorsDirectCompare.cs
diff --git a/TestHNSW/HNSWDemo/Utils/QVectorsDirectCompare.cs b/Tests/HNSWDemo/Utils/QVectorsDirectCompare.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Utils/QVectorsDirectCompare.cs
rename to Tests/HNSWDemo/Utils/QVectorsDirectCompare.cs
diff --git a/TestHNSW/HNSWDemo/Utils/VectorsDirectCompare.cs b/Tests/HNSWDemo/Utils/VectorsDirectCompare.cs
similarity index 100%
rename from TestHNSW/HNSWDemo/Utils/VectorsDirectCompare.cs
rename to Tests/HNSWDemo/Utils/VectorsDirectCompare.cs
diff --git a/TestHNSW/HNSWDemo/t10k-images.idx3-ubyte b/Tests/HNSWDemo/t10k-images.idx3-ubyte
similarity index 100%
rename from TestHNSW/HNSWDemo/t10k-images.idx3-ubyte
rename to Tests/HNSWDemo/t10k-images.idx3-ubyte
diff --git a/PartitionFileStorageTest/Compressor.cs b/Tests/PartitionFileStorageTest/Compressor.cs
similarity index 94%
rename from PartitionFileStorageTest/Compressor.cs
rename to Tests/PartitionFileStorageTest/Compressor.cs
index cbdf771..24ed517 100644
--- a/PartitionFileStorageTest/Compressor.cs
+++ b/Tests/PartitionFileStorageTest/Compressor.cs
@@ -2,9 +2,6 @@
{
public static class Compressor
{
- ///
- /// Упаковка набора чисел в массив байтов
- ///
public static byte[] GetEncodedBytes(IEnumerable list, ref ulong last)
{
byte[] segmentsBytes;
@@ -38,7 +35,6 @@
}
}
-
public static void Write7BitEncodedULong(this MemoryStream writer, ulong value)
{
var first = true;
diff --git a/PartitionFileStorageTest/MsisdnHelper.cs b/Tests/PartitionFileStorageTest/MsisdnHelper.cs
similarity index 72%
rename from PartitionFileStorageTest/MsisdnHelper.cs
rename to Tests/PartitionFileStorageTest/MsisdnHelper.cs
index b705f70..f4a8562 100644
--- a/PartitionFileStorageTest/MsisdnHelper.cs
+++ b/Tests/PartitionFileStorageTest/MsisdnHelper.cs
@@ -17,14 +17,9 @@
{
public static MsisdnParts SplitParts(this ulong msisdn)
{
- //расчитываем только на номера российской нумерации ("7" и 10 цифр)
- //это числа от 70_000_000_000 до 79_999_999_999
-
if (msisdn < 70_000_000_000 || msisdn > 79_999_999_999) throw new ArgumentException(nameof(msisdn));
-
var firstDigit = (int)((msisdn / 1_000_000_000L) % 10);
var otherDigits = (int)(msisdn % 1_000_000_000L);
-
return new MsisdnParts(firstDigit, otherDigits);
}
@@ -45,42 +40,25 @@
}
}
- ///
- /// возвращаются только номера российской нумерации ("7" и 10 цифр) в виде long
- ///
- ///
- ///
- ///
public static bool TryParseMsisdn(this string source, out ulong msisdn)
{
var line = source.Trim();
var length = line.Length;
-
msisdn = 0;
-
- //допустимы форматы номеров "+71234567890", "71234567890", "1234567890"
if (length < 10 || length > 12) return false;
var start = 0;
- if (length == 12) //"+71234567890"
+ if (length == 12)
{
if (line[0] != '+' || line[1] != '7') return false;
start = 2;
}
- if (length == 11) //"71234567890" и "81234567890"
+ if (length == 11)
{
if (line[0] != '7') return false;
start = 1;
}
- /*
- else if (length == 10) //"1234567890"
- {
- start = 0;
- }
- */
-
ulong number = 7;
-
for (var i = start; i < length; i++)
{
var c = line[i];
@@ -93,7 +71,6 @@
return false;
}
}
-
msisdn = number;
return true;
}
diff --git a/PartitionFileStorageTest/PartitionFileStorageTest.csproj b/Tests/PartitionFileStorageTest/PartitionFileStorageTest.csproj
similarity index 80%
rename from PartitionFileStorageTest/PartitionFileStorageTest.csproj
rename to Tests/PartitionFileStorageTest/PartitionFileStorageTest.csproj
index b95bc06..a75da7d 100644
--- a/PartitionFileStorageTest/PartitionFileStorageTest.csproj
+++ b/Tests/PartitionFileStorageTest/PartitionFileStorageTest.csproj
@@ -8,11 +8,11 @@
-
+
-
+
\ No newline at end of file
diff --git a/PartitionFileStorageTest/Program.cs b/Tests/PartitionFileStorageTest/Program.cs
similarity index 100%
rename from PartitionFileStorageTest/Program.cs
rename to Tests/PartitionFileStorageTest/Program.cs
diff --git a/Tests/Qdrant.Test/Program.cs b/Tests/Qdrant.Test/Program.cs
new file mode 100644
index 0000000..9b8878a
--- /dev/null
+++ b/Tests/Qdrant.Test/Program.cs
@@ -0,0 +1,41 @@
+using Grpc.Net.Client;
+using static Qdrant.Collections;
+
+namespace Qdrant.Test
+{
+ // QDRANT VERSION 1.15.1
+ internal class Program
+ {
+ const string COLLECTION_NAME = "my_test_collection";
+ static void Main(string[] args)
+ {
+ var address = @"http://localhost:6334";
+ var channel = GrpcChannel.ForAddress(address);
+ var collections = new CollectionsClient(channel);
+ var response = collections.Create(new CreateCollection
+ {
+ CollectionName = COLLECTION_NAME,
+ VectorsConfig = new VectorsConfig
+ {
+ Params = new VectorParams
+ {
+ Distance = Distance.Dot,
+ Size = 32,
+ HnswConfig = new HnswConfigDiff
+ {
+ OnDisk = false
+ }
+ }
+ }
+ });
+
+ Console.WriteLine($"CREATED: {response.Result}");
+
+ var d_response = collections.Delete(new DeleteCollection
+ {
+ CollectionName = COLLECTION_NAME
+ });
+ Console.WriteLine($"DELETED: {d_response.Result}");
+ }
+ }
+}
\ No newline at end of file
diff --git a/Tests/Qdrant.Test/Qdrant.Test.csproj b/Tests/Qdrant.Test/Qdrant.Test.csproj
new file mode 100644
index 0000000..2ec3ae0
--- /dev/null
+++ b/Tests/Qdrant.Test/Qdrant.Test.csproj
@@ -0,0 +1,21 @@
+
+
+
+ Exe
+ net6.0
+ enable
+ enable
+
+
+
+
+
+
+
+
+
+ ..\..\ZeroLevel.Qdrant.GrpcClient\bin\Release\net6.0\ZeroLevel.Qdrant.GrpcClient.dll
+
+
+
+
diff --git a/TestApp/AppSettings.cs b/Tests/TestApp/AppSettings.cs
similarity index 100%
rename from TestApp/AppSettings.cs
rename to Tests/TestApp/AppSettings.cs
diff --git a/TestApp/Program.cs b/Tests/TestApp/Program.cs
similarity index 100%
rename from TestApp/Program.cs
rename to Tests/TestApp/Program.cs
diff --git a/TestApp/Properties/PublishProfiles/FolderProfile.pubxml b/Tests/TestApp/Properties/PublishProfiles/FolderProfile.pubxml
similarity index 100%
rename from TestApp/Properties/PublishProfiles/FolderProfile.pubxml
rename to Tests/TestApp/Properties/PublishProfiles/FolderProfile.pubxml
diff --git a/TestApp/TestApp.csproj b/Tests/TestApp/TestApp.csproj
similarity index 87%
rename from TestApp/TestApp.csproj
rename to Tests/TestApp/TestApp.csproj
index e7c793c..375d4ee 100644
--- a/TestApp/TestApp.csproj
+++ b/Tests/TestApp/TestApp.csproj
@@ -8,7 +8,7 @@
-
+
diff --git a/TestApp/config.ini b/Tests/TestApp/config.ini
similarity index 100%
rename from TestApp/config.ini
rename to Tests/TestApp/config.ini
diff --git a/TestPipeLine/Consumer/Consumer.csproj b/Tests/TestPipeLine/Consumer/Consumer.csproj
similarity index 81%
rename from TestPipeLine/Consumer/Consumer.csproj
rename to Tests/TestPipeLine/Consumer/Consumer.csproj
index 94808a4..bfff132 100644
--- a/TestPipeLine/Consumer/Consumer.csproj
+++ b/Tests/TestPipeLine/Consumer/Consumer.csproj
@@ -9,7 +9,7 @@
-
+
diff --git a/TestPipeLine/Consumer/ConsumerService.cs b/Tests/TestPipeLine/Consumer/ConsumerService.cs
similarity index 100%
rename from TestPipeLine/Consumer/ConsumerService.cs
rename to Tests/TestPipeLine/Consumer/ConsumerService.cs
diff --git a/TestPipeLine/Consumer/Program.cs b/Tests/TestPipeLine/Consumer/Program.cs
similarity index 100%
rename from TestPipeLine/Consumer/Program.cs
rename to Tests/TestPipeLine/Consumer/Program.cs
diff --git a/TestPipeLine/Processor/Processor.csproj b/Tests/TestPipeLine/Processor/Processor.csproj
similarity index 81%
rename from TestPipeLine/Processor/Processor.csproj
rename to Tests/TestPipeLine/Processor/Processor.csproj
index 94808a4..bfff132 100644
--- a/TestPipeLine/Processor/Processor.csproj
+++ b/Tests/TestPipeLine/Processor/Processor.csproj
@@ -9,7 +9,7 @@
-
+
diff --git a/TestPipeLine/Processor/ProcessorService.cs b/Tests/TestPipeLine/Processor/ProcessorService.cs
similarity index 100%
rename from TestPipeLine/Processor/ProcessorService.cs
rename to Tests/TestPipeLine/Processor/ProcessorService.cs
diff --git a/TestPipeLine/Processor/Program.cs b/Tests/TestPipeLine/Processor/Program.cs
similarity index 100%
rename from TestPipeLine/Processor/Program.cs
rename to Tests/TestPipeLine/Processor/Program.cs
diff --git a/TestPipeLine/Source/Program.cs b/Tests/TestPipeLine/Source/Program.cs
similarity index 100%
rename from TestPipeLine/Source/Program.cs
rename to Tests/TestPipeLine/Source/Program.cs
diff --git a/TestPipeLine/Source/Source.csproj b/Tests/TestPipeLine/Source/Source.csproj
similarity index 81%
rename from TestPipeLine/Source/Source.csproj
rename to Tests/TestPipeLine/Source/Source.csproj
index 94808a4..bfff132 100644
--- a/TestPipeLine/Source/Source.csproj
+++ b/Tests/TestPipeLine/Source/Source.csproj
@@ -9,7 +9,7 @@
-
+
diff --git a/TestPipeLine/Source/SourceService.cs b/Tests/TestPipeLine/Source/SourceService.cs
similarity index 100%
rename from TestPipeLine/Source/SourceService.cs
rename to Tests/TestPipeLine/Source/SourceService.cs
diff --git a/TestPipeLine/Watcher/Program.cs b/Tests/TestPipeLine/Watcher/Program.cs
similarity index 100%
rename from TestPipeLine/Watcher/Program.cs
rename to Tests/TestPipeLine/Watcher/Program.cs
diff --git a/TestPipeLine/Watcher/Watcher.csproj b/Tests/TestPipeLine/Watcher/Watcher.csproj
similarity index 81%
rename from TestPipeLine/Watcher/Watcher.csproj
rename to Tests/TestPipeLine/Watcher/Watcher.csproj
index 94808a4..bfff132 100644
--- a/TestPipeLine/Watcher/Watcher.csproj
+++ b/Tests/TestPipeLine/Watcher/Watcher.csproj
@@ -9,7 +9,7 @@
-
+
diff --git a/TestPipeLine/Watcher/WatcherService.cs b/Tests/TestPipeLine/Watcher/WatcherService.cs
similarity index 100%
rename from TestPipeLine/Watcher/WatcherService.cs
rename to Tests/TestPipeLine/Watcher/WatcherService.cs
diff --git a/ZeroLevel.UnitTests/ArrayToolsTest.cs b/Tests/ZeroLevel.UnitTests/ArrayToolsTest.cs
similarity index 100%
rename from ZeroLevel.UnitTests/ArrayToolsTest.cs
rename to Tests/ZeroLevel.UnitTests/ArrayToolsTest.cs
diff --git a/ZeroLevel.UnitTests/AsyncSerializationTest.cs b/Tests/ZeroLevel.UnitTests/AsyncSerializationTest.cs
similarity index 100%
rename from ZeroLevel.UnitTests/AsyncSerializationTest.cs
rename to Tests/ZeroLevel.UnitTests/AsyncSerializationTest.cs
diff --git a/ZeroLevel.UnitTests/BloomFilterTest.cs b/Tests/ZeroLevel.UnitTests/BloomFilterTest.cs
similarity index 100%
rename from ZeroLevel.UnitTests/BloomFilterTest.cs
rename to Tests/ZeroLevel.UnitTests/BloomFilterTest.cs
diff --git a/ZeroLevel.UnitTests/CollectionsTests.cs b/Tests/ZeroLevel.UnitTests/CollectionsTests.cs
similarity index 100%
rename from ZeroLevel.UnitTests/CollectionsTests.cs
rename to Tests/ZeroLevel.UnitTests/CollectionsTests.cs
diff --git a/ZeroLevel.UnitTests/ConfigurationTest.cs b/Tests/ZeroLevel.UnitTests/ConfigurationTest.cs
similarity index 100%
rename from ZeroLevel.UnitTests/ConfigurationTest.cs
rename to Tests/ZeroLevel.UnitTests/ConfigurationTest.cs
diff --git a/ZeroLevel.UnitTests/DSAUnitTest.cs b/Tests/ZeroLevel.UnitTests/DSAUnitTest.cs
similarity index 100%
rename from ZeroLevel.UnitTests/DSAUnitTest.cs
rename to Tests/ZeroLevel.UnitTests/DSAUnitTest.cs
diff --git a/ZeroLevel.UnitTests/DependencyInjectionTests.cs b/Tests/ZeroLevel.UnitTests/DependencyInjectionTests.cs
similarity index 100%
rename from ZeroLevel.UnitTests/DependencyInjectionTests.cs
rename to Tests/ZeroLevel.UnitTests/DependencyInjectionTests.cs
diff --git a/ZeroLevel.UnitTests/DumpTests.cs b/Tests/ZeroLevel.UnitTests/DumpTests.cs
similarity index 100%
rename from ZeroLevel.UnitTests/DumpTests.cs
rename to Tests/ZeroLevel.UnitTests/DumpTests.cs
diff --git a/ZeroLevel.UnitTests/EncryptionTests.cs b/Tests/ZeroLevel.UnitTests/EncryptionTests.cs
similarity index 100%
rename from ZeroLevel.UnitTests/EncryptionTests.cs
rename to Tests/ZeroLevel.UnitTests/EncryptionTests.cs
diff --git a/ZeroLevel.UnitTests/ExchangeTests.cs b/Tests/ZeroLevel.UnitTests/ExchangeTests.cs
similarity index 100%
rename from ZeroLevel.UnitTests/ExchangeTests.cs
rename to Tests/ZeroLevel.UnitTests/ExchangeTests.cs
diff --git a/ZeroLevel.UnitTests/FSUtilsTest.cs b/Tests/ZeroLevel.UnitTests/FSUtilsTest.cs
similarity index 100%
rename from ZeroLevel.UnitTests/FSUtilsTest.cs
rename to Tests/ZeroLevel.UnitTests/FSUtilsTest.cs
diff --git a/ZeroLevel.UnitTests/InvokingTest.cs b/Tests/ZeroLevel.UnitTests/InvokingTest.cs
similarity index 100%
rename from ZeroLevel.UnitTests/InvokingTest.cs
rename to Tests/ZeroLevel.UnitTests/InvokingTest.cs
diff --git a/ZeroLevel.UnitTests/LCSTest.cs b/Tests/ZeroLevel.UnitTests/LCSTest.cs
similarity index 100%
rename from ZeroLevel.UnitTests/LCSTest.cs
rename to Tests/ZeroLevel.UnitTests/LCSTest.cs
diff --git a/ZeroLevel.UnitTests/MappingTest.cs b/Tests/ZeroLevel.UnitTests/MappingTest.cs
similarity index 100%
rename from ZeroLevel.UnitTests/MappingTest.cs
rename to Tests/ZeroLevel.UnitTests/MappingTest.cs
diff --git a/ZeroLevel.UnitTests/Models/BaseClass.cs b/Tests/ZeroLevel.UnitTests/Models/BaseClass.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/BaseClass.cs
rename to Tests/ZeroLevel.UnitTests/Models/BaseClass.cs
diff --git a/ZeroLevel.UnitTests/Models/BaseFakeClass.cs b/Tests/ZeroLevel.UnitTests/Models/BaseFakeClass.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/BaseFakeClass.cs
rename to Tests/ZeroLevel.UnitTests/Models/BaseFakeClass.cs
diff --git a/ZeroLevel.UnitTests/Models/ChildClass.cs b/Tests/ZeroLevel.UnitTests/Models/ChildClass.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/ChildClass.cs
rename to Tests/ZeroLevel.UnitTests/Models/ChildClass.cs
diff --git a/ZeroLevel.UnitTests/Models/CompositeInstanceFactory.cs b/Tests/ZeroLevel.UnitTests/Models/CompositeInstanceFactory.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/CompositeInstanceFactory.cs
rename to Tests/ZeroLevel.UnitTests/Models/CompositeInstanceFactory.cs
diff --git a/ZeroLevel.UnitTests/Models/FakeClass.cs b/Tests/ZeroLevel.UnitTests/Models/FakeClass.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/FakeClass.cs
rename to Tests/ZeroLevel.UnitTests/Models/FakeClass.cs
diff --git a/ZeroLevel.UnitTests/Models/PocoFields.cs b/Tests/ZeroLevel.UnitTests/Models/PocoFields.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/PocoFields.cs
rename to Tests/ZeroLevel.UnitTests/Models/PocoFields.cs
diff --git a/ZeroLevel.UnitTests/Models/PocoProperties.cs b/Tests/ZeroLevel.UnitTests/Models/PocoProperties.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/PocoProperties.cs
rename to Tests/ZeroLevel.UnitTests/Models/PocoProperties.cs
diff --git a/ZeroLevel.UnitTests/Models/Specifications/IsFlagSpecification.cs b/Tests/ZeroLevel.UnitTests/Models/Specifications/IsFlagSpecification.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/Specifications/IsFlagSpecification.cs
rename to Tests/ZeroLevel.UnitTests/Models/Specifications/IsFlagSpecification.cs
diff --git a/ZeroLevel.UnitTests/Models/Specifications/LongNumberSpecification.cs b/Tests/ZeroLevel.UnitTests/Models/Specifications/LongNumberSpecification.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/Specifications/LongNumberSpecification.cs
rename to Tests/ZeroLevel.UnitTests/Models/Specifications/LongNumberSpecification.cs
diff --git a/ZeroLevel.UnitTests/Models/Specifications/NumberSpecification.cs b/Tests/ZeroLevel.UnitTests/Models/Specifications/NumberSpecification.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/Specifications/NumberSpecification.cs
rename to Tests/ZeroLevel.UnitTests/Models/Specifications/NumberSpecification.cs
diff --git a/ZeroLevel.UnitTests/Models/Specifications/RealSpecification.cs b/Tests/ZeroLevel.UnitTests/Models/Specifications/RealSpecification.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/Specifications/RealSpecification.cs
rename to Tests/ZeroLevel.UnitTests/Models/Specifications/RealSpecification.cs
diff --git a/ZeroLevel.UnitTests/Models/Specifications/SummarySpecification.cs b/Tests/ZeroLevel.UnitTests/Models/Specifications/SummarySpecification.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/Specifications/SummarySpecification.cs
rename to Tests/ZeroLevel.UnitTests/Models/Specifications/SummarySpecification.cs
diff --git a/ZeroLevel.UnitTests/Models/Specifications/TitleSpecification.cs b/Tests/ZeroLevel.UnitTests/Models/Specifications/TitleSpecification.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/Specifications/TitleSpecification.cs
rename to Tests/ZeroLevel.UnitTests/Models/Specifications/TitleSpecification.cs
diff --git a/ZeroLevel.UnitTests/Models/StaticFakeClass.cs b/Tests/ZeroLevel.UnitTests/Models/StaticFakeClass.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/StaticFakeClass.cs
rename to Tests/ZeroLevel.UnitTests/Models/StaticFakeClass.cs
diff --git a/ZeroLevel.UnitTests/Models/TestDTO.cs b/Tests/ZeroLevel.UnitTests/Models/TestDTO.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/TestDTO.cs
rename to Tests/ZeroLevel.UnitTests/Models/TestDTO.cs
diff --git a/ZeroLevel.UnitTests/Models/TestSerializableDTO.cs b/Tests/ZeroLevel.UnitTests/Models/TestSerializableDTO.cs
similarity index 100%
rename from ZeroLevel.UnitTests/Models/TestSerializableDTO.cs
rename to Tests/ZeroLevel.UnitTests/Models/TestSerializableDTO.cs
diff --git a/ZeroLevel.UnitTests/NetworkTest.cs b/Tests/ZeroLevel.UnitTests/NetworkTest.cs
similarity index 100%
rename from ZeroLevel.UnitTests/NetworkTest.cs
rename to Tests/ZeroLevel.UnitTests/NetworkTest.cs
diff --git a/ZeroLevel.UnitTests/PartitionStorageTests.cs b/Tests/ZeroLevel.UnitTests/PartitionStorageTests.cs
similarity index 100%
rename from ZeroLevel.UnitTests/PartitionStorageTests.cs
rename to Tests/ZeroLevel.UnitTests/PartitionStorageTests.cs
diff --git a/ZeroLevel.UnitTests/PredicateBuilderTests.cs b/Tests/ZeroLevel.UnitTests/PredicateBuilderTests.cs
similarity index 100%
rename from ZeroLevel.UnitTests/PredicateBuilderTests.cs
rename to Tests/ZeroLevel.UnitTests/PredicateBuilderTests.cs
diff --git a/ZeroLevel.UnitTests/QueriesTests.cs b/Tests/ZeroLevel.UnitTests/QueriesTests.cs
similarity index 100%
rename from ZeroLevel.UnitTests/QueriesTests.cs
rename to Tests/ZeroLevel.UnitTests/QueriesTests.cs
diff --git a/ZeroLevel.UnitTests/SemanticTests.cs b/Tests/ZeroLevel.UnitTests/SemanticTests.cs
similarity index 100%
rename from ZeroLevel.UnitTests/SemanticTests.cs
rename to Tests/ZeroLevel.UnitTests/SemanticTests.cs
diff --git a/ZeroLevel.UnitTests/SerializationTests.cs b/Tests/ZeroLevel.UnitTests/SerializationTests.cs
similarity index 100%
rename from ZeroLevel.UnitTests/SerializationTests.cs
rename to Tests/ZeroLevel.UnitTests/SerializationTests.cs
diff --git a/ZeroLevel.UnitTests/SpecificationPatternTest.cs b/Tests/ZeroLevel.UnitTests/SpecificationPatternTest.cs
similarity index 100%
rename from ZeroLevel.UnitTests/SpecificationPatternTest.cs
rename to Tests/ZeroLevel.UnitTests/SpecificationPatternTest.cs
diff --git a/ZeroLevel.UnitTests/SuffixAutomataTests.cs b/Tests/ZeroLevel.UnitTests/SuffixAutomataTests.cs
similarity index 100%
rename from ZeroLevel.UnitTests/SuffixAutomataTests.cs
rename to Tests/ZeroLevel.UnitTests/SuffixAutomataTests.cs
diff --git a/ZeroLevel.UnitTests/TokenEncryptorTest.cs b/Tests/ZeroLevel.UnitTests/TokenEncryptorTest.cs
similarity index 100%
rename from ZeroLevel.UnitTests/TokenEncryptorTest.cs
rename to Tests/ZeroLevel.UnitTests/TokenEncryptorTest.cs
diff --git a/ZeroLevel.UnitTests/TrieTests.cs b/Tests/ZeroLevel.UnitTests/TrieTests.cs
similarity index 100%
rename from ZeroLevel.UnitTests/TrieTests.cs
rename to Tests/ZeroLevel.UnitTests/TrieTests.cs
diff --git a/ZeroLevel.UnitTests/ZeroLevel.UnitTests.csproj b/Tests/ZeroLevel.UnitTests/ZeroLevel.UnitTests.csproj
similarity index 79%
rename from ZeroLevel.UnitTests/ZeroLevel.UnitTests.csproj
rename to Tests/ZeroLevel.UnitTests/ZeroLevel.UnitTests.csproj
index 990ceec..aa67297 100644
--- a/ZeroLevel.UnitTests/ZeroLevel.UnitTests.csproj
+++ b/Tests/ZeroLevel.UnitTests/ZeroLevel.UnitTests.csproj
@@ -9,16 +9,16 @@
-
-
-
+
+
+
all
runtime; build; native; contentfiles; analyzers; buildtransitive
-
+
diff --git a/ZeroLevel.NN/Services/Clusterization/FeatureClusterBulder.cs b/ZeroLevel.NN/Services/Clusterization/FeatureClusterBulder.cs
index 800c304..78c642d 100644
--- a/ZeroLevel.NN/Services/Clusterization/FeatureClusterBulder.cs
+++ b/ZeroLevel.NN/Services/Clusterization/FeatureClusterBulder.cs
@@ -2,17 +2,17 @@
{
public class FeatureClusterBulder
{
- public FeatureClusterCollection Build(IEnumerable faces, Func vectorExtractor, Func similarityFunction, float threshold, float clusterThreshold = 0.5f)
+ public FeatureClusterCollection Build(IEnumerable items, Func vectorExtractor, Func similarityFunction, float threshold, float clusterThreshold = 0.5f)
{
var collection = new FeatureClusterCollection();
- foreach (var face in faces)
+ foreach (var item in items)
{
bool isAdded = false;
foreach (var cluster in collection.Clusters)
{
- if (cluster.Value.IsNeighbor(face, similarityFunction, threshold, clusterThreshold))
+ if (cluster.Value.IsNeighbor(item, similarityFunction, threshold, clusterThreshold))
{
- cluster.Value.Append(face);
+ cluster.Value.Append(item);
isAdded = true;
break;
}
@@ -20,7 +20,7 @@
if (false == isAdded)
{
var cluster = new FeatureCluster(vectorExtractor);
- cluster.Append(face);
+ cluster.Append(item);
collection.Add(cluster);
}
}
diff --git a/ZeroLevel.NN/Services/ImagePreprocessor.cs b/ZeroLevel.NN/Services/ImagePreprocessor.cs
index 5823ce3..3aafc0e 100644
--- a/ZeroLevel.NN/Services/ImagePreprocessor.cs
+++ b/ZeroLevel.NN/Services/ImagePreprocessor.cs
@@ -1,5 +1,4 @@
-using Aurigma.GraphicsMill;
-using Microsoft.ML.OnnxRuntime.Tensors;
+using Microsoft.ML.OnnxRuntime.Tensors;
using ZeroLevel.NN.Models;
namespace ZeroLevel.NN
diff --git a/ZeroLevel.NN/ZeroLevel.NN.csproj b/ZeroLevel.NN/ZeroLevel.NN.csproj
index 6ef8a8a..396c6ba 100644
--- a/ZeroLevel.NN/ZeroLevel.NN.csproj
+++ b/ZeroLevel.NN/ZeroLevel.NN.csproj
@@ -35,10 +35,10 @@
-
+
-
-
+
+
diff --git a/ZeroLevel.Qdrant.GrpcClient/Protos/collections.proto b/ZeroLevel.Qdrant.GrpcClient/Protos/collections.proto
new file mode 100644
index 0000000..794f3ee
--- /dev/null
+++ b/ZeroLevel.Qdrant.GrpcClient/Protos/collections.proto
@@ -0,0 +1,444 @@
+syntax = "proto3";
+package qdrant;
+
+message VectorParams {
+ uint64 size = 1; // Size of the vectors
+ Distance distance = 2; // Distance function used for comparing vectors
+ optional HnswConfigDiff hnsw_config = 3; // Configuration of vector HNSW graph. If omitted - the collection configuration will be used
+ optional QuantizationConfig quantization_config = 4; // Configuration of vector quantization config. If omitted - the collection configuration will be used
+ optional bool on_disk = 5; // If true - serve vectors from disk. If set to false, the vectors will be loaded in RAM.
+}
+
+message VectorParamsDiff {
+ optional HnswConfigDiff hnsw_config = 1; // Update params for HNSW index. If empty object - it will be unset
+ optional QuantizationConfigDiff quantization_config = 2; // Update quantization params. If none - it is left unchanged.
+ optional bool on_disk = 3; // If true - serve vectors from disk. If set to false, the vectors will be loaded in RAM.
+}
+
+message VectorParamsMap {
+  map<string, VectorParams> map = 1;
+}
+
+message VectorParamsDiffMap {
+  map<string, VectorParamsDiff> map = 1;
+}
+
+message VectorsConfig {
+ oneof config {
+ VectorParams params = 1;
+ VectorParamsMap params_map = 2;
+ }
+}
+
+message VectorsConfigDiff {
+ oneof config {
+ VectorParamsDiff params = 1;
+ VectorParamsDiffMap params_map = 2;
+ }
+}
+
+message GetCollectionInfoRequest {
+ string collection_name = 1; // Name of the collection
+}
+
+message ListCollectionsRequest {
+}
+
+message CollectionDescription {
+ string name = 1; // Name of the collection
+}
+
+message GetCollectionInfoResponse {
+ CollectionInfo result = 1;
+ double time = 2; // Time spent to process
+}
+
+message ListCollectionsResponse {
+ repeated CollectionDescription collections = 1;
+ double time = 2; // Time spent to process
+}
+
+enum Distance {
+ UnknownDistance = 0;
+ Cosine = 1;
+ Euclid = 2;
+ Dot = 3;
+}
+
+enum CollectionStatus {
+ UnknownCollectionStatus = 0;
+ Green = 1; // All segments are ready
+ Yellow = 2; // Optimization in process
+ Red = 3; // Something went wrong
+}
+
+enum PayloadSchemaType {
+ UnknownType = 0;
+ Keyword = 1;
+ Integer = 2;
+ Float = 3;
+ Geo = 4;
+ Text = 5;
+ Bool = 6;
+}
+
+enum QuantizationType {
+ UnknownQuantization = 0;
+ Int8 = 1;
+}
+
+enum CompressionRatio {
+ x4 = 0;
+ x8 = 1;
+ x16 = 2;
+ x32 = 3;
+ x64 = 4;
+}
+
+message OptimizerStatus {
+ bool ok = 1;
+ string error = 2;
+}
+
+message HnswConfigDiff {
+ /*
+ Number of edges per node in the index graph. Larger the value - more accurate the search, more space required.
+ */
+ optional uint64 m = 1;
+ /*
+ Number of neighbours to consider during the index building. Larger the value - more accurate the search, more time required to build the index.
+ */
+ optional uint64 ef_construct = 2;
+ /*
+ Minimal size (in KiloBytes) of vectors for additional payload-based indexing.
+ If the payload chunk is smaller than `full_scan_threshold` additional indexing won't be used -
+ in this case full-scan search should be preferred by query planner and additional indexing is not required.
+ Note: 1 Kb = 1 vector of size 256
+ */
+ optional uint64 full_scan_threshold = 3;
+ /*
+ Number of parallel threads used for background index building. If 0 - auto selection.
+ */
+ optional uint64 max_indexing_threads = 4;
+ /*
+ Store HNSW index on disk. If set to false, the index will be stored in RAM.
+ */
+ optional bool on_disk = 5;
+ /*
+ Number of additional payload-aware links per node in the index graph. If not set - regular M parameter will be used.
+ */
+ optional uint64 payload_m = 6;
+}
+
+message WalConfigDiff {
+ optional uint64 wal_capacity_mb = 1; // Size of a single WAL block file
+ optional uint64 wal_segments_ahead = 2; // Number of segments to create in advance
+}
+
+message OptimizersConfigDiff {
+ /*
+ The minimal fraction of deleted vectors in a segment, required to perform segment optimization
+ */
+ optional double deleted_threshold = 1;
+ /*
+ The minimal number of vectors in a segment, required to perform segment optimization
+ */
+ optional uint64 vacuum_min_vector_number = 2;
+ /*
+ Target amount of segments the optimizer will try to keep.
+ Real amount of segments may vary depending on multiple parameters:
+
+ - Amount of stored points.
+ - Current write RPS.
+
+ It is recommended to select the default number of segments as a factor of the number of search threads,
+ so that each segment would be handled evenly by one of the threads.
+ */
+ optional uint64 default_segment_number = 3;
+ /*
+ Do not create segments larger this size (in kilobytes).
+ Large segments might require disproportionately long indexation times,
+ therefore it makes sense to limit the size of segments.
+
+ If indexing speed is more important - make this parameter lower.
+ If search speed is more important - make this parameter higher.
+ Note: 1Kb = 1 vector of size 256
+ If not set, will be automatically selected considering the number of available CPUs.
+ */
+ optional uint64 max_segment_size = 4;
+ /*
+ Maximum size (in kilobytes) of vectors to store in-memory per segment.
+ Segments larger than this threshold will be stored as read-only memmaped file.
+
+ Memmap storage is disabled by default, to enable it, set this threshold to a reasonable value.
+
+ To disable memmap storage, set this to `0`.
+
+ Note: 1Kb = 1 vector of size 256
+ */
+ optional uint64 memmap_threshold = 5;
+ /*
+ Maximum size (in kilobytes) of vectors allowed for plain index, exceeding this threshold will enable vector indexing
+
+  Default value is 20,000, based on <https://github.com/google/research/blob/master/scann/docs/algorithms.md>.
+
+ To disable vector indexing, set to `0`.
+
+ Note: 1kB = 1 vector of size 256.
+ */
+ optional uint64 indexing_threshold = 6;
+ /*
+ Interval between forced flushes.
+ */
+ optional uint64 flush_interval_sec = 7;
+ /*
+ Max number of threads, which can be used for optimization. If 0 - `NUM_CPU - 1` will be used
+ */
+ optional uint64 max_optimization_threads = 8;
+}
+
+message ScalarQuantization {
+ QuantizationType type = 1; // Type of quantization
+ optional float quantile = 2; // Number of bits to use for quantization
+ optional bool always_ram = 3; // If true - quantized vectors always will be stored in RAM, ignoring the config of main storage
+}
+
+message ProductQuantization {
+ CompressionRatio compression = 1; // Compression ratio
+ optional bool always_ram = 2; // If true - quantized vectors always will be stored in RAM, ignoring the config of main storage
+}
+
+message BinaryQuantization {
+ optional bool always_ram = 1; // If true - quantized vectors always will be stored in RAM, ignoring the config of main storage
+}
+
+message QuantizationConfig {
+ oneof quantization {
+ ScalarQuantization scalar = 1;
+ ProductQuantization product = 2;
+ BinaryQuantization binary = 3;
+ }
+}
+
+message Disabled {
+
+}
+
+message QuantizationConfigDiff {
+ oneof quantization {
+ ScalarQuantization scalar = 1;
+ ProductQuantization product = 2;
+ Disabled disabled = 3;
+ BinaryQuantization binary = 4;
+ }
+}
+
+message CreateCollection {
+ string collection_name = 1; // Name of the collection
+ reserved 2; // Deprecated
+ reserved 3; // Deprecated
+ optional HnswConfigDiff hnsw_config = 4; // Configuration of vector index
+ optional WalConfigDiff wal_config = 5; // Configuration of the Write-Ahead-Log
+ optional OptimizersConfigDiff optimizers_config = 6; // Configuration of the optimizers
+ optional uint32 shard_number = 7; // Number of shards in the collection, default is 1 for standalone, otherwise equal to the number of nodes. Minimum is 1
+ optional bool on_disk_payload = 8; // If true - point's payload will not be stored in memory
+ optional uint64 timeout = 9; // Wait timeout for operation commit in seconds, if not specified - default value will be supplied
+ optional VectorsConfig vectors_config = 10; // Configuration for vectors
+ optional uint32 replication_factor = 11; // Number of replicas of each shard that network tries to maintain, default = 1
+ optional uint32 write_consistency_factor = 12; // How many replicas should apply the operation for us to consider it successful, default = 1
+ optional string init_from_collection = 13; // Specify name of the other collection to copy data from
+ optional QuantizationConfig quantization_config = 14; // Quantization configuration of vector
+}
+
+message UpdateCollection {
+ string collection_name = 1; // Name of the collection
+ optional OptimizersConfigDiff optimizers_config = 2; // New configuration parameters for the collection. This operation is blocking, it will only proceed once all current optimizations are complete
+ optional uint64 timeout = 3; // Wait timeout for operation commit in seconds if blocking, if not specified - default value will be supplied
+ optional CollectionParamsDiff params = 4; // New configuration parameters for the collection
+ optional HnswConfigDiff hnsw_config = 5; // New HNSW parameters for the collection index
+ optional VectorsConfigDiff vectors_config = 6; // New vector parameters
+ optional QuantizationConfigDiff quantization_config = 7; // Quantization configuration of vector
+}
+
+message DeleteCollection {
+ string collection_name = 1; // Name of the collection
+ optional uint64 timeout = 2; // Wait timeout for operation commit in seconds, if not specified - default value will be supplied
+}
+
+message CollectionOperationResponse {
+ bool result = 1; // if operation made changes
+ double time = 2; // Time spent to process
+}
+
+message CollectionParams {
+ reserved 1; // Deprecated
+ reserved 2; // Deprecated
+ uint32 shard_number = 3; // Number of shards in collection
+ bool on_disk_payload = 4; // If true - point's payload will not be stored in memory
+ optional VectorsConfig vectors_config = 5; // Configuration for vectors
+ optional uint32 replication_factor = 6; // Number of replicas of each shard that network tries to maintain
+ optional uint32 write_consistency_factor = 7; // How many replicas should apply the operation for us to consider it successful
+}
+
+message CollectionParamsDiff {
+ optional uint32 replication_factor = 1; // Number of replicas of each shard that network tries to maintain
+ optional uint32 write_consistency_factor = 2; // How many replicas should apply the operation for us to consider it successful
+ optional bool on_disk_payload = 3; // If true - point's payload will not be stored in memory
+}
+
+message CollectionConfig {
+ CollectionParams params = 1; // Collection parameters
+ HnswConfigDiff hnsw_config = 2; // Configuration of vector index
+ OptimizersConfigDiff optimizer_config = 3; // Configuration of the optimizers
+ WalConfigDiff wal_config = 4; // Configuration of the Write-Ahead-Log
+ optional QuantizationConfig quantization_config = 5; // Configuration of the vector quantization
+}
+
+enum TokenizerType {
+ Unknown = 0;
+ Prefix = 1;
+ Whitespace = 2;
+ Word = 3;
+ Multilingual = 4;
+}
+
+message TextIndexParams {
+ TokenizerType tokenizer = 1; // Tokenizer type
+ optional bool lowercase = 2; // If true - all tokens will be lowercase
+ optional uint64 min_token_len = 3; // Minimal token length
+ optional uint64 max_token_len = 4; // Maximal token length
+}
+
+message PayloadIndexParams {
+ oneof index_params {
+ TextIndexParams text_index_params = 1; // Parameters for text index
+ }
+}
+
+message PayloadSchemaInfo {
+ PayloadSchemaType data_type = 1; // Field data type
+ optional PayloadIndexParams params = 2; // Field index parameters
+ optional uint64 points = 3; // Number of points indexed within this field indexed
+}
+
+message CollectionInfo {
+ CollectionStatus status = 1; // operating condition of the collection
+ OptimizerStatus optimizer_status = 2; // status of collection optimizers
+ uint64 vectors_count = 3; // number of vectors in the collection
+ uint64 segments_count = 4; // Number of independent segments
+ reserved 5; // Deprecated
+ reserved 6; // Deprecated
+ CollectionConfig config = 7; // Configuration
+  map<string, PayloadSchemaInfo> payload_schema = 8; // Collection data types
+ uint64 points_count = 9; // number of points in the collection
+ optional uint64 indexed_vectors_count = 10; // number of indexed vectors in the collection.
+}
+
+message ChangeAliases {
+ repeated AliasOperations actions = 1; // List of actions
+ optional uint64 timeout = 2; // Wait timeout for operation commit in seconds, if not specified - default value will be supplied
+}
+
+message AliasOperations {
+ oneof action {
+ CreateAlias create_alias = 1;
+ RenameAlias rename_alias = 2;
+ DeleteAlias delete_alias = 3;
+ }
+}
+
+message CreateAlias {
+ string collection_name = 1; // Name of the collection
+ string alias_name = 2; // New name of the alias
+}
+
+message RenameAlias {
+ string old_alias_name = 1; // Name of the alias to rename
+ string new_alias_name = 2; // Name of the alias
+}
+
+message DeleteAlias {
+ string alias_name = 1; // Name of the alias
+}
+
+message ListAliasesRequest {
+}
+
+message ListCollectionAliasesRequest {
+ string collection_name = 1; // Name of the collection
+}
+
+message AliasDescription {
+ string alias_name = 1; // Name of the alias
+ string collection_name = 2; // Name of the collection
+}
+
+message ListAliasesResponse {
+ repeated AliasDescription aliases = 1;
+ double time = 2; // Time spent to process
+}
+
+message CollectionClusterInfoRequest {
+ string collection_name = 1; // Name of the collection
+}
+
+enum ReplicaState {
+ Active = 0; // Active and sound
+ Dead = 1; // Failed for some reason
+ Partial = 2; // The shard is partially loaded and is currently receiving data from other shards
+ Initializing = 3; // Collection is being created
+ Listener = 4; // A shard which receives data, but is not used for search; Useful for backup shards
+}
+
+message LocalShardInfo {
+ uint32 shard_id = 1; // Local shard id
+ uint64 points_count = 2; // Number of points in the shard
+ ReplicaState state = 3; // Is replica active
+}
+
+message RemoteShardInfo {
+ uint32 shard_id = 1; // Local shard id
+ uint64 peer_id = 2; // Remote peer id
+ ReplicaState state = 3; // Is replica active
+}
+
+message ShardTransferInfo {
+ uint32 shard_id = 1; // Local shard id
+ uint64 from = 2;
+ uint64 to = 3;
+ bool sync = 4; // If `true` transfer is a synchronization of a replicas; If `false` transfer is a moving of a shard from one peer to another
+}
+
+message CollectionClusterInfoResponse {
+ uint64 peer_id = 1; // ID of this peer
+ uint64 shard_count = 2; // Total number of shards
+ repeated LocalShardInfo local_shards = 3; // Local shards
+ repeated RemoteShardInfo remote_shards = 4; // Remote shards
+ repeated ShardTransferInfo shard_transfers = 5; // Shard transfers
+}
+
+message MoveShard {
+ uint32 shard_id = 1; // Local shard id
+ uint64 from_peer_id = 2;
+ uint64 to_peer_id = 3;
+}
+
+message Replica {
+ uint32 shard_id = 1;
+ uint64 peer_id = 2;
+}
+
+message UpdateCollectionClusterSetupRequest {
+ string collection_name = 1; // Name of the collection
+ oneof operation {
+ MoveShard move_shard = 2;
+ MoveShard replicate_shard = 3;
+ MoveShard abort_transfer = 4;
+ Replica drop_replica = 5;
+ }
+ optional uint64 timeout = 6; // Wait timeout for operation commit in seconds, if not specified - default value will be supplied
+}
+
+message UpdateCollectionClusterSetupResponse {
+ bool result = 1;
+}
diff --git a/ZeroLevel.Qdrant.GrpcClient/Protos/collections_internal_service.proto b/ZeroLevel.Qdrant.GrpcClient/Protos/collections_internal_service.proto
new file mode 100644
index 0000000..465a511
--- /dev/null
+++ b/ZeroLevel.Qdrant.GrpcClient/Protos/collections_internal_service.proto
@@ -0,0 +1,26 @@
+syntax = "proto3";
+
+import "Protos/collections.proto";
+
+package qdrant;
+
+service CollectionsInternal {
+ /*
+ Get collection info
+ */
+ rpc Get (GetCollectionInfoRequestInternal) returns (GetCollectionInfoResponse) {}
+ /*
+ Initiate shard transfer
+ */
+ rpc Initiate (InitiateShardTransferRequest) returns (CollectionOperationResponse) {}
+}
+
+message GetCollectionInfoRequestInternal {
+ GetCollectionInfoRequest get_collectionInfoRequest = 1;
+ uint32 shard_id = 2;
+}
+
+message InitiateShardTransferRequest {
+ string collection_name = 1; // Name of the collection
+ uint32 shard_id = 2; // Id of the temporary shard
+}
\ No newline at end of file
diff --git a/ZeroLevel.Qdrant.GrpcClient/Protos/collections_service.proto b/ZeroLevel.Qdrant.GrpcClient/Protos/collections_service.proto
new file mode 100644
index 0000000..2e33674
--- /dev/null
+++ b/ZeroLevel.Qdrant.GrpcClient/Protos/collections_service.proto
@@ -0,0 +1,48 @@
+syntax = "proto3";
+
+import "Protos/collections.proto";
+
+package qdrant;
+
+service Collections {
+ /*
+ Get detailed information about specified existing collection
+ */
+ rpc Get (GetCollectionInfoRequest) returns (GetCollectionInfoResponse) {}
+ /*
+ Get list name of all existing collections
+ */
+ rpc List (ListCollectionsRequest) returns (ListCollectionsResponse) {}
+ /*
+ Create new collection with given parameters
+ */
+ rpc Create (CreateCollection) returns (CollectionOperationResponse) {}
+ /*
+ Update parameters of the existing collection
+ */
+ rpc Update (UpdateCollection) returns (CollectionOperationResponse) {}
+ /*
+ Drop collection and all associated data
+ */
+ rpc Delete (DeleteCollection) returns (CollectionOperationResponse) {}
+ /*
+ Update Aliases of the existing collection
+ */
+ rpc UpdateAliases (ChangeAliases) returns (CollectionOperationResponse) {}
+ /*
+ Get list of all aliases for a collection
+ */
+ rpc ListCollectionAliases (ListCollectionAliasesRequest) returns (ListAliasesResponse) {}
+ /*
+ Get list of all aliases for all existing collections
+ */
+ rpc ListAliases (ListAliasesRequest) returns (ListAliasesResponse) {}
+ /*
+ Get cluster information for a collection
+ */
+ rpc CollectionClusterInfo (CollectionClusterInfoRequest) returns (CollectionClusterInfoResponse) {}
+ /*
+ Update cluster setup for a collection
+ */
+ rpc UpdateCollectionClusterSetup (UpdateCollectionClusterSetupRequest) returns (UpdateCollectionClusterSetupResponse) {}
+}
diff --git a/ZeroLevel.Qdrant.GrpcClient/Protos/json_with_int.proto b/ZeroLevel.Qdrant.GrpcClient/Protos/json_with_int.proto
new file mode 100644
index 0000000..3fc496e
--- /dev/null
+++ b/ZeroLevel.Qdrant.GrpcClient/Protos/json_with_int.proto
@@ -0,0 +1,61 @@
+// Fork of the google.protobuf.Value with explicit support for integer values
+
+syntax = "proto3";
+
+package qdrant;
+
+// `Struct` represents a structured data value, consisting of fields
+// which map to dynamically typed values. In some languages, `Struct`
+// might be supported by a native representation. For example, in
+// scripting languages like JS a struct is represented as an
+// object. The details of that representation are described together
+// with the proto support for the language.
+//
+// The JSON representation for `Struct` is a JSON object.
+message Struct {
+ // Unordered map of dynamically typed values.
+  map<string, Value> fields = 1;
+}
+
+// `Value` represents a dynamically typed value which can be either
+// null, a number, a string, a boolean, a recursive struct value, or a
+// list of values. A producer of value is expected to set one of those
+// variants, absence of any variant indicates an error.
+//
+// The JSON representation for `Value` is a JSON value.
+message Value {
+ // The kind of value.
+ oneof kind {
+ // Represents a null value.
+ NullValue null_value = 1;
+ // Represents a double value.
+ double double_value = 2;
+ // Represents an integer value
+ int64 integer_value = 3;
+ // Represents a string value.
+ string string_value = 4;
+ // Represents a boolean value.
+ bool bool_value = 5;
+ // Represents a structured value.
+ Struct struct_value = 6;
+ // Represents a repeated `Value`.
+ ListValue list_value = 7;
+ }
+}
+
+// `NullValue` is a singleton enumeration to represent the null value for the
+// `Value` type union.
+//
+// The JSON representation for `NullValue` is JSON `null`.
+enum NullValue {
+ // Null value.
+ NULL_VALUE = 0;
+}
+
+// `ListValue` is a wrapper around a repeated field of values.
+//
+// The JSON representation for `ListValue` is a JSON array.
+message ListValue {
+ // Repeated field of dynamically typed values.
+ repeated Value values = 1;
+}
diff --git a/ZeroLevel.Qdrant.GrpcClient/Protos/points.proto b/ZeroLevel.Qdrant.GrpcClient/Protos/points.proto
new file mode 100644
index 0000000..9d4c985
--- /dev/null
+++ b/ZeroLevel.Qdrant.GrpcClient/Protos/points.proto
@@ -0,0 +1,622 @@
+syntax = "proto3";
+
+package qdrant;
+
+import "Protos/json_with_int.proto";
+import "Protos/collections.proto";
+
+
+enum WriteOrderingType {
+ Weak = 0; // Write operations may be reordered, works faster, default
+ Medium = 1; // Write operations go through dynamically selected leader, may be inconsistent for a short period of time in case of leader change
+ Strong = 2; // Write operations go through the permanent leader, consistent, but may be unavailable if leader is down
+}
+
+message WriteOrdering {
+ WriteOrderingType type = 1; // Write ordering guarantees
+}
+
+enum ReadConsistencyType {
+ All = 0; // Send request to all nodes and return points which are present on all of them
+ Majority = 1; // Send requests to all nodes and return points which are present on majority of them
+ Quorum = 2; // Send requests to half + 1 nodes, return points which are present on all of them
+}
+
+message ReadConsistency {
+ oneof value {
+ ReadConsistencyType type = 1; // Common read consistency configurations
+ uint64 factor = 2; // Send request to a specified number of nodes, and return points which are present on all of them
+ }
+}
+
+// ---------------------------------------------
+// ------------- Point Id Requests -------------
+// ---------------------------------------------
+
+message PointId {
+ oneof point_id_options {
+ uint64 num = 1; // Numerical ID of the point
+ string uuid = 2; // UUID
+ }
+}
+
+message Vector {
+ repeated float data = 1;
+}
+
+// ---------------------------------------------
+// ---------------- RPC Requests ---------------
+// ---------------------------------------------
+
+message UpsertPoints {
+ string collection_name = 1; // name of the collection
+ optional bool wait = 2; // Wait until the changes have been applied?
+ repeated PointStruct points = 3;
+ optional WriteOrdering ordering = 4; // Write ordering guarantees
+}
+
+message DeletePoints {
+ string collection_name = 1; // name of the collection
+ optional bool wait = 2; // Wait until the changes have been applied?
+ PointsSelector points = 3; // Affected points
+ optional WriteOrdering ordering = 4; // Write ordering guarantees
+}
+
+message GetPoints {
+ string collection_name = 1; // name of the collection
+ repeated PointId ids = 2; // List of points to retrieve
+ reserved 3; // deprecated "with_vector" field
+ WithPayloadSelector with_payload = 4; // Options for specifying which payload to include or not
+ optional WithVectorsSelector with_vectors = 5; // Options for specifying which vectors to include into response
+ optional ReadConsistency read_consistency = 6; // Options for specifying read consistency guarantees
+}
+
+message UpdatePointVectors {
+ string collection_name = 1; // name of the collection
+ optional bool wait = 2; // Wait until the changes have been applied?
+ repeated PointVectors points = 3; // List of points and vectors to update
+ optional WriteOrdering ordering = 4; // Write ordering guarantees
+}
+
+message PointVectors {
+ PointId id = 1; // ID to update vectors for
+ Vectors vectors = 2; // Named vectors to update, leave others intact
+}
+
+message DeletePointVectors {
+ string collection_name = 1; // name of the collection
+ optional bool wait = 2; // Wait until the changes have been applied?
+ PointsSelector points_selector = 3; // Affected points
+ VectorsSelector vectors = 4; // List of vector names to delete
+ optional WriteOrdering ordering = 5; // Write ordering guarantees
+}
+
+message SetPayloadPoints {
+ string collection_name = 1; // name of the collection
+ optional bool wait = 2; // Wait until the changes have been applied?
+  map<string, Value> payload = 3; // New payload values
+ reserved 4; // List of point to modify, deprecated
+ optional PointsSelector points_selector = 5; // Affected points
+ optional WriteOrdering ordering = 6; // Write ordering guarantees
+}
+
+message DeletePayloadPoints {
+ string collection_name = 1; // name of the collection
+ optional bool wait = 2; // Wait until the changes have been applied?
+ repeated string keys = 3; // List of keys to delete
+ reserved 4; // Affected points, deprecated
+ optional PointsSelector points_selector = 5; // Affected points
+ optional WriteOrdering ordering = 6; // Write ordering guarantees
+}
+
+message ClearPayloadPoints {
+ string collection_name = 1; // name of the collection
+ optional bool wait = 2; // Wait until the changes have been applied?
+ PointsSelector points = 3; // Affected points
+ optional WriteOrdering ordering = 4; // Write ordering guarantees
+}
+
+enum FieldType {
+ FieldTypeKeyword = 0;
+ FieldTypeInteger = 1;
+ FieldTypeFloat = 2;
+ FieldTypeGeo = 3;
+ FieldTypeText = 4;
+ FieldTypeBool = 5;
+}
+
+message CreateFieldIndexCollection {
+ string collection_name = 1; // name of the collection
+ optional bool wait = 2; // Wait until the changes have been applied?
+ string field_name = 3; // Field name to index
+ optional FieldType field_type = 4; // Field type.
+ optional PayloadIndexParams field_index_params = 5; // Payload index params.
+ optional WriteOrdering ordering = 6; // Write ordering guarantees
+}
+
+message DeleteFieldIndexCollection {
+ string collection_name = 1; // name of the collection
+ optional bool wait = 2; // Wait until the changes have been applied?
+ string field_name = 3; // Field name to delete
+ optional WriteOrdering ordering = 4; // Write ordering guarantees
+}
+
+message PayloadIncludeSelector {
+ repeated string fields = 1; // List of payload keys to include into result
+}
+
+message PayloadExcludeSelector {
+ repeated string fields = 1; // List of payload keys to exclude from the result
+}
+
+message WithPayloadSelector {
+ oneof selector_options {
+ bool enable = 1; // If `true` - return all payload, if `false` - none
+ PayloadIncludeSelector include = 2;
+ PayloadExcludeSelector exclude = 3;
+ }
+}
+
+message NamedVectors {
+  map<string, Vector> vectors = 1;
+}
+
+message Vectors {
+ oneof vectors_options {
+ Vector vector = 1;
+ NamedVectors vectors = 2;
+ }
+}
+
+message VectorsSelector {
+ repeated string names = 1; // List of vectors to include into result
+}
+
+message WithVectorsSelector {
+ oneof selector_options {
+ bool enable = 1; // If `true` - return all vectors, if `false` - none
+ VectorsSelector include = 2; // List of payload keys to include into result
+ }
+}
+
+message QuantizationSearchParams {
+ /*
+ If set to true, search will ignore quantized vector data
+ */
+ optional bool ignore = 1;
+
+ /*
+ If true, use original vectors to re-score top-k results. Default is true.
+ */
+ optional bool rescore = 2;
+
+ /*
+ Oversampling factor for quantization.
+
+ Defines how many extra vectors should be pre-selected using quantized index,
+ and then re-scored using original vectors.
+
+ For example, if `oversampling` is 2.4 and `limit` is 100, then 240 vectors will be pre-selected using quantized index,
+ and then top-100 will be returned after re-scoring.
+ */
+ optional double oversampling = 3;
+}
+
+message SearchParams {
+ /*
+ Params relevant to HNSW index. Size of the beam in a beam-search.
+ Larger the value - more accurate the result, more time required for search.
+ */
+ optional uint64 hnsw_ef = 1;
+
+ /*
+ Search without approximation. If set to true, search may run long but with exact results.
+ */
+ optional bool exact = 2;
+
+ /*
+ If set to true, search will ignore quantized vector data
+ */
+ optional QuantizationSearchParams quantization = 3;
+ /*
+ If enabled, the engine will only perform search among indexed or small segments.
+ Using this option prevents slow searches in case of delayed index, but does not
+ guarantee that all uploaded vectors will be included in search results
+ */
+ optional bool indexed_only = 4;
+}
+
+message SearchPoints {
+ string collection_name = 1; // name of the collection
+ repeated float vector = 2; // vector
+ Filter filter = 3; // Filter conditions - return only those points that satisfy the specified conditions
+ uint64 limit = 4; // Max number of result
+ reserved 5; // deprecated "with_vector" field
+ WithPayloadSelector with_payload = 6; // Options for specifying which payload to include or not
+ SearchParams params = 7; // Search config
+ optional float score_threshold = 8; // If provided - cut off results with worse scores
+ optional uint64 offset = 9; // Offset of the result
+ optional string vector_name = 10; // Which vector to use for search, if not specified - use default vector
+ optional WithVectorsSelector with_vectors = 11; // Options for specifying which vectors to include into response
+ optional ReadConsistency read_consistency = 12; // Options for specifying read consistency guarantees
+}
+
+message SearchBatchPoints {
+ string collection_name = 1; // Name of the collection
+ repeated SearchPoints search_points = 2;
+ optional ReadConsistency read_consistency = 3; // Options for specifying read consistency guarantees
+}
+
+message WithLookup {
+ string collection = 1; // Name of the collection to use for points lookup
+ optional WithPayloadSelector with_payload = 2; // Options for specifying which payload to include (or not)
+ optional WithVectorsSelector with_vectors = 3; // Options for specifying which vectors to include (or not)
+}
+
+
+message SearchPointGroups {
+ string collection_name = 1; // Name of the collection
+ repeated float vector = 2; // Vector to compare against
+ Filter filter = 3; // Filter conditions - return only those points that satisfy the specified conditions
+  uint32 limit = 4; // Max number of results
+ WithPayloadSelector with_payload = 5; // Options for specifying which payload to include or not
+ SearchParams params = 6; // Search config
+ optional float score_threshold = 7; // If provided - cut off results with worse scores
+ optional string vector_name = 8; // Which vector to use for search, if not specified - use default vector
+ optional WithVectorsSelector with_vectors = 9; // Options for specifying which vectors to include into response
+ string group_by = 10; // Payload field to group by, must be a string or number field. If there are multiple values for the field, all of them will be used. One point can be in multiple groups.
+ uint32 group_size = 11; // Maximum amount of points to return per group
+ optional ReadConsistency read_consistency = 12; // Options for specifying read consistency guarantees
+ optional WithLookup with_lookup = 13; // Options for specifying how to use the group id to lookup points in another collection
+}
+
+message ScrollPoints {
+ string collection_name = 1;
+ Filter filter = 2; // Filter conditions - return only those points that satisfy the specified conditions
+ optional PointId offset = 3; // Start with this ID
+  optional uint32 limit = 4; // Max number of results
+ reserved 5; // deprecated "with_vector" field
+ WithPayloadSelector with_payload = 6; // Options for specifying which payload to include or not
+ optional WithVectorsSelector with_vectors = 7; // Options for specifying which vectors to include into response
+ optional ReadConsistency read_consistency = 8; // Options for specifying read consistency guarantees
+}
+
+message LookupLocation {
+ string collection_name = 1;
+ optional string vector_name = 2; // Which vector to use for search, if not specified - use default vector
+}
+
+message RecommendPoints {
+ string collection_name = 1; // name of the collection
+ repeated PointId positive = 2; // Look for vectors closest to those
+ repeated PointId negative = 3; // Try to avoid vectors like this
+ Filter filter = 4; // Filter conditions - return only those points that satisfy the specified conditions
+  uint64 limit = 5; // Max number of results
+ reserved 6; // deprecated "with_vector" field
+ WithPayloadSelector with_payload = 7; // Options for specifying which payload to include or not
+ SearchParams params = 8; // Search config
+ optional float score_threshold = 9; // If provided - cut off results with worse scores
+ optional uint64 offset = 10; // Offset of the result
+ optional string using = 11; // Define which vector to use for recommendation, if not specified - default vector
+ optional WithVectorsSelector with_vectors = 12; // Options for specifying which vectors to include into response
+ optional LookupLocation lookup_from = 13; // Name of the collection to use for points lookup, if not specified - use current collection
+ optional ReadConsistency read_consistency = 14; // Options for specifying read consistency guarantees
+}
+
+message RecommendBatchPoints {
+ string collection_name = 1; // Name of the collection
+ repeated RecommendPoints recommend_points = 2;
+ optional ReadConsistency read_consistency = 3; // Options for specifying read consistency guarantees
+}
+
+message RecommendPointGroups {
+ string collection_name = 1; // Name of the collection
+ repeated PointId positive = 2; // Look for vectors closest to those
+ repeated PointId negative = 3; // Try to avoid vectors like this
+ Filter filter = 4; // Filter conditions - return only those points that satisfy the specified conditions
+ uint32 limit = 5; // Max number of groups in result
+ WithPayloadSelector with_payload = 6; // Options for specifying which payload to include or not
+ SearchParams params = 7; // Search config
+ optional float score_threshold = 8; // If provided - cut off results with worse scores
+ optional string using = 9; // Define which vector to use for recommendation, if not specified - default vector
+ optional WithVectorsSelector with_vectors = 10; // Options for specifying which vectors to include into response
+ optional LookupLocation lookup_from = 11; // Name of the collection to use for points lookup, if not specified - use current collection
+ string group_by = 12; // Payload field to group by, must be a string or number field. If there are multiple values for the field, all of them will be used. One point can be in multiple groups.
+ uint32 group_size = 13; // Maximum amount of points to return per group
+ optional ReadConsistency read_consistency = 14; // Options for specifying read consistency guarantees
+ optional WithLookup with_lookup = 15; // Options for specifying how to use the group id to lookup points in another collection
+}
+
+message CountPoints {
+ string collection_name = 1; // name of the collection
+ Filter filter = 2; // Filter conditions - return only those points that satisfy the specified conditions
+ optional bool exact = 3; // If `true` - return exact count, if `false` - return approximate count
+}
+
+message PointsUpdateOperation {
+ message PointStructList {
+ repeated PointStruct points = 1;
+ }
+ message SetPayload {
+ map payload = 1;
+ optional PointsSelector points_selector = 2; // Affected points
+ }
+ message DeletePayload {
+ repeated string keys = 1;
+ optional PointsSelector points_selector = 2; // Affected points
+ }
+ message UpdateVectors {
+ repeated PointVectors points = 1; // List of points and vectors to update
+ }
+ message DeleteVectors {
+ PointsSelector points_selector = 1; // Affected points
+ VectorsSelector vectors = 2; // List of vector names to delete
+ }
+
+ oneof operation {
+ PointStructList upsert = 1;
+ PointsSelector delete = 2;
+ SetPayload set_payload = 3;
+ SetPayload overwrite_payload = 4;
+ DeletePayload delete_payload = 5;
+ PointsSelector clear_payload = 6;
+ UpdateVectors update_vectors = 7;
+ DeleteVectors delete_vectors = 8;
+ }
+}
+
+message UpdateBatchPoints {
+ string collection_name = 1; // name of the collection
+ optional bool wait = 2; // Wait until the changes have been applied?
+ repeated PointsUpdateOperation operations = 3;
+ optional WriteOrdering ordering = 4; // Write ordering guarantees
+}
+
+// ---------------------------------------------
+// ---------------- RPC Response ---------------
+// ---------------------------------------------
+
+message PointsOperationResponse {
+ UpdateResult result = 1;
+ double time = 2; // Time spent to process
+}
+
+message UpdateResult {
+ uint64 operation_id = 1; // Number of operation
+ UpdateStatus status = 2; // Operation status
+}
+
+enum UpdateStatus {
+ UnknownUpdateStatus = 0;
+ Acknowledged = 1; // Update is received, but not processed yet
+ Completed = 2; // Update is applied and ready for search
+}
+
+message ScoredPoint {
+ PointId id = 1; // Point id
+ map payload = 2; // Payload
+ float score = 3; // Similarity score
+ reserved 4; // deprecated "vector" field
+ uint64 version = 5; // Last update operation applied to this point
+ optional Vectors vectors = 6; // Vectors to search
+}
+
+message GroupId {
+ oneof kind {
+    // Represents an unsigned integer value.
+ uint64 unsigned_value = 1;
+ // Represents an integer value
+ int64 integer_value = 2;
+ // Represents a string value.
+ string string_value = 3;
+ }
+}
+
+message PointGroup {
+ GroupId id = 1; // Group id
+ repeated ScoredPoint hits = 2; // Points in the group
+  RetrievedPoint lookup = 3; // Point(s) from the lookup collection that match the group id
+}
+
+message GroupsResult {
+ repeated PointGroup groups = 1; // Groups
+}
+
+message SearchResponse {
+ repeated ScoredPoint result = 1;
+ double time = 2; // Time spent to process
+}
+
+message BatchResult {
+ repeated ScoredPoint result = 1;
+}
+
+message SearchBatchResponse {
+ repeated BatchResult result = 1;
+ double time = 2; // Time spent to process
+}
+
+message SearchGroupsResponse {
+ GroupsResult result = 1;
+ double time = 2; // Time spent to process
+}
+
+message CountResponse {
+ CountResult result = 1;
+ double time = 2; // Time spent to process
+}
+
+message ScrollResponse {
+ optional PointId next_page_offset = 1; // Use this offset for the next query
+ repeated RetrievedPoint result = 2;
+ double time = 3; // Time spent to process
+}
+
+message CountResult {
+ uint64 count = 1;
+}
+
+message RetrievedPoint {
+ PointId id = 1;
+ map payload = 2;
+ reserved 3; // deprecated "vector" field
+ optional Vectors vectors = 4;
+}
+
+message GetResponse {
+ repeated RetrievedPoint result = 1;
+ double time = 2; // Time spent to process
+}
+
+message RecommendResponse {
+ repeated ScoredPoint result = 1;
+ double time = 2; // Time spent to process
+}
+
+message RecommendBatchResponse {
+ repeated BatchResult result = 1;
+ double time = 2; // Time spent to process
+}
+
+message RecommendGroupsResponse {
+ GroupsResult result = 1;
+ double time = 2; // Time spent to process
+}
+
+message UpdateBatchResponse {
+ repeated UpdateResult result = 1;
+ double time = 2; // Time spent to process
+}
+
+// ---------------------------------------------
+// ------------- Filter Conditions -------------
+// ---------------------------------------------
+
+message Filter {
+ repeated Condition should = 1; // At least one of those conditions should match
+ repeated Condition must = 2; // All conditions must match
+ repeated Condition must_not = 3; // All conditions must NOT match
+}
+
+message Condition {
+ oneof condition_one_of {
+ FieldCondition field = 1;
+ IsEmptyCondition is_empty = 2;
+ HasIdCondition has_id = 3;
+ Filter filter = 4;
+ IsNullCondition is_null = 5;
+ NestedCondition nested = 6;
+ }
+}
+
+message IsEmptyCondition {
+ string key = 1;
+}
+
+message IsNullCondition {
+ string key = 1;
+}
+
+message HasIdCondition {
+ repeated PointId has_id = 1;
+}
+
+message NestedCondition {
+ string key = 1; // Path to nested object
+ Filter filter = 2; // Filter condition
+}
+
+message FieldCondition {
+ string key = 1;
+ Match match = 2; // Check if point has field with a given value
+ Range range = 3; // Check if points value lies in a given range
+ GeoBoundingBox geo_bounding_box = 4; // Check if points geolocation lies in a given area
+ GeoRadius geo_radius = 5; // Check if geo point is within a given radius
+ ValuesCount values_count = 6; // Check number of values for a specific field
+ // GeoPolygon geo_polygon = 7; // Check if geo point is within a given polygon
+}
+
+message Match {
+ oneof match_value {
+ string keyword = 1; // Match string keyword
+ int64 integer = 2; // Match integer
+ bool boolean = 3; // Match boolean
+ string text = 4; // Match text
+ RepeatedStrings keywords = 5; // Match multiple keywords
+ RepeatedIntegers integers = 6; // Match multiple integers
+ RepeatedIntegers except_integers = 7; // Match any other value except those integers
+ RepeatedStrings except_keywords = 8; // Match any other value except those keywords
+ }
+}
+
+message RepeatedStrings {
+ repeated string strings = 1;
+}
+
+message RepeatedIntegers {
+ repeated int64 integers = 1;
+}
+
+message Range {
+ optional double lt = 1;
+ optional double gt = 2;
+ optional double gte = 3;
+ optional double lte = 4;
+}
+
+message GeoBoundingBox {
+ GeoPoint top_left = 1; // north-west corner
+ GeoPoint bottom_right = 2; // south-east corner
+}
+
+message GeoRadius {
+ GeoPoint center = 1; // Center of the circle
+ float radius = 2; // In meters
+}
+
+message GeoPolygon {
+ // Ordered list of coordinates representing the vertices of a polygon.
+ // The minimum size is 4, and the first coordinate and the last coordinate
+ // should be the same to form a closed polygon.
+ repeated GeoPoint points = 1;
+}
+
+message ValuesCount {
+ optional uint64 lt = 1;
+ optional uint64 gt = 2;
+ optional uint64 gte = 3;
+ optional uint64 lte = 4;
+}
+
+// ---------------------------------------------
+// -------------- Points Selector --------------
+// ---------------------------------------------
+
+message PointsSelector {
+ oneof points_selector_one_of {
+ PointsIdsList points = 1;
+ Filter filter = 2;
+ }
+}
+
+message PointsIdsList {
+ repeated PointId ids = 1;
+}
+
+// ---------------------------------------------
+// ------------------- Point -------------------
+// ---------------------------------------------
+
+
+message PointStruct {
+ PointId id = 1;
+ reserved 2; // deprecated "vector" field
+ map payload = 3;
+ optional Vectors vectors = 4;
+}
+
+
+message GeoPoint {
+ double lon = 1;
+ double lat = 2;
+}
diff --git a/ZeroLevel.Qdrant.GrpcClient/Protos/points_internal_service.proto b/ZeroLevel.Qdrant.GrpcClient/Protos/points_internal_service.proto
new file mode 100644
index 0000000..5720e74
--- /dev/null
+++ b/ZeroLevel.Qdrant.GrpcClient/Protos/points_internal_service.proto
@@ -0,0 +1,118 @@
+syntax = "proto3";
+
+import "Protos/points.proto";
+
+package qdrant;
+
+import "google/protobuf/struct.proto";
+
+service PointsInternal {
+ rpc Upsert (UpsertPointsInternal) returns (PointsOperationResponse) {}
+ rpc Sync (SyncPointsInternal) returns (PointsOperationResponse) {}
+ rpc Delete (DeletePointsInternal) returns (PointsOperationResponse) {}
+ rpc UpdateVectors (UpdateVectorsInternal) returns (PointsOperationResponse) {}
+ rpc DeleteVectors (DeleteVectorsInternal) returns (PointsOperationResponse) {}
+ rpc SetPayload (SetPayloadPointsInternal) returns (PointsOperationResponse) {}
+ rpc OverwritePayload (SetPayloadPointsInternal) returns (PointsOperationResponse) {}
+ rpc DeletePayload (DeletePayloadPointsInternal) returns (PointsOperationResponse) {}
+ rpc ClearPayload (ClearPayloadPointsInternal) returns (PointsOperationResponse) {}
+ rpc CreateFieldIndex (CreateFieldIndexCollectionInternal) returns (PointsOperationResponse) {}
+ rpc DeleteFieldIndex (DeleteFieldIndexCollectionInternal) returns (PointsOperationResponse) {}
+ rpc Search (SearchPointsInternal) returns (SearchResponse) {}
+ rpc SearchBatch (SearchBatchPointsInternal) returns (SearchBatchResponse) {}
+ rpc Scroll (ScrollPointsInternal) returns (ScrollResponse) {}
+ rpc Count (CountPointsInternal) returns (CountResponse) {}
+ rpc Recommend (RecommendPointsInternal) returns (RecommendResponse) {}
+ rpc Get (GetPointsInternal) returns (GetResponse) {}
+}
+
+
+message SyncPoints {
+ string collection_name = 1; // name of the collection
+ optional bool wait = 2; // Wait until the changes have been applied?
+ repeated PointStruct points = 3;
+ optional PointId from_id = 4; // Start of the sync range
+ optional PointId to_id = 5; // End of the sync range
+ optional WriteOrdering ordering = 6;
+}
+
+message SyncPointsInternal {
+ SyncPoints sync_points = 1;
+ optional uint32 shard_id = 2;
+}
+
+message UpsertPointsInternal {
+ UpsertPoints upsert_points = 1;
+ optional uint32 shard_id = 2;
+}
+
+message DeletePointsInternal {
+ DeletePoints delete_points = 1;
+ optional uint32 shard_id = 2;
+}
+
+message UpdateVectorsInternal {
+ UpdatePointVectors update_vectors = 1;
+ optional uint32 shard_id = 2;
+}
+
+message DeleteVectorsInternal {
+ DeletePointVectors delete_vectors = 1;
+ optional uint32 shard_id = 2;
+}
+
+message SetPayloadPointsInternal {
+ SetPayloadPoints set_payload_points = 1;
+ optional uint32 shard_id = 2;
+}
+
+message DeletePayloadPointsInternal {
+ DeletePayloadPoints delete_payload_points = 1;
+ optional uint32 shard_id = 2;
+}
+
+message ClearPayloadPointsInternal {
+ ClearPayloadPoints clear_payload_points = 1;
+ optional uint32 shard_id = 2;
+}
+
+message CreateFieldIndexCollectionInternal {
+ CreateFieldIndexCollection create_field_index_collection = 1;
+ optional uint32 shard_id = 2;
+}
+
+message DeleteFieldIndexCollectionInternal {
+ DeleteFieldIndexCollection delete_field_index_collection = 1;
+ optional uint32 shard_id = 2;
+}
+
+message SearchPointsInternal {
+ SearchPoints search_points = 1;
+ optional uint32 shard_id = 2;
+}
+
+message SearchBatchPointsInternal {
+ string collection_name = 1;
+ repeated SearchPoints search_points = 2;
+ optional uint32 shard_id = 3;
+}
+
+message ScrollPointsInternal {
+ ScrollPoints scroll_points = 1;
+ optional uint32 shard_id = 2;
+}
+
+message RecommendPointsInternal {
+ RecommendPoints recommend_points = 1;
+ optional uint32 shard_id = 2;
+}
+
+message GetPointsInternal {
+ GetPoints get_points = 1;
+ optional uint32 shard_id = 2;
+}
+
+message CountPointsInternal {
+ CountPoints count_points = 1;
+ optional uint32 shard_id = 2;
+}
diff --git a/ZeroLevel.Qdrant.GrpcClient/Protos/points_service.proto b/ZeroLevel.Qdrant.GrpcClient/Protos/points_service.proto
new file mode 100644
index 0000000..14ac19f
--- /dev/null
+++ b/ZeroLevel.Qdrant.GrpcClient/Protos/points_service.proto
@@ -0,0 +1,91 @@
+syntax = "proto3";
+
+import "Protos/points.proto";
+
+package qdrant;
+
+import "google/protobuf/struct.proto";
+
+service Points {
+ /*
+ Perform insert + updates on points. If a point with a given ID already exists - it will be overwritten.
+ */
+ rpc Upsert (UpsertPoints) returns (PointsOperationResponse) {}
+ /*
+ Delete points
+ */
+ rpc Delete (DeletePoints) returns (PointsOperationResponse) {}
+ /*
+ Retrieve points
+ */
+ rpc Get (GetPoints) returns (GetResponse) {}
+ /*
+ Update named vectors for point
+ */
+ rpc UpdateVectors (UpdatePointVectors) returns (PointsOperationResponse) {}
+ /*
+ Delete named vectors for points
+ */
+ rpc DeleteVectors (DeletePointVectors) returns (PointsOperationResponse) {}
+ /*
+ Set payload for points
+ */
+ rpc SetPayload (SetPayloadPoints) returns (PointsOperationResponse) {}
+ /*
+ Overwrite payload for points
+ */
+ rpc OverwritePayload (SetPayloadPoints) returns (PointsOperationResponse) {}
+ /*
+ Delete specified key payload for points
+ */
+ rpc DeletePayload (DeletePayloadPoints) returns (PointsOperationResponse) {}
+ /*
+ Remove all payload for specified points
+ */
+ rpc ClearPayload (ClearPayloadPoints) returns (PointsOperationResponse) {}
+ /*
+ Create index for field in collection
+ */
+ rpc CreateFieldIndex (CreateFieldIndexCollection) returns (PointsOperationResponse) {}
+ /*
+ Delete field index for collection
+ */
+ rpc DeleteFieldIndex (DeleteFieldIndexCollection) returns (PointsOperationResponse) {}
+ /*
+ Retrieve closest points based on vector similarity and given filtering conditions
+ */
+ rpc Search (SearchPoints) returns (SearchResponse) {}
+ /*
+ Retrieve closest points based on vector similarity and given filtering conditions
+ */
+ rpc SearchBatch (SearchBatchPoints) returns (SearchBatchResponse) {}
+ /*
+ Retrieve closest points based on vector similarity and given filtering conditions, grouped by a given field
+ */
+ rpc SearchGroups (SearchPointGroups) returns (SearchGroupsResponse) {}
+ /*
+  Iterate over all or filtered points
+ */
+ rpc Scroll (ScrollPoints) returns (ScrollResponse) {}
+ /*
+ Look for the points which are closer to stored positive examples and at the same time further to negative examples.
+ */
+ rpc Recommend (RecommendPoints) returns (RecommendResponse) {}
+ /*
+ Look for the points which are closer to stored positive examples and at the same time further to negative examples.
+ */
+ rpc RecommendBatch (RecommendBatchPoints) returns (RecommendBatchResponse) {}
+ /*
+ Look for the points which are closer to stored positive examples and at the same time further to negative examples, grouped by a given field
+ */
+ rpc RecommendGroups (RecommendPointGroups) returns (RecommendGroupsResponse) {}
+ /*
+ Count points in collection with given filtering conditions
+ */
+ rpc Count (CountPoints) returns (CountResponse) {}
+
+ /*
+ Perform multiple update operations in one request
+ */
+ rpc UpdateBatch (UpdateBatchPoints) returns (UpdateBatchResponse) {}
+}
diff --git a/ZeroLevel.Qdrant.GrpcClient/Protos/qdrant.proto b/ZeroLevel.Qdrant.GrpcClient/Protos/qdrant.proto
new file mode 100644
index 0000000..8e5ffd0
--- /dev/null
+++ b/ZeroLevel.Qdrant.GrpcClient/Protos/qdrant.proto
@@ -0,0 +1,22 @@
+syntax = "proto3";
+
+import "Protos/collections_service.proto";
+import "Protos/collections_internal_service.proto";
+import "Protos/points_service.proto";
+import "Protos/points_internal_service.proto";
+import "Protos/qdrant_internal_service.proto";
+import "Protos/raft_service.proto";
+import "Protos/snapshots_service.proto";
+
+package qdrant;
+
+service Qdrant {
+ rpc HealthCheck (HealthCheckRequest) returns (HealthCheckReply) {}
+}
+
+message HealthCheckRequest {}
+
+message HealthCheckReply {
+ string title = 1;
+ string version = 2;
+}
diff --git a/ZeroLevel.Qdrant.GrpcClient/Protos/qdrant_internal_service.proto b/ZeroLevel.Qdrant.GrpcClient/Protos/qdrant_internal_service.proto
new file mode 100644
index 0000000..e8c1ab7
--- /dev/null
+++ b/ZeroLevel.Qdrant.GrpcClient/Protos/qdrant_internal_service.proto
@@ -0,0 +1,16 @@
+syntax = "proto3";
+
+package qdrant;
+
+service QdrantInternal {
+ /*
+ Get HTTP port for remote host.
+ */
+ rpc GetHttpPort (HttpPortRequest) returns (HttpPortResponse) {}
+}
+
+message HttpPortRequest {}
+
+message HttpPortResponse {
+ int32 port = 1;
+}
diff --git a/ZeroLevel.Qdrant.GrpcClient/Protos/raft_service.proto b/ZeroLevel.Qdrant.GrpcClient/Protos/raft_service.proto
new file mode 100644
index 0000000..b1d6caf
--- /dev/null
+++ b/ZeroLevel.Qdrant.GrpcClient/Protos/raft_service.proto
@@ -0,0 +1,51 @@
+syntax = "proto3";
+
+package qdrant;
+
+import "google/protobuf/empty.proto";
+
+service Raft {
+ // Send Raft message to another peer
+ rpc Send (RaftMessage) returns (google.protobuf.Empty);
+ // Send to bootstrap peer
+ // Returns uri by id if bootstrap knows this peer
+ rpc WhoIs (PeerId) returns (Uri);
+ // Send to bootstrap peer
+ // Adds peer to the network
+ // Returns all peers
+ rpc AddPeerToKnown (AddPeerToKnownMessage) returns (AllPeers);
+ // DEPRECATED
+ // Its functionality is now included in `AddPeerToKnown`
+ //
+ // Send to bootstrap peer
+ // Proposes to add this peer as participant of consensus
+ rpc AddPeerAsParticipant (PeerId) returns (google.protobuf.Empty);
+}
+
+message RaftMessage {
+ bytes message = 1;
+}
+
+message AllPeers {
+ repeated Peer all_peers = 1;
+ uint64 first_peer_id = 2;
+}
+
+message Peer {
+ string uri = 1;
+ uint64 id = 2;
+}
+
+message AddPeerToKnownMessage {
+ optional string uri = 1;
+ optional uint32 port = 2;
+ uint64 id = 3;
+}
+
+message PeerId {
+ uint64 id = 1;
+}
+
+message Uri {
+ string uri = 1;
+}
\ No newline at end of file
diff --git a/ZeroLevel.Qdrant.GrpcClient/Protos/snapshots_service.proto b/ZeroLevel.Qdrant.GrpcClient/Protos/snapshots_service.proto
new file mode 100644
index 0000000..5770268
--- /dev/null
+++ b/ZeroLevel.Qdrant.GrpcClient/Protos/snapshots_service.proto
@@ -0,0 +1,75 @@
+syntax = "proto3";
+
+package qdrant;
+
+import "google/protobuf/struct.proto";
+import "google/protobuf/timestamp.proto";
+
+service Snapshots {
+ /*
+ Create collection snapshot
+ */
+ rpc Create (CreateSnapshotRequest) returns (CreateSnapshotResponse) {}
+ /*
+ List collection snapshots
+ */
+ rpc List (ListSnapshotsRequest) returns (ListSnapshotsResponse) {}
+ /*
+ Delete collection snapshots
+ */
+ rpc Delete (DeleteSnapshotRequest) returns (DeleteSnapshotResponse) {}
+ /*
+ Create full storage snapshot
+ */
+ rpc CreateFull (CreateFullSnapshotRequest) returns (CreateSnapshotResponse) {}
+ /*
+ List full storage snapshots
+ */
+ rpc ListFull (ListFullSnapshotsRequest) returns (ListSnapshotsResponse) {}
+ /*
+  Delete full storage snapshots
+ */
+ rpc DeleteFull (DeleteFullSnapshotRequest) returns (DeleteSnapshotResponse) {}
+
+}
+
+message CreateFullSnapshotRequest {}
+
+message ListFullSnapshotsRequest {}
+
+message DeleteFullSnapshotRequest {
+ string snapshot_name = 1; // Name of the full snapshot
+}
+
+message CreateSnapshotRequest {
+ string collection_name = 1; // Name of the collection
+}
+
+message ListSnapshotsRequest {
+ string collection_name = 1; // Name of the collection
+}
+
+message DeleteSnapshotRequest {
+ string collection_name = 1; // Name of the collection
+ string snapshot_name = 2; // Name of the collection snapshot
+}
+
+message SnapshotDescription {
+ string name = 1; // Name of the snapshot
+ google.protobuf.Timestamp creation_time = 2; // Creation time of the snapshot
+ int64 size = 3; // Size of the snapshot in bytes
+}
+
+message CreateSnapshotResponse {
+ SnapshotDescription snapshot_description = 1;
+ double time = 2; // Time spent to process
+}
+
+message ListSnapshotsResponse {
+ repeated SnapshotDescription snapshot_descriptions = 1;
+ double time = 2; // Time spent to process
+}
+
+message DeleteSnapshotResponse {
+ double time = 1; // Time spent to process
+}
diff --git a/ZeroLevel.Qdrant.GrpcClient/ZeroLevel.Qdrant.GrpcClient.csproj b/ZeroLevel.Qdrant.GrpcClient/ZeroLevel.Qdrant.GrpcClient.csproj
new file mode 100644
index 0000000..a3c63f0
--- /dev/null
+++ b/ZeroLevel.Qdrant.GrpcClient/ZeroLevel.Qdrant.GrpcClient.csproj
@@ -0,0 +1,37 @@
+
+
+
+ net6.0
+ enable
+ enable
+ False
+ 1.15.1
+ 1.15.1
+ 1.15.1
+
+
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ZeroLevel.Qdrant/DataAttributes/FloatAttribute.cs b/ZeroLevel.Qdrant/DataAttributes/FloatAttribute.cs
deleted file mode 100644
index c9c0534..0000000
--- a/ZeroLevel.Qdrant/DataAttributes/FloatAttribute.cs
+++ /dev/null
@@ -1,10 +0,0 @@
-namespace ZeroLevel.Qdrant.DataAttributes
-{
- ///
- /// Attribute for directly specifying the 'float' data type of qdrant
- ///
- public class FloatAttribute
- : QdrantAttribute
- {
- }
-}
diff --git a/ZeroLevel.Qdrant/DataAttributes/GeoAttribute.cs b/ZeroLevel.Qdrant/DataAttributes/GeoAttribute.cs
deleted file mode 100644
index 85fa4f4..0000000
--- a/ZeroLevel.Qdrant/DataAttributes/GeoAttribute.cs
+++ /dev/null
@@ -1,10 +0,0 @@
-namespace ZeroLevel.Qdrant.DataAttributes
-{
- ///
- /// Attribute for directly specifying the 'geo' data type of qdrant
- ///
- public class GeoAttribute
- : QdrantAttribute
- {
- }
-}
diff --git a/ZeroLevel.Qdrant/DataAttributes/IntegerAttribute.cs b/ZeroLevel.Qdrant/DataAttributes/IntegerAttribute.cs
deleted file mode 100644
index 5087161..0000000
--- a/ZeroLevel.Qdrant/DataAttributes/IntegerAttribute.cs
+++ /dev/null
@@ -1,10 +0,0 @@
-namespace ZeroLevel.Qdrant.DataAttributes
-{
- ///
- /// Attribute for directly specifying the 'integer' data type of qdrant
- ///
- public class IntegerAttribute
- : QdrantAttribute
- {
- }
-}
diff --git a/ZeroLevel.Qdrant/DataAttributes/KeywordAttribute.cs b/ZeroLevel.Qdrant/DataAttributes/KeywordAttribute.cs
deleted file mode 100644
index c65f10d..0000000
--- a/ZeroLevel.Qdrant/DataAttributes/KeywordAttribute.cs
+++ /dev/null
@@ -1,10 +0,0 @@
-namespace ZeroLevel.Qdrant.DataAttributes
-{
- ///
- /// Attribute for directly specifying the 'keyword' data type of qdrant
- ///
- public class KeywordAttribute
- : QdrantAttribute
- {
- }
-}
diff --git a/ZeroLevel.Qdrant/DataAttributes/QdrantAttribute.cs b/ZeroLevel.Qdrant/DataAttributes/QdrantAttribute.cs
deleted file mode 100644
index a464f15..0000000
--- a/ZeroLevel.Qdrant/DataAttributes/QdrantAttribute.cs
+++ /dev/null
@@ -1,12 +0,0 @@
-using System;
-
-namespace ZeroLevel.Qdrant.DataAttributes
-{
- ///
- /// Attribute for directly specifying the data type of qdrant
- ///
- public abstract class QdrantAttribute
- : Attribute
- {
- }
-}
diff --git a/ZeroLevel.Qdrant/FloatExtensions.cs b/ZeroLevel.Qdrant/FloatExtensions.cs
deleted file mode 100644
index 891871e..0000000
--- a/ZeroLevel.Qdrant/FloatExtensions.cs
+++ /dev/null
@@ -1,18 +0,0 @@
-using System.Globalization;
-
-namespace ZeroLevel.Qdrant
-{
- public static class NumericExtensions
- {
- private static NumberFormatInfo nfi = new NumberFormatInfo() { NumberDecimalSeparator = "." };
- public static string ConvertToString(this float num)
- {
- return num.ToString(nfi);
- }
-
- public static string ConvertToString(this double num)
- {
- return num.ToString(nfi);
- }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Filters/Condition.cs b/ZeroLevel.Qdrant/Models/Filters/Condition.cs
deleted file mode 100644
index 68832a8..0000000
--- a/ZeroLevel.Qdrant/Models/Filters/Condition.cs
+++ /dev/null
@@ -1,81 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Filters
-{
- ///
- /// Condition for qdrant filters
- ///
- public class Condition
- : Operand
- {
- /*
- integer - 64-bit integer in the range -9223372036854775808 to 9223372036854775807. array of long
- float - 64-bit floating point number. array of double
- keyword - string value. array of strings
- geo - Geographical coordinates. Example: { "lon": 52.5200, "lat": 13.4050 } array of lon&lat of double
- */
- public string Json { get; set; }
-
- public static Condition Ids(long[] values)
- {
- return new Condition
- {
- Json = $"{{ \"has_id\": [{string.Join(",", values)}] }}"
- };
- }
-
- public static Condition IntegerMatch(string name, long value)
- {
- return new Condition
- {
- Json = $"{{ \"key\": \"{name.ToLowerInvariant()}\", \"match\": {{ \"integer\": {value} }} }}"
- };
- }
-
- public static Condition IntegerRange(string name, long left, long rigth, bool include_left, bool include_right)
- {
- var left_cond = include_left ? $"\"lt\": null,\"lte\": {rigth}" : $"\"lt\": {rigth},\"lte\": null";
- var right_cond = include_right ? $"\"gt\": null,\"gte\": {left}" : $"\"gt\": {left},\"gte\": null";
- return new Condition
- {
- Json = $"{{ \"key\": \"{name.ToLowerInvariant()}\", \"range\": {{ {right_cond}, {left_cond} }} }}"
- };
- }
-
- public static Condition FloatRange(string name, double left, double rigth, bool include_left, bool include_right)
- {
- var left_cond = include_left ? $"\"lt\": null,\"lte\": {rigth.ConvertToString()}" : $"\"lt\": {rigth.ConvertToString()},\"lte\": null";
- var right_cond = include_right ? $"\"gt\": null,\"gte\": {left.ConvertToString()}" : $"\"gt\": {left.ConvertToString()},\"gte\": null";
- return new Condition
- {
- Json = $"{{ \"key\": \"{name.ToLowerInvariant()}\", \"range\": {{ {left_cond}, {right_cond} }} }}"
- };
- }
-
- public static Condition KeywordMatch(string name, string value)
- {
- return new Condition
- {
- Json = $"{{ \"key\": \"{name.ToLowerInvariant()}\", \"match\": {{ \"keyword\": \"{value}\" }} }}"
- };
- }
-
- public static Condition GeoBox(string name, Location top_left, Location bottom_right)
- {
- return new Condition
- {
- Json = $"{{ \"key\": \"{name.ToLowerInvariant()}\", \"geo_bounding_box\": {{ \"bottom_right\": {{ \"lat\": {bottom_right.lat.ConvertToString()}, \"lon\": {bottom_right.lon.ConvertToString()} }}, \"top_left\": {{ \"lat\": {top_left.lat.ConvertToString()}, \"lon\": {top_left.lon.ConvertToString()} }} }} }}"
- };
- }
-
- public static Condition GeoRadius(string name, Location location, double radius)
- {
- return new Condition
- {
- Json = $"{{\"key\": \"{name.ToLowerInvariant()}\", \"geo_radius\": {{\"center\": {{ \"lat\": {location.lat.ConvertToString()}, \"lon\": {location.lon.ConvertToString()} }}, \"radius\": {radius.ConvertToString()} }} }}"
- };
- }
- public override string ToJSON()
- {
- return Json;
- }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Filters/Filter.cs b/ZeroLevel.Qdrant/Models/Filters/Filter.cs
deleted file mode 100644
index b09a1e5..0000000
--- a/ZeroLevel.Qdrant/Models/Filters/Filter.cs
+++ /dev/null
@@ -1,32 +0,0 @@
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-
-namespace ZeroLevel.Qdrant.Models.Filters
-{
- ///
- /// Filter for search in qdrant
- ///
- public class Filter
- {
- private List _groups = new List();
-
- public bool IsEmpty => _groups?.Count == 0;
-
- public Group AppendGroup(GroupOperator op)
- {
- var g = new Group(op);
- _groups.Add(g);
- return g;
- }
-
- public string ToJSON()
- {
- var json = new StringBuilder();
- json.Append("\"filter\": {");
- json.Append(string.Join(",", _groups.Select(g => g.ToJSON())));
- json.Append("}");
- return json.ToString();
- }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Filters/Group.cs b/ZeroLevel.Qdrant/Models/Filters/Group.cs
deleted file mode 100644
index 0571b08..0000000
--- a/ZeroLevel.Qdrant/Models/Filters/Group.cs
+++ /dev/null
@@ -1,41 +0,0 @@
-using System.Collections.Generic;
-using System.Linq;
-
-namespace ZeroLevel.Qdrant.Models.Filters
-{
- public class Group
- : Operand
- {
- private List _items = new List();
- public GroupOperator Operator { get; private set; }
- public Group(GroupOperator op)
- {
- Operator = op;
- }
-
- public Group AppendGroup(GroupOperator op)
- {
- var g = new Group(op);
- _items.Add(g);
- return g;
- }
-
- public Group AppendCondition(Condition condition)
- {
- _items.Add(condition);
- return this;
- }
-
- public override string ToJSON()
- {
- string op;
- switch (Operator)
- {
- case GroupOperator.MustNot: op = "must_not"; break;
- case GroupOperator.Must: op = "must"; break;
- default: op = "should"; break;
- }
- return $"\"{op}\": [{string.Join(",", _items.Select(i => i.ToJSON()))}]";
- }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Filters/GroupOperator.cs b/ZeroLevel.Qdrant/Models/Filters/GroupOperator.cs
deleted file mode 100644
index 29008cd..0000000
--- a/ZeroLevel.Qdrant/Models/Filters/GroupOperator.cs
+++ /dev/null
@@ -1,9 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Filters
-{
- public enum GroupOperator
- {
- Must,
- Should,
- MustNot
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Filters/Operand.cs b/ZeroLevel.Qdrant/Models/Filters/Operand.cs
deleted file mode 100644
index b468880..0000000
--- a/ZeroLevel.Qdrant/Models/Filters/Operand.cs
+++ /dev/null
@@ -1,7 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Filters
-{
- public abstract class Operand
- {
- public abstract string ToJSON();
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Location.cs b/ZeroLevel.Qdrant/Models/Location.cs
deleted file mode 100644
index 3b28a7d..0000000
--- a/ZeroLevel.Qdrant/Models/Location.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace ZeroLevel.Qdrant.Models
-{
- public class Location
- {
- public double lon { get; set; }
- public double lat { get; set; }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Point.cs b/ZeroLevel.Qdrant/Models/Point.cs
deleted file mode 100644
index b992d45..0000000
--- a/ZeroLevel.Qdrant/Models/Point.cs
+++ /dev/null
@@ -1,9 +0,0 @@
-namespace ZeroLevel.Qdrant.Models
-{
- public sealed class Point
- {
- public long id { get; set; }
- public dynamic payload;
- public float[] vector;
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Requests/CreateCollectionReqeust.cs b/ZeroLevel.Qdrant/Models/Requests/CreateCollectionReqeust.cs
deleted file mode 100644
index 5c177a3..0000000
--- a/ZeroLevel.Qdrant/Models/Requests/CreateCollectionReqeust.cs
+++ /dev/null
@@ -1,17 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Requests
-{
- internal sealed class CreateCollectionReqeust
- {
- public string distance { get; set; }
- public int vector_size { get; set; }
- public bool? on_disk_payload { get; set; }
-
- public CreateCollectionReqeust(string distance, int vector_size,
- bool? on_disk_payload = null)
- {
- this.distance = distance;
- this.vector_size = vector_size;
- this.on_disk_payload = on_disk_payload;
- }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Requests/CreateIndexRequest.cs b/ZeroLevel.Qdrant/Models/Requests/CreateIndexRequest.cs
deleted file mode 100644
index 48c7eb7..0000000
--- a/ZeroLevel.Qdrant/Models/Requests/CreateIndexRequest.cs
+++ /dev/null
@@ -1,34 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Requests
-{
- public enum IndexFieldType
- {
- Keyword,
- Integer,
- Float,
- Geo
- }
-
- ///
- /// Available field types are:
- /// keyword - for keyword payload, affects Match filtering conditions.
- /// integer - for integer payload, affects Match and Range filtering conditions.
- /// float - for float payload, affects Range filtering conditions.
- /// geo - for geo payload, affects Geo Bounding Box and Geo Radius filtering conditions.
- ///
- internal sealed class CreateIndexRequest
- {
- public string field_name { get; set; }
- public string field_type { get; set; }
- public CreateIndexRequest(string name, IndexFieldType type)
- {
- field_name = name;
- switch (type)
- {
- case IndexFieldType.Integer: field_type = "integer"; break;
- case IndexFieldType.Float: field_type = "float"; break;
- case IndexFieldType.Geo: field_type = "geo"; break;
- case IndexFieldType.Keyword: field_type = "keyword"; break;
- }
- }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Requests/DeletePointsRequest.cs b/ZeroLevel.Qdrant/Models/Requests/DeletePointsRequest.cs
deleted file mode 100644
index cfb899f..0000000
--- a/ZeroLevel.Qdrant/Models/Requests/DeletePointsRequest.cs
+++ /dev/null
@@ -1,11 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Requests
-{
- internal sealed class DeletePoints
- {
- public long[] ids { get; set; }
- }
- internal sealed class DeletePointsRequest
- {
- public DeletePoints delete_points { get; set; }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Requests/PointsRequest.cs b/ZeroLevel.Qdrant/Models/Requests/PointsRequest.cs
deleted file mode 100644
index 071656d..0000000
--- a/ZeroLevel.Qdrant/Models/Requests/PointsRequest.cs
+++ /dev/null
@@ -1,9 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Requests
-{
- internal sealed class PointsRequest
- {
- public long[] ids { get; set; }
- public bool with_payload { get; set; } = true;
- public bool with_vector { get; set; } = false;
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Requests/PointsUpsertRequest.cs b/ZeroLevel.Qdrant/Models/Requests/PointsUpsertRequest.cs
deleted file mode 100644
index 41a2311..0000000
--- a/ZeroLevel.Qdrant/Models/Requests/PointsUpsertRequest.cs
+++ /dev/null
@@ -1,46 +0,0 @@
-using System;
-using System.Linq;
-using System.Text;
-using ZeroLevel.Qdrant.Services;
-
-namespace ZeroLevel.Qdrant.Models.Requests
-{
- /*
- integer - 64-bit integer in the range -9223372036854775808 to 9223372036854775807. array of long
- float - 64-bit floating point number. array of double
- keyword - string value. array of strings
- geo - Geographical coordinates. Example: { "lon": 52.5200, "lat": 13.4050 } array of lon&lat of double
- */
-
- public sealed class UpsertPoint
- {
- public long? id { get; set; } = null;
- public T payload { get; set; }
- public float[] vector { get; set; }
- }
-
- public sealed class PointsUpsertRequest
- {
- public UpsertPoint[] points { get; set; }
-
- public string ToJSON()
- {
- if (points != null && points.Length > 0)
- {
- var dims = points[0].vector.Length;
- Func converter = o => QdrantJsonConverter.ConvertToJson(o);
- var json = new StringBuilder();
- json.Append("{");
- json.Append("\"batch\": {");
- json.Append($"\"ids\": [{string.Join(",", points.Select(p => p.id))}], ");
- json.Append($"\"payloads\": [ {{ {string.Join("} ,{ ", points.Select(p => converter(p.payload)))} }} ], ");
- json.Append($"\"vectors\": [{string.Join(", ", points.Select(p => QdrantJsonConverter.ConvertToJson(p.vector)))}]");
- json.Append("}");
- json.Append("}");
- return json.ToString();
- }
- return String.Empty;
- }
-
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Requests/ScrollRequest.cs b/ZeroLevel.Qdrant/Models/Requests/ScrollRequest.cs
deleted file mode 100644
index 69a0162..0000000
--- a/ZeroLevel.Qdrant/Models/Requests/ScrollRequest.cs
+++ /dev/null
@@ -1,51 +0,0 @@
-using System;
-using System.Text;
-using ZeroLevel.Qdrant.Models.Filters;
-
-namespace ZeroLevel.Qdrant.Models.Requests
-{
- internal sealed class ScrollRequest
- {
- public Filter Filter { get; set; }
- public bool WithPayload { get; set; } = true;
- public bool WithVector { get; set; } = true;
- public long Limit { get; set; }
- public long Offset { get; set; }
-
- /*
- {
- "filter": {
- "must": [
- { "has_id": [0, 3, 100] }
- ]
- },
- "limit": 10000,
- "offset": 0,
- "with_payload": false,
- "with_vector": true
-}
- */
-
- public string ToJson()
- {
- var json = new StringBuilder();
- json.Append("{");
- if (Filter == null || Filter.IsEmpty)
- {
- throw new ArgumentException("Filter must not by null or empty");
- }
- else
- {
- json.Append(Filter.ToJSON());
- json.Append(',');
- }
- json.Append($"\"limit\": {Limit},");
- json.Append($"\"offset\": {Offset},");
- json.Append($"\"with_payload\": {WithPayload.ToString().ToLowerInvariant()},");
- json.Append($"\"with_vector\": {WithVector.ToString().ToLowerInvariant()}");
- json.Append("}");
- return json.ToString();
-
- }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Requests/SearchRequest.cs b/ZeroLevel.Qdrant/Models/Requests/SearchRequest.cs
deleted file mode 100644
index 0df42de..0000000
--- a/ZeroLevel.Qdrant/Models/Requests/SearchRequest.cs
+++ /dev/null
@@ -1,84 +0,0 @@
-using ZeroLevel.Qdrant.Models.Filters;
-using System;
-using System.Linq;
-using System.Text;
-
-namespace ZeroLevel.Qdrant.Models.Requests
-{
- internal sealed class SearchRequest
- {
- ///
- /// Look only for points which satisfies this conditions
- ///
- public Filter Filter { get; set; }
- ///
- /// Look for vectors closest to this
- ///
- public double[] FloatVector { get; set; }
- public long[] IntegerVector { get; set; }
- ///
- /// Max number of result to return
- ///
- public uint Top { get; set; }
- ///
- /// Params relevant to HNSW index /// Size of the beam in a beam-search. Larger the value - more accurate the result, more time required for search.
- ///
- public uint? HNSW { get; set; } = null;
-
-
- /*
-
-{
- "filter": {
- "must": [
- {
- "key": "city",
- "match": {
- "keyword": "London"
- }
- }
- ]
- },
- "params": {
- "hnsw_ef": 128
- },
- "vector": [0.2, 0.1, 0.9, 0.7],
- "top": 3
-}
-
- */
- public string ToJson()
- {
- var json = new StringBuilder();
- json.Append("{");
- if (Filter == null || Filter.IsEmpty)
- {
- json.Append("\"filter\": null,");
- }
- else
- {
- json.Append(Filter.ToJSON());
- json.Append(',');
- }
- if (HNSW != null)
- {
- json.Append($"\"params\": {{ \"hnsw_ef\": {HNSW.Value} }},");
- }
- if (FloatVector != null)
- {
- json.Append($"\"vector\": [{string.Join(",", FloatVector.Select(f => f.ConvertToString()))}],");
- }
- else if (IntegerVector != null)
- {
- json.Append($"\"vector\": [{string.Join(",", IntegerVector)}],");
- }
- else
- {
- throw new ArgumentException("No one vectors is set");
- }
- json.Append($"\"top\": {Top}");
- json.Append("}");
- return json.ToString();
- }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Responces/CreateIndexResponse.cs b/ZeroLevel.Qdrant/Models/Responces/CreateIndexResponse.cs
deleted file mode 100644
index 87df28b..0000000
--- a/ZeroLevel.Qdrant/Models/Responces/CreateIndexResponse.cs
+++ /dev/null
@@ -1,15 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Responces
-{
- public sealed class IndexOperation
- {
- public long operation_id { get; set; }
- public string status { get; set; }
- }
-
- public sealed class CreateIndexResponse
- {
- public IndexOperation result { get; set; }
- public string status { get; set; }
- public float time { get; set; }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Responces/OperationResponse.cs b/ZeroLevel.Qdrant/Models/Responces/OperationResponse.cs
deleted file mode 100644
index db0520e..0000000
--- a/ZeroLevel.Qdrant/Models/Responces/OperationResponse.cs
+++ /dev/null
@@ -1,9 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Responces
-{
- public sealed class OperationResponse
- {
- public bool result { get; set; }
- public string status { get; set; }
- public float time { get; set; }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Responces/PointResponse.cs b/ZeroLevel.Qdrant/Models/Responces/PointResponse.cs
deleted file mode 100644
index 114532c..0000000
--- a/ZeroLevel.Qdrant/Models/Responces/PointResponse.cs
+++ /dev/null
@@ -1,16 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Responces
-{
- public sealed class PointsResponse
- {
- public Point[] result { get; set; }
- public string status { get; set; }
- public float time { get; set; }
- }
-
- public sealed class PointResponse
- {
- public Point result { get; set; }
- public string status { get; set; }
- public float time { get; set; }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Responces/PointsOperationResponse.cs b/ZeroLevel.Qdrant/Models/Responces/PointsOperationResponse.cs
deleted file mode 100644
index b43df81..0000000
--- a/ZeroLevel.Qdrant/Models/Responces/PointsOperationResponse.cs
+++ /dev/null
@@ -1,16 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Responces
-{
- public sealed class PointsOperationResult
- {
- public long operation_id { get; set; }
- public string status { get; set; }
-
- }
-
- public sealed class PointsOperationResponse
- {
- public PointsOperationResult result { get; set; }
- public string status { get; set; }
- public float time { get; set; }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Responces/ScrollResponse.cs b/ZeroLevel.Qdrant/Models/Responces/ScrollResponse.cs
deleted file mode 100644
index fb5b00e..0000000
--- a/ZeroLevel.Qdrant/Models/Responces/ScrollResponse.cs
+++ /dev/null
@@ -1,15 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Responces
-{
- public sealed class ScrollResult
- {
- public Point[] points { get; set; }
- public long? next_page_offset { get; set; }
- }
-
- public sealed class ScrollResponse
- {
- public ScrollResult result { get; set; }
- public string status { get; set; }
- public float time { get; set; }
- }
-}
diff --git a/ZeroLevel.Qdrant/Models/Responces/SearchResponse.cs b/ZeroLevel.Qdrant/Models/Responces/SearchResponse.cs
deleted file mode 100644
index 2d173aa..0000000
--- a/ZeroLevel.Qdrant/Models/Responces/SearchResponse.cs
+++ /dev/null
@@ -1,14 +0,0 @@
-namespace ZeroLevel.Qdrant.Models.Responces
-{
- public sealed class ScoredPoint
- {
- public long id { get; set; }
- public float score { get; set; }
- }
- public sealed class SearchResponse
- {
- public ScoredPoint[] result { get; set; }
- public string status { get; set; }
- public float time { get; set; }
- }
-}
diff --git a/ZeroLevel.Qdrant/QdrantClient.cs b/ZeroLevel.Qdrant/QdrantClient.cs
deleted file mode 100644
index 0f38890..0000000
--- a/ZeroLevel.Qdrant/QdrantClient.cs
+++ /dev/null
@@ -1,292 +0,0 @@
-using Newtonsoft.Json;
-using System;
-using System.Net;
-using System.Net.Http;
-using System.Text;
-using System.Threading.Tasks;
-using ZeroLevel.Models;
-using ZeroLevel.Qdrant.Models.Filters;
-using ZeroLevel.Qdrant.Models.Requests;
-using ZeroLevel.Qdrant.Models.Responces;
-
-namespace ZeroLevel.Qdrant
-{
- /*
- https://qdrant.github.io/qdrant/redoc/index.html#operation/search_points
- https://qdrant.tech/documentation/search/
- */
- ///
- /// Client for Qdrant API
- ///
- public class QdrantClient
- {
- private const int DEFAULT_OPERATION_TIMEOUT_S = 30;
- private HttpClient CreateClient()
- {
- var handler = new HttpClientHandler
- {
- ServerCertificateCustomValidationCallback = (sender, cert, chain, sslPolicyErrors) => { return true; }
- };
- handler.DefaultProxyCredentials = CredentialCache.DefaultCredentials;
- return new HttpClient(handler)
- {
- BaseAddress = _serverUri,
- Timeout = TimeSpan.FromMinutes(5)
- };
- }
- private readonly Uri _serverUri;
- public QdrantClient(string host = "localhost", int port = 6333)
- {
- _serverUri = new Uri($"{host}:{port}");
- }
-
- #region API
-
- #region Collection https://qdrant.github.io/qdrant/redoc/index.html#tag/collections
- ///
- /// Create collection
- ///
- /// Collection name
- /// Cosine or Dot or Euclid
- /// Count of elements in vectors
- ///
- public async Task> CreateCollection(string name, string distance, int vector_size, bool? on_disk_payload)
- {
- try
- {
- var collection = new CreateCollectionReqeust(distance, vector_size, on_disk_payload);
- var json = JsonConvert.SerializeObject(collection);
- var data = new StringContent(json, Encoding.UTF8, "application/json");
- var url = $"/collections/{name}";
-
- var response = await _request(url, new HttpMethod("PUT"), data);
- return InvokeResult.Succeeding(response);
- }
- catch (Exception ex)
- {
- Log.Error(ex, $"[QdrantClient.CreateCollection] Name: {name}. Distance: {distance}. Vector size: {vector_size}");
- return InvokeResult.Fault($"[QdrantClient.CreateCollection] Name: {name}\r\n{ex.ToString()}");
- }
- }
- ///
- /// Delete collection by name
- ///
- /// Collection name
- public async Task> DeleteCollection(string name, int timeout = DEFAULT_OPERATION_TIMEOUT_S)
- {
- try
- {
- var url = $"/collections/{name}?timeout={timeout}";
- var response = await _request(url, new HttpMethod("DELETE"), null);
- return InvokeResult.Succeeding(response);
- }
- catch (Exception ex)
- {
- Log.Error(ex, $"[QdrantClient.DeleteCollection] Name: {name}.");
- return InvokeResult.Fault($"[QdrantClient.DeleteCollection] Name: {name}\r\n{ex.ToString()}");
- }
- }
- #endregion
-
- #region Indexes https://qdrant.tech/documentation/indexing/
- ///
- /// For indexing, it is recommended to choose the field that limits the search result the most. As a rule, the more different values a payload value has, the more efficient the index will be used. You should not create an index for Boolean fields and fields with only a few possible values.
- ///
- public async Task> CreateIndex(string collection_name, string field_name, IndexFieldType field_type)
- {
- try
- {
- var index = new CreateIndexRequest(field_name, field_type);
- var json = JsonConvert.SerializeObject(index);
- var data = new StringContent(json, Encoding.UTF8, "application/json");
- var url = $"/collections/{collection_name}/index";
-
- var response = await _request(url, new HttpMethod("PUT"), data);
- return InvokeResult.Succeeding(response);
- }
- catch (Exception ex)
- {
- Log.Error(ex, $"[QdrantClient.CreateIndex] Collection name: {collection_name}. Field name: {field_name}");
- return InvokeResult.Fault($"[QdrantClient.CreateIndex] Collection name: {collection_name}. Field name: {field_name}\r\n{ex.ToString()}");
- }
- }
- #endregion
-
- #region Search https://qdrant.tech/documentation/search/
- ///
- /// Searching for the nearest vectors
- ///
- public async Task> Search(string collection_name, double[] vector, uint top, Filter filter = null)
- {
- try
- {
- var search = new SearchRequest { FloatVector = vector, Top = top, Filter = filter };
- var json = search.ToJson();
- var data = new StringContent(json, Encoding.UTF8, "application/json");
- var url = $"/collections/{collection_name}/points/search";
-
- var response = await _request(url, new HttpMethod("POST"), data);
- return InvokeResult.Succeeding(response);
- }
- catch (Exception ex)
- {
- Log.Error(ex, $"[QdrantClient.Search] Collection name: {collection_name}.");
- return InvokeResult.Fault($"[QdrantClient.Search] Collection name: {collection_name}.\r\n{ex.ToString()}");
- }
- }
- ///
- /// Searching for the nearest vectors
- ///
- public async Task> Search(string collection_name, long[] vector, uint top, Filter filter = null)
- {
- try
- {
- var search = new SearchRequest { IntegerVector = vector, Top = top, Filter = filter };
- var json = search.ToJson();
- var data = new StringContent(json, Encoding.UTF8, "application/json");
- var url = $"/collections/{collection_name}/points/search";
-
- var response = await _request(url, new HttpMethod("POST"), data);
- return InvokeResult.Succeeding(response);
- }
- catch (Exception ex)
- {
- Log.Error(ex, $"[QdrantClient.Search] Collection name: {collection_name}.");
- return InvokeResult.Fault($"[QdrantClient.Search] Collection name: {collection_name}.\r\n{ex.ToString()}");
- }
- }
- #endregion
-
- #region Points https://qdrant.tech/documentation/points/
- ///
- /// There is a method for retrieving points by their ids.
- ///
- public async Task> GetPoint(string collection_name, long id)
- {
- try
- {
- string url = $"/collections/{collection_name}/points/{id}";
- var response = await _request(url, new HttpMethod("GET"), null);
- return InvokeResult.Succeeding(response);
- }
- catch (Exception ex)
- {
- Log.Error(ex, $"[QdrantClient.Points] Collection name: {collection_name}.");
- return InvokeResult.Fault($"[QdrantClient.GetPoint] Collection name: {collection_name}. Point ID: {id}\r\n{ex.ToString()}");
- }
- }
-
- ///
- /// There is a method for retrieving points by their ids.
- ///
- public async Task> GetPoints(string collection_name, long[] ids)
- {
- try
- {
- var points = new PointsRequest { ids = ids };
- var json = JsonConvert.SerializeObject(points);
- var data = new StringContent(json, Encoding.UTF8, "application/json");
- string url = $"/collections/{collection_name}/points";
- var response = await _request(url, new HttpMethod("POST"), data);
- return InvokeResult.Succeeding(response);
- }
- catch (Exception ex)
- {
- Log.Error(ex, $"[QdrantClient.Points] Collection name: {collection_name}.");
- return InvokeResult.Fault($"[QdrantClient.GetPoints] Collection name: {collection_name}.\r\n{ex.ToString()}");
- }
- }
-
- ///
- /// There is a method for retrieving points by their ids.
- ///
- public async Task> Scroll(string collection_name, Filter filter, long limit, long offset = 0, bool with_vector = true, bool with_payload = true)
- {
- try
- {
- var scroll = new ScrollRequest { Filter = filter, Limit = limit, Offset = offset, WithPayload = with_payload, WithVector = with_vector };
- var json = scroll.ToJson();
- var data = new StringContent(json, Encoding.UTF8, "application/json");
- string url = url = $"/collections/{collection_name}/points/scroll";
-
- var response = await _request(url, new HttpMethod("POST"), data);
- return InvokeResult.Succeeding(response);
- }
- catch (Exception ex)
- {
- Log.Error(ex, $"[QdrantClient.Scroll] Collection name: {collection_name}.");
- return InvokeResult.Fault($"[QdrantClient.Scroll] Collection name: {collection_name}.\r\n{ex.ToString()}");
- }
- }
-
-
- ///
- /// Record-oriented of creating batches
- ///
- public async Task> UpsertPoints(string collection_name, PointsUpsertRequest points)
- {
- try
- {
- var json = points.ToJSON();
- var data = new StringContent(json, Encoding.UTF8, "application/json");
- var url = $"/collections/{collection_name}/points";
- var response = await _request(url, new HttpMethod("PUT"), data);
- return InvokeResult.Succeeding(response);
- }
- catch (Exception ex)
- {
- Log.Error(ex, $"[QdrantClient.UpsertPoints] Collection name: {collection_name}.");
- return InvokeResult.Fault($"[QdrantClient.UpsertPoints] Collection name: {collection_name}.\r\n{ex.ToString()}");
- }
- }
- ///
- /// Delete points by their ids.
- ///
- public async Task> DeletePoints(string collection_name, long[] ids)
- {
- try
- {
- var points = new DeletePointsRequest { delete_points = new DeletePoints { ids = ids } };
- var json = JsonConvert.SerializeObject(points);
- var data = new StringContent(json, Encoding.UTF8, "application/json");
- var url = $"/collections/{collection_name}";
-
- var response = await _request(url, new HttpMethod("POST"), data);
- return InvokeResult.Succeeding(response);
- }
- catch (Exception ex)
- {
- Log.Error(ex, $"[QdrantClient.DeleteCollection] Name: {collection_name}.");
- return InvokeResult.Fault($"[QdrantClient.DeleteCollection] Name: {collection_name}\r\n{ex.ToString()}");
- }
- }
- #endregion
-
- #endregion
-
- #region Private
- private async Task _request(string url, HttpMethod method, HttpContent content = null)
- {
- var json = await _request(url, method, content);
- return JsonConvert.DeserializeObject(json);
- }
-
- private async Task _request(string url, HttpMethod method, HttpContent content = null)
- {
- var fullUrl = new Uri(_serverUri, url);
- var message = new HttpRequestMessage(method, fullUrl) { Content = content };
- using (var client = CreateClient())
- {
- var response = await client.SendAsync(message);
- var result = await response.Content.ReadAsStringAsync();
- var jsonPrint = result?.Length >= 5000 ? "" : result;
- if (response.IsSuccessStatusCode == false)
- {
- throw new Exception($"Not SuccessStatusCode {method} {fullUrl}. Status: {response.StatusCode} {response.ReasonPhrase}. Content: {jsonPrint}");
- }
- return result;
- }
- }
- #endregion
- }
-}
diff --git a/ZeroLevel.Qdrant/Services/QdrantJsonConverter.cs b/ZeroLevel.Qdrant/Services/QdrantJsonConverter.cs
deleted file mode 100644
index cf2cb56..0000000
--- a/ZeroLevel.Qdrant/Services/QdrantJsonConverter.cs
+++ /dev/null
@@ -1,173 +0,0 @@
-using System;
-using System.Collections;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using ZeroLevel.Qdrant.DataAttributes;
-using ZeroLevel.Qdrant.Models;
-using ZeroLevel.Services.ObjectMapping;
-using ZeroLevel.Services.Reflection;
-using ZeroLevel.Services.Serialization;
-
-namespace ZeroLevel.Qdrant.Services
-{
- public static class QdrantJsonConverter
- {
- private static string KeywordToString(IMemberInfo member, object v)
- {
- var text = TypeHelpers.IsString(member.ClrType) ? v as string : v.ToString();
- if (string.IsNullOrEmpty(text))
- {
- return "null";
- }
- else
- {
- return $"\"{JsonEscaper.EscapeString(text)}\"";
- }
- }
-
- /*
- integer - 64-bit integer in the range -9223372036854775808 to 9223372036854775807. array of long
- float - 64-bit floating point number. array of double
- keyword - string value. array of strings
- geo - Geographical coordinates. Example: { "lon": 52.5200, "lat": 13.4050 } array of lon&lat of double
- */
- private const string KYEWORD_TYPE = "keyword";
- private const string GEO_TYPE = "geo";
- private const string FLOAT_TYPE = "float";
- private const string INTEGER_TYPE = "integer";
-
- public static string ConvertToJson(float[] vector)
- {
- return "[" + string.Join(", ", vector.Select(f => f.ConvertToString())) + "]";
- }
- public static string ConvertToJson(T value)
- {
- var json = new StringBuilder();
-
- var map = TypeMapper.Create();
- foreach (var member in map.Members)
- {
- var val = member.Getter(value);
- var type = KYEWORD_TYPE;
-
- var attributes = member.Original.GetCustomAttributes(typeof(QdrantAttribute), true);
- if (attributes != null && attributes.Any())
- {
- var dataAttribute = attributes[0];
- if (dataAttribute is KeywordAttribute)
- {
- type = KYEWORD_TYPE;
- }
- else if (dataAttribute is FloatAttribute)
- {
- type = FLOAT_TYPE;
- }
- else if (dataAttribute is IntegerAttribute)
- {
- type = INTEGER_TYPE;
- }
- else if (dataAttribute is GeoAttribute)
- {
- type = GEO_TYPE;
- }
- }
- else
- {
- var item_type = member.ClrType;
- // autodetect type
- if (TypeHelpers.IsArray(item_type))
- {
- item_type = item_type.GetElementType();
- }
- else if (TypeHelpers.IsEnumerable(item_type))
- {
- item_type = TypeHelpers.GetElementTypeOfEnumerable(item_type);
- }
- if (item_type == typeof(float) || item_type == typeof(double) || item_type == typeof(decimal))
- {
- type = FLOAT_TYPE;
- }
- else if (item_type == typeof(int) || item_type == typeof(long) || item_type == typeof(byte) ||
- item_type == typeof(short) || item_type == typeof(uint) || item_type == typeof(ulong) ||
- item_type == typeof(ushort))
- {
- type = INTEGER_TYPE;
- }
- else if (item_type == typeof(Location))
- {
- type = GEO_TYPE;
- }
- }
- switch (type)
- {
- case KYEWORD_TYPE:
- if (TypeHelpers.IsEnumerable(member.ClrType) && TypeHelpers.IsString(member.ClrType) == false)
- {
- var arr = val as IEnumerable;
- json.Append($"\"{member.Name.ToLowerInvariant()}\": {{ \"type\": \"keyword\", \"value\": [ {string.Join(", ", E(arr).Select(v => KeywordToString(member, v)))}] }},");
- }
- else
- {
- json.Append($"\"{member.Name.ToLowerInvariant()}\": {{ \"type\": \"keyword\", \"value\":{KeywordToString(member, val)} }},");
- }
- break;
- case GEO_TYPE:
- if (TypeHelpers.IsEnumerable(member.ClrType) && TypeHelpers.IsString(member.ClrType) == false)
- {
- var arr = val as IEnumerable;
- json.Append($"\"{member.Name.ToLowerInvariant()}\": {{ \"type\": \"geo\", \"value\": [ {string.Join(",", E(arr).Select(v => v as Location).Where(l => l != null).Select(l => $" {{ \"lon\":{l.lon.ConvertToString()}, \"lat\":{l.lat.ConvertToString()} }}"))}] }},");
- }
- else
- {
- Location l = val as Location;
- if (l != null)
- {
- json.Append($"\"{member.Name.ToLowerInvariant()}\": {{ \"type\": \"geo\", \"value\": {{ \"lon\":{l.lon.ConvertToString()}, \"lat\":{l.lat.ConvertToString()} }} }},");
- }
- }
- break;
- case FLOAT_TYPE:
- if (TypeHelpers.IsEnumerable(member.ClrType) && TypeHelpers.IsString(member.ClrType) == false)
- {
- var arr = val as IEnumerable;
- json.Append($"\"{member.Name.ToLowerInvariant()}\": {{ \"type\": \"float\", \"value\": [ {string.Join(",", E(arr).Select(v => Convert.ToDouble(v).ConvertToString()))}] }},");
- }
- else
- {
-
- json.Append($"\"{member.Name.ToLowerInvariant()}\": {{ \"type\": \"float\", \"value\": {Convert.ToDouble(val).ConvertToString()} }},");
- }
- break;
- case INTEGER_TYPE:
- if (TypeHelpers.IsEnumerable(member.ClrType) && TypeHelpers.IsString(member.ClrType) == false)
- {
- var arr = val as IEnumerable;
- json.Append($"\"{member.Name.ToLowerInvariant()}\": {{ \"type\": \"integer\", \"value\": [ {string.Join(",", E(arr).Select(v => Convert.ToInt64(v)))}] }},");
- }
- else
- {
- json.Append($"\"{member.Name.ToLowerInvariant()}\": {{ \"type\": \"integer\", \"value\": {Convert.ToInt64(val)} }},");
- }
- break;
- }
- }
- if (json[json.Length - 1] == ',')
- {
- json.Remove(json.Length - 1, 1);
- }
- return json.ToString();
- }
-
- private static IEnumerable