initial release

This commit is contained in:
2026-03-18 09:28:14 +01:00
commit 9d4bec7a17
18 changed files with 914 additions and 0 deletions

7
.gitignore vendored Normal file
View File

@@ -0,0 +1,7 @@
bin
obj
.vs
.vscode
.anchor
.crush
.idea

63
ConfigManager.cs Normal file
View File

@@ -0,0 +1,63 @@
namespace OpenQuery;
/// <summary>
/// Persisted user settings with built-in defaults; round-tripped through
/// <c>ConfigManager</c> as simple key=value lines.
/// </summary>
public class AppConfig
{
    /// <summary>OpenRouter API key; empty until configured.</summary>
    public string ApiKey { get; set; } = string.Empty;

    /// <summary>Default chat model identifier sent to OpenRouter.</summary>
    public string Model { get; set; } = "qwen/qwen3.5-flash-02-23";

    /// <summary>Default number of search queries to generate per question.</summary>
    public int DefaultQueries { get; set; } = 3;

    /// <summary>Default number of top-ranked chunks handed to the LLM.</summary>
    public int DefaultChunks { get; set; } = 3;

    /// <summary>Default number of search results considered per query.</summary>
    public int DefaultResults { get; set; } = 5;
}
/// <summary>
/// Loads and saves <see cref="AppConfig"/> as key=value lines under
/// ~/.config/openquery/config.
/// </summary>
public static class ConfigManager
{
    /// <summary>
    /// Returns the config file path, ensuring the containing directory exists.
    /// </summary>
    private static string GetConfigPath()
    {
        var home = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);
        var configDir = Path.Combine(home, ".config", "openquery");
        // CreateDirectory is a no-op when the directory already exists,
        // so no Directory.Exists pre-check is needed.
        Directory.CreateDirectory(configDir);
        return Path.Combine(configDir, "config");
    }

    /// <summary>
    /// Reads the config file if present. Unknown keys, malformed lines and
    /// non-numeric values are ignored; a missing file yields all defaults.
    /// </summary>
    public static AppConfig Load()
    {
        var config = new AppConfig();
        var path = GetConfigPath();
        if (!File.Exists(path))
            return config;
        foreach (var line in File.ReadAllLines(path))
        {
            // Split on the first '=' only, so values may themselves contain '='.
            var parts = line.Split('=', 2);
            if (parts.Length != 2)
                continue;
            var key = parts[0].Trim();
            var val = parts[1].Trim();
            switch (key)
            {
                case "ApiKey":
                    config.ApiKey = val;
                    break;
                case "Model":
                    config.Model = val;
                    break;
                case "DefaultQueries" when int.TryParse(val, out var q):
                    config.DefaultQueries = q;
                    break;
                case "DefaultChunks" when int.TryParse(val, out var c):
                    config.DefaultChunks = c;
                    break;
                case "DefaultResults" when int.TryParse(val, out var r):
                    config.DefaultResults = r;
                    break;
            }
        }
        return config;
    }

    /// <summary>Writes all settings back to the config file as key=value lines.</summary>
    public static void Save(AppConfig config)
    {
        var path = GetConfigPath();
        var lines = new List<string>
        {
            $"ApiKey={config.ApiKey}",
            $"Model={config.Model}",
            $"DefaultQueries={config.DefaultQueries}",
            $"DefaultChunks={config.DefaultChunks}",
            $"DefaultResults={config.DefaultResults}"
        };
        File.WriteAllLines(path, lines);
    }
}

11
Models/Chunk.cs Normal file
View File

@@ -0,0 +1,11 @@
namespace OpenQuery.Models;
/// <summary>
/// A slice of extracted article text together with its source page.
/// <see cref="Embedding"/> and <see cref="Score"/> are filled in later
/// during semantic ranking.
/// </summary>
public record Chunk(string Content, string SourceUrl, string? Title = null)
{
    // Set after the embedding API call; null until then.
    public float[]? Embedding { get; set; }

    // Cosine similarity of this chunk against the question embedding.
    public float Score { get; set; }
}

15
Models/JsonContexts.cs Normal file
View File

@@ -0,0 +1,15 @@
using System.Text.Json.Serialization;
using OpenQuery.Services;
namespace OpenQuery.Models;
// Source-generated System.Text.Json context. Required because the project
// publishes with Native AOT (<PublishAot>true</PublishAot>), where
// reflection-based serialization is unavailable: every type that is
// (de)serialized anywhere in the app must be registered here.
[JsonSerializable(typeof(ChatCompletionRequest))]
[JsonSerializable(typeof(ChatCompletionResponse))]
[JsonSerializable(typeof(ChatCompletionChunk))]
[JsonSerializable(typeof(EmbeddingRequest))]
[JsonSerializable(typeof(EmbeddingResponse))]
[JsonSerializable(typeof(SearxngRoot))]
// List<string> covers the model-generated search-query JSON array.
[JsonSerializable(typeof(List<string>))]
internal partial class AppJsonContext : JsonSerializerContext
{
}

View File

@@ -0,0 +1,10 @@
namespace OpenQuery.Models;
/// <summary>Parsed command-line options for a single question run.</summary>
/// <param name="Chunks">Top chunks passed to the LLM overall.</param>
/// <param name="Results">Search results considered per query.</param>
/// <param name="Queries">Search queries to generate before searching.</param>
/// <param name="Short">Request a very short, concise answer.</param>
/// <param name="Long">Request a long, elaborate answer.</param>
/// <param name="Question">The user's question text.</param>
public record OpenQueryOptions(int Chunks, int Results, int Queries, bool Short, bool Long, string Question);

75
Models/OpenRouter.cs Normal file
View File

@@ -0,0 +1,75 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace OpenQuery.Models;
// Request payload for POST /chat/completions (OpenAI-compatible schema).
public record ChatCompletionRequest(
[property: JsonPropertyName("model")] string Model,
[property: JsonPropertyName("messages")] List<Message> Messages,
[property: JsonPropertyName("tools")] List<ToolDefinition>? Tools = null,
[property: JsonPropertyName("stream")] bool Stream = false
);
// One chat message. Role is "system"/"user"/"assistant"/"tool"; ToolCalls is
// set on assistant messages that invoke tools, ToolCallId on tool replies.
public record Message(
[property: JsonPropertyName("role")] string Role,
[property: JsonPropertyName("content")] string? Content = null,
[property: JsonPropertyName("tool_calls")] List<ToolCall>? ToolCalls = null,
[property: JsonPropertyName("tool_call_id")] string? ToolCallId = null
)
{
// Convenience factory for a tool-result message answering a prior tool call.
public static Message FromTool(string content, string toolCallId) =>
new Message("tool", content, null, toolCallId);
}
// Tool declaration sent with a request; Type is expected to be "function".
public record ToolDefinition(
[property: JsonPropertyName("type")] string Type,
[property: JsonPropertyName("function")] ToolFunction Function
);
// Function metadata for a tool; Parameters is a raw JSON-schema element.
public record ToolFunction(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("description")] string Description,
[property: JsonPropertyName("parameters")] JsonElement Parameters
);
// A tool invocation emitted by the model.
public record ToolCall(
[property: JsonPropertyName("id")] string Id,
[property: JsonPropertyName("type")] string Type,
[property: JsonPropertyName("function")] FunctionCall Function
);
// The concrete call: function name plus its arguments as a JSON string.
public record FunctionCall(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("arguments")] string Arguments
);
// Non-streaming completion response.
public record ChatCompletionResponse(
[property: JsonPropertyName("choices")] List<Choice> Choices,
[property: JsonPropertyName("usage")] Usage? Usage = null
);
// One completion candidate and why generation stopped.
public record Choice(
[property: JsonPropertyName("message")] Message Message,
[property: JsonPropertyName("finish_reason")] string? FinishReason = null
);
// Token accounting reported by the API.
public record Usage(
[property: JsonPropertyName("prompt_tokens")] int PromptTokens,
[property: JsonPropertyName("completion_tokens")] int CompletionTokens,
[property: JsonPropertyName("total_tokens")] int TotalTokens
);
// Request payload for POST /embeddings.
public record EmbeddingRequest(
[property: JsonPropertyName("model")] string Model,
[property: JsonPropertyName("input")] List<string> Input
);
// Embeddings response; Data order is not guaranteed, use Index to reorder.
public record EmbeddingResponse(
[property: JsonPropertyName("data")] List<EmbeddingData> Data,
[property: JsonPropertyName("usage")] Usage Usage
);
// One embedding vector plus its position in the original input list.
public record EmbeddingData(
[property: JsonPropertyName("embedding")] float[] Embedding,
[property: JsonPropertyName("index")] int Index
);

13
Models/Searxng.cs Normal file
View File

@@ -0,0 +1,13 @@
using System.Text.Json.Serialization;
namespace OpenQuery.Models;
public record SearxngRoot(
[property: JsonPropertyName("results")] List<SearxngResult> Results
);
public record SearxngResult(
[property: JsonPropertyName("title")] string Title,
[property: JsonPropertyName("url")] string Url,
[property: JsonPropertyName("content")] string Content
);

153
OpenQuery.cs Normal file
View File

@@ -0,0 +1,153 @@
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
using OpenQuery.Models;
using OpenQuery.Services;
using OpenQuery.Tools;
namespace OpenQuery;
/// <summary>
/// Orchestrates one question run: optionally asks the model to generate extra
/// search queries, runs the web search tool to build a context, then streams
/// the final answer to the console.
/// </summary>
public class OpenQueryApp
{
    private readonly OpenRouterClient _client;
    private readonly SearchTool _searchTool;
    private readonly string _model;

    // Console spinner animation frames shown while waiting for the first token.
    // (Renamed from the misleading "Function".)
    private static readonly char[] SpinnerFrames = ['|', '/', '-', '\\'];

    // Strips the markdown code fences the model may wrap its JSON output in.
    // Cached and compiled because it runs on every multi-query request.
    private static readonly Regex JsonFenceRegex = new(@"```json\s*|\s*```", RegexOptions.Compiled);

    public OpenQueryApp(
        OpenRouterClient client,
        SearchTool searchTool,
        string model)
    {
        _client = client;
        _searchTool = searchTool;
        _model = model;
    }

    /// <summary>
    /// Runs the full pipeline for <paramref name="options"/> and writes the
    /// streamed answer to the console.
    /// </summary>
    public async Task RunAsync(OpenQueryOptions options)
    {
        // Fall back to the raw question if query generation is disabled or fails.
        var queries = new List<string> { options.Question };
        if (options.Queries > 1)
        {
            Console.WriteLine($"[Generating {options.Queries} search queries based on your question...]");
            var queryGenMessages = new List<Message>
            {
                new Message("system", """
                    You are an expert researcher. The user will ask a question. Your task is to generate optimal search queries to gather comprehensive information to answer this question.
                    Instructions:
                    1. Break down complex questions into diverse search queries.
                    2. Use synonyms and alternative phrasing to capture different sources.
                    3. Target different aspects of the question (e.g., specific entities, mechanisms, pros/cons, historical context).
                    Examples:
                    User: "What are the environmental impacts of electric cars compared to gas cars?"
                    Output: ["environmental impact of electric cars", "gas vs electric car carbon footprint", "EV battery production environmental cost", "lifecycle emissions electric vs gas vehicles"]
                    User: "How does the mRNA vaccine technology work?"
                    Output: ["how mRNA vaccines work", "mechanism of mRNA vaccination", "mRNA vaccine technology explained", "history of mRNA vaccines"]
                    CRITICAL: Your output MUST strictly be a valid JSON array of strings. Do not include any markdown formatting (like ```json), explanations, preambles, or other text. Just the raw JSON array.
                    """),
                new Message("user", $"Generate {options.Queries} distinct search queries for this question:\n{options.Question}")
            };
            try
            {
                var request = new ChatCompletionRequest(_model, queryGenMessages);
                var response = await _client.CompleteAsync(request);
                var content = response.Choices.FirstOrDefault()?.Message.Content;
                if (!string.IsNullOrEmpty(content))
                {
                    content = JsonFenceRegex.Replace(content, "").Trim();
                    var generatedQueries = JsonSerializer.Deserialize(content, AppJsonContext.Default.ListString);
                    if (generatedQueries != null && generatedQueries.Count > 0)
                    {
                        queries = generatedQueries;
                        Console.WriteLine($"[Generated queries: {string.Join(", ", queries)}]");
                    }
                }
            }
            catch (Exception ex)
            {
                // Best effort: a failed generation is not fatal, the original
                // question still works as a single search query.
                Console.WriteLine($"[Failed to generate queries, falling back to original question. Error: {ex.Message}]");
            }
        }
        // Chunk ranking is always done against the original question, even
        // when searches run on generated queries.
        var searchResult = await _searchTool.ExecuteAsync(options.Question, queries, options.Results, options.Chunks, msg => Console.WriteLine(msg));
        Console.WriteLine();
        var systemPrompt = "You are a helpful AI assistant. Answer the user's question in depth, based on the provided context. Be precise and accurate. You can mention sources or citations.";
        if (options.Short)
            systemPrompt += " Give a very short concise answer.";
        if (options.Long)
            systemPrompt += " Give a long elaborate detailed answer.";
        var messages = new List<Message>
        {
            new Message("system", systemPrompt),
            new Message("user", $"Context:\n{searchResult}\n\nQuestion: {options.Question}")
        };
        var requestStream = new ChatCompletionRequest(_model, messages);
        var assistantResponse = new StringBuilder();
        var isFirstChunk = true;
        Console.Write("[Sending request to AI model...] ");
        // The token cancels the spinner once the first token arrives (or on failure).
        using var cts = new CancellationTokenSource();
        var spinnerTask = Task.Run(async () =>
        {
            var index = 0;
            while (!cts.Token.IsCancellationRequested)
            {
                // Draw a frame and step the cursor back so the next frame
                // overwrites it in place.
                if (Console.CursorLeft > 0)
                {
                    Console.Write(SpinnerFrames[index++ % SpinnerFrames.Length]);
                    Console.SetCursorPosition(Console.CursorLeft - 1, Console.CursorTop);
                }
                try
                {
                    await Task.Delay(100, cts.Token);
                }
                catch (TaskCanceledException)
                {
                    break;
                }
            }
        }, cts.Token);
        try
        {
            await foreach (var chunk in _client.StreamAsync(requestStream, cts.Token))
            {
                if (chunk.TextDelta == null) continue;
                if (isFirstChunk)
                {
                    // Stop the spinner before printing the answer header.
                    await cts.CancelAsync();
                    await spinnerTask;
                    Console.WriteLine();
                    Console.Write("Assistant: ");
                    isFirstChunk = false;
                }
                Console.Write(chunk.TextDelta);
                assistantResponse.Append(chunk.TextDelta);
            }
        }
        finally
        {
            // Ensure the spinner stops even if the stream throws before the
            // first chunk arrives.
            if (!cts.IsCancellationRequested)
            {
                await cts.CancelAsync();
            }
        }
        Console.WriteLine();
    }
}

18
OpenQuery.csproj Normal file
View File

@@ -0,0 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<PublishAot>true</PublishAot>
<InvariantGlobalization>true</InvariantGlobalization>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="SmartReader" Version="0.11.0" />
<PackageReference Include="System.CommandLine" Version="2.0.0-beta4.22272.1" />
<PackageReference Include="System.Numerics.Tensors" Version="9.0.0" />
</ItemGroup>
</Project>

188
Program.cs Normal file
View File

@@ -0,0 +1,188 @@
using System.CommandLine;
using OpenQuery;
using OpenQuery.Models;
using OpenQuery.Services;
using OpenQuery.Tools;
// Load persisted defaults from ~/.config/openquery/config; they seed the
// option default values below.
var config = ConfigManager.Load();
// Options for the root (question-answering) command.
var chunksOption = new Option<int>(
aliases: ["-c", "--chunks"],
getDefaultValue: () => config.DefaultChunks,
description: "Amount of top chunks to pass to the LLM overall"
);
var resultsOption = new Option<int>(
aliases: ["-r", "--results"],
getDefaultValue: () => config.DefaultResults,
description: "Amount of search results to choose from per query"
);
var queriesOption = new Option<int>(
aliases: ["-q", "--queries"],
getDefaultValue: () => config.DefaultQueries,
description: "Amount of search queries the LLM should generate before starting the searches"
);
var shortOption = new Option<bool>(
aliases: ["-s", "--short"],
description: "Give a very short concise answer"
);
var longOption = new Option<bool>(
aliases: ["-l", "--long"],
description: "Give a long elaborate detailed answer"
);
// The question is taken as free-form trailing words and joined later.
var questionArgument = new Argument<string[]>(
name: "question",
description: "The question to ask"
)
{
Arity = ArgumentArity.ZeroOrMore // Changed to ZeroOrMore so 'configure' works without error
};
// 'configure' subcommand: interactive wizard or individual --key/--model/... flags.
var configureCommand = new Command("configure", "Configure OpenQuery settings");
var interactiveOption = new Option<bool>(["-i", "--interactive"], "Interactive configuration");
var keyOption = new Option<string>("--key", "Set API key");
var modelOption = new Option<string>("--model", "Set default model");
// Nullable ints so "not supplied" is distinguishable from an explicit value.
var defQueriesOption = new Option<int?>("--queries", "Set default queries");
var defChunksOption = new Option<int?>("--chunks", "Set default chunks");
var defResultsOption = new Option<int?>("--results", "Set default results");
configureCommand.AddOption(interactiveOption);
configureCommand.AddOption(keyOption);
configureCommand.AddOption(modelOption);
configureCommand.AddOption(defQueriesOption);
configureCommand.AddOption(defChunksOption);
configureCommand.AddOption(defResultsOption);
// Handler for the 'configure' subcommand: either walk through an interactive
// wizard or apply only the flags that were explicitly supplied, then persist.
configureCommand.SetHandler((isInteractive, key, model, queries, chunks, results) =>
{
    var cfg = ConfigManager.Load();
    if (isInteractive)
    {
        // Pressing Enter at any prompt keeps the current value.
        Console.Write($"API Key [{cfg.ApiKey}]: ");
        var k = Console.ReadLine();
        if (!string.IsNullOrWhiteSpace(k)) cfg.ApiKey = k;
        Console.WriteLine("Available models:");
        Console.WriteLine("1. qwen/qwen3.5-flash-02-23");
        Console.WriteLine("2. qwen/qwen3.5-122b-a10b");
        Console.WriteLine("3. minimax/minimax-m2.5");
        Console.WriteLine("4. google/gemini-3-flash-preview");
        Console.WriteLine("5. deepseek/deepseek-v3.2");
        Console.WriteLine("6. moonshotai/kimi-k2.5");
        Console.Write($"Model [{cfg.Model}]: ");
        var m = Console.ReadLine();
        if (!string.IsNullOrWhiteSpace(m))
        {
            var models = new[] {
                "qwen/qwen3.5-flash-02-23",
                "qwen/qwen3.5-122b-a10b",
                "minimax/minimax-m2.5",
                "google/gemini-3-flash-preview",
                "deepseek/deepseek-v3.2",
                "moonshotai/kimi-k2.5"
            };
            // Accept either a 1-based menu number or a literal model id.
            if (int.TryParse(m, out var idx) && idx >= 1 && idx <= 6)
            {
                cfg.Model = models[idx - 1];
            }
            else
            {
                cfg.Model = m;
            }
        }
        Console.Write($"Default Queries [{cfg.DefaultQueries}]: ");
        var q = Console.ReadLine();
        if (int.TryParse(q, out var qi)) cfg.DefaultQueries = qi;
        Console.Write($"Default Chunks [{cfg.DefaultChunks}]: ");
        var c = Console.ReadLine();
        if (int.TryParse(c, out var ci)) cfg.DefaultChunks = ci;
        Console.Write($"Default Results [{cfg.DefaultResults}]: ");
        var r = Console.ReadLine();
        if (int.TryParse(r, out var ri)) cfg.DefaultResults = ri;
    }
    else
    {
        // Bug fix: only overwrite values that were actually supplied.
        // Previously `configure --queries 5` assigned the unset (null)
        // --key/--model options and silently wiped the saved ApiKey/Model.
        if (!string.IsNullOrWhiteSpace(key)) cfg.ApiKey = key;
        if (!string.IsNullOrWhiteSpace(model)) cfg.Model = model;
        if (queries.HasValue) cfg.DefaultQueries = queries.Value;
        if (chunks.HasValue) cfg.DefaultChunks = chunks.Value;
        if (results.HasValue) cfg.DefaultResults = results.Value;
    }
    ConfigManager.Save(cfg);
    Console.WriteLine("Configuration saved to " + Environment.GetFolderPath(Environment.SpecialFolder.UserProfile) + "/.config/openquery/config");
}, interactiveOption, keyOption, modelOption, defQueriesOption, defChunksOption, defResultsOption);
// Root command: the question plus tuning options, with 'configure' as a subcommand.
var rootCommand = new RootCommand("OpenQuery - AI powered search and answer")
{
chunksOption,
resultsOption,
queriesOption,
shortOption,
longOption,
questionArgument,
configureCommand
};
rootCommand.SetHandler(async (chunks, results, queries, isShort, isLong, questionArgs) =>
{
// The question arrives as trailing words; rejoin them into one string.
var question = string.Join(" ", questionArgs);
if (string.IsNullOrWhiteSpace(question))
{
// No question given: show help instead of erroring out.
rootCommand.Invoke("--help");
return;
}
var options = new OpenQueryOptions(chunks, results, queries, isShort, isLong, question);
// Environment variables take precedence over the saved config file.
var apiKey = Environment.GetEnvironmentVariable("OPENROUTER_API_KEY");
if (string.IsNullOrEmpty(apiKey))
{
apiKey = config.ApiKey;
}
if (string.IsNullOrEmpty(apiKey))
{
Console.Error.WriteLine("Error: API Key is missing. Set OPENROUTER_API_KEY environment variable or run 'configure -i' to set it up.");
Environment.Exit(1);
}
var model = Environment.GetEnvironmentVariable("OPENROUTER_MODEL");
if (string.IsNullOrEmpty(model))
{
model = config.Model;
}
// SearXNG endpoint defaults to a local instance.
var searxngUrl = Environment.GetEnvironmentVariable("SEARXNG_URL") ?? "http://localhost:8002";
// Wire up the service graph for this single run.
var client = new OpenRouterClient(apiKey);
var searxngClient = new SearxngClient(searxngUrl);
var embeddingService = new EmbeddingService(client);
var searchTool = new SearchTool(searxngClient, embeddingService);
try
{
var openQuery = new OpenQueryApp(client, searchTool, model);
await openQuery.RunAsync(options);
}
catch (HttpRequestException ex)
{
Console.Error.WriteLine($"\n[Error] Network request failed. Details: {ex.Message}");
Environment.Exit(1);
}
catch (Exception ex)
{
Console.Error.WriteLine($"\n[Error] An unexpected error occurred: {ex.Message}");
Environment.Exit(1);
}
}, chunksOption, resultsOption, queriesOption, shortOption, longOption, questionArgument);
return await rootCommand.InvokeAsync(args);

View File

@@ -0,0 +1,12 @@
using SmartReader;
namespace OpenQuery.Services;
/// <summary>
/// Thin wrapper around SmartReader for downloading and parsing a web page
/// into a readable article.
/// </summary>
public class ArticleService
{
    /// <summary>Fetches <paramref name="url"/> and parses it with SmartReader.</summary>
    public static async Task<Article> FetchArticleAsync(string url) =>
        await Reader.ParseArticleAsync(url);
}

View File

@@ -0,0 +1,32 @@
namespace OpenQuery.Services;
/// <summary>
/// Splits long text into chunks of at most <see cref="MAX_CHUNK_SIZE"/>
/// characters, preferring to break at whitespace or sentence punctuation.
/// </summary>
public static class ChunkingService
{
    private const int MAX_CHUNK_SIZE = 500;

    /// <summary>
    /// Chunks <paramref name="text"/>; trimmed-empty pieces are dropped.
    /// </summary>
    public static List<string> ChunkText(string text)
    {
        var pieces = new List<string>();
        var offset = 0;
        while (offset < text.Length)
        {
            var take = Math.Min(MAX_CHUNK_SIZE, text.Length - offset);
            var end = offset + take;
            if (end < text.Length)
            {
                // Search backwards inside the window for a natural break point.
                var breakAt = text.LastIndexOfAny([' ', '\n', '\r', '.', '!'], end, take);
                if (breakAt > offset)
                    take = breakAt - offset + 1;
            }
            var piece = text.Substring(offset, take).Trim();
            if (piece.Length > 0)
                pieces.Add(piece);
            offset += take;
        }
        return pieces;
    }
}

View File

@@ -0,0 +1,38 @@
using System.Numerics.Tensors;
namespace OpenQuery.Services;
/// <summary>
/// Batches embedding requests through the OpenRouter client and exposes
/// cosine similarity for ranking.
/// </summary>
public class EmbeddingService
{
    private readonly OpenRouterClient _client;
    private readonly string _embeddingModel;

    public EmbeddingService(OpenRouterClient client, string embeddingModel = "openai/text-embedding-3-small")
    {
        _client = client;
        _embeddingModel = embeddingModel;
    }

    /// <summary>
    /// Embeds <paramref name="texts"/> in batches of 300 and returns the
    /// vectors in input order.
    /// </summary>
    public async Task<float[][]> GetEmbeddingsAsync(List<string> texts)
    {
        var results = new List<float[]>(texts.Count);
        const int batchSize = 300;
        var totalBatches = (int)Math.Ceiling(texts.Count / (double)batchSize);
        for (var i = 0; i < texts.Count; i += batchSize)
        {
            if (texts.Count > batchSize)
                // Bug fix: the running batch number is 1-based now; the old
                // Math.Ceiling(i / batchSize) printed "0/N" for the first batch.
                Console.WriteLine(
                    $"[Generating {i / batchSize + 1}/{totalBatches} batch of embeddings]");
            var batch = texts.Skip(i).Take(batchSize).ToList();
            var batchResults = await _client.EmbedAsync(_embeddingModel, batch);
            results.AddRange(batchResults);
        }
        return results.ToArray();
    }

    /// <summary>Cosine similarity between two equal-length vectors.</summary>
    public static float CosineSimilarity(float[] vector1, float[] vector2)
    {
        return TensorPrimitives.CosineSimilarity(vector1, vector2);
    }
}

View File

@@ -0,0 +1,123 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using OpenQuery.Models;
namespace OpenQuery.Services;
/// <summary>
/// HTTP client for the OpenRouter API: streaming chat completions (SSE),
/// non-streaming completions, and embeddings. A single HttpClient instance
/// is reused for all calls.
/// </summary>
public class OpenRouterClient
{
    private readonly HttpClient _httpClient;
    private readonly string _baseUrl = "https://openrouter.ai/api/v1";

    public OpenRouterClient(string apiKey)
    {
        // The key lives only in the default Authorization header; the old
        // unused _apiKey field has been removed.
        _httpClient = new HttpClient();
        _httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", apiKey);
        _httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
    }

    /// <summary>
    /// Streams a chat completion as server-sent events, yielding text deltas
    /// and tool-call fragments as they arrive. Ends on the "[DONE]" sentinel.
    /// </summary>
    public async IAsyncEnumerable<StreamChunk> StreamAsync(ChatCompletionRequest request, [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        request = request with { Stream = true };
        var json = JsonSerializer.Serialize(request, AppJsonContext.Default.ChatCompletionRequest);
        var content = new StringContent(json, Encoding.UTF8, new MediaTypeHeaderValue("application/json"));
        var httpRequest = new HttpRequestMessage(HttpMethod.Post, $"{_baseUrl}/chat/completions")
        {
            Content = content
        };
        using var response = await _httpClient.SendAsync(httpRequest, HttpCompletionOption.ResponseHeadersRead, cancellationToken);
        response.EnsureSuccessStatusCode();
        await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
        using var reader = new StreamReader(stream);
        while (await reader.ReadLineAsync(cancellationToken) is { } line)
        {
            // SSE comment lines start with ':' (char overload instead of the
            // needless interpolated-string StartsWith); skip blanks too.
            if (string.IsNullOrEmpty(line) || line.StartsWith(':'))
                continue;
            if (!line.StartsWith("data: ")) continue;
            var data = line[6..];
            if (data == "[DONE]")
                yield break;
            var chunk = JsonSerializer.Deserialize<ChatCompletionChunk>(data, AppJsonContext.Default.ChatCompletionChunk);
            if (!(chunk?.Choices?.Count > 0)) continue;
            var delta = chunk.Choices[0].Delta;
            if (!string.IsNullOrEmpty(delta?.Content))
                yield return new StreamChunk(delta.Content);
            if (delta?.ToolCalls is not { Count: > 0 }) continue;
            var toolCall = delta.ToolCalls[0];
            yield return new StreamChunk(null, new ClientToolCall(
                toolCall.Id,
                toolCall.Function.Name,
                toolCall.Function.Arguments
            ));
        }
    }

    /// <summary>Sends a non-streaming chat completion request.</summary>
    public async Task<ChatCompletionResponse> CompleteAsync(ChatCompletionRequest request, CancellationToken cancellationToken = default)
    {
        request = request with { Stream = false };
        var json = JsonSerializer.Serialize(request, AppJsonContext.Default.ChatCompletionRequest);
        var content = new StringContent(json, Encoding.UTF8, new MediaTypeHeaderValue("application/json"));
        var response = await _httpClient.PostAsync($"{_baseUrl}/chat/completions", content, cancellationToken);
        response.EnsureSuccessStatusCode();
        var responseJson = await response.Content.ReadAsStringAsync(cancellationToken);
        return JsonSerializer.Deserialize<ChatCompletionResponse>(responseJson, AppJsonContext.Default.ChatCompletionResponse)!;
    }

    /// <summary>Embeds the inputs and returns vectors ordered by input index.</summary>
    public async Task<float[][]> EmbedAsync(string model, List<string> inputs, CancellationToken cancellationToken = default)
    {
        var request = new EmbeddingRequest(model, inputs);
        var json = JsonSerializer.Serialize(request, AppJsonContext.Default.EmbeddingRequest);
        var content = new StringContent(json, Encoding.UTF8, new MediaTypeHeaderValue("application/json"));
        var response = await _httpClient.PostAsync($"{_baseUrl}/embeddings", content, cancellationToken);
        response.EnsureSuccessStatusCode();
        var responseJson = await response.Content.ReadAsStringAsync(cancellationToken);
        var embeddingResponse = JsonSerializer.Deserialize<EmbeddingResponse>(responseJson, AppJsonContext.Default.EmbeddingResponse)!;
        // The API may return data out of order; Index restores input order.
        return embeddingResponse.Data
            .OrderBy(d => d.Index)
            .Select(d => d.Embedding)
            .ToArray();
    }
}
// One streamed event surfaced to callers: either a text delta or a tool call.
public record StreamChunk(
string? TextDelta = null,
ClientToolCall? Tool = null
);
// Client-side view of a tool invocation extracted from a stream delta.
public record ClientToolCall(
string ToolId,
string ToolName,
string Arguments
);
// Wire format of one SSE "data:" payload from /chat/completions.
public record ChatCompletionChunk(
[property: JsonPropertyName("choices")] List<ChunkChoice> Choices
);
public record ChunkChoice(
[property: JsonPropertyName("delta")] ChunkDelta Delta
);
// Incremental message content; either Content or ToolCalls may be present.
public record ChunkDelta(
[property: JsonPropertyName("content")] string? Content = null,
[property: JsonPropertyName("tool_calls")] List<ToolCall>? ToolCalls = null
);

30
Services/SearxngClient.cs Normal file
View File

@@ -0,0 +1,30 @@
using System.Text.Json;
using OpenQuery.Models;
namespace OpenQuery.Services;
/// <summary>
/// Minimal client for a SearXNG instance using its JSON output format.
/// </summary>
public class SearxngClient
{
    private readonly HttpClient _httpClient = new();
    private readonly string _baseUrl;

    /// <summary>Creates a client for the instance at <paramref name="baseUrl"/>.</summary>
    public SearxngClient(string baseUrl)
    {
        _baseUrl = baseUrl.TrimEnd('/');
    }

    /// <summary>
    /// Runs a search and returns at most <paramref name="limit"/> results;
    /// an empty list when the response has none.
    /// </summary>
    public async Task<List<SearxngResult>> SearchAsync(string query, int limit = 10)
    {
        var requestUri = $"{_baseUrl}/search?q={Uri.EscapeDataString(query)}&format=json";
        var response = await _httpClient.GetAsync(requestUri);
        response.EnsureSuccessStatusCode();
        var payload = await response.Content.ReadAsStringAsync();
        var root = JsonSerializer.Deserialize<SearxngRoot>(payload, AppJsonContext.Default.SearxngRoot);
        return root?.Results?.Take(limit).ToList() ?? [];
    }
}

89
Tools/SearchTool.cs Normal file
View File

@@ -0,0 +1,89 @@
using OpenQuery.Models;
using OpenQuery.Services;
namespace OpenQuery.Tools;
/// <summary>
/// Web research pipeline: search SearXNG for every generated query,
/// deduplicate, fetch and chunk readable articles, rank chunks by embedding
/// similarity to the original question, and format the top chunks as an LLM
/// context block.
/// </summary>
public class SearchTool
{
    private readonly SearxngClient _searxngClient;
    private readonly EmbeddingService _embeddingService;

    public static string Name => "search";
    public static string Description => "Search the web for information on a topic";

    public SearchTool(
        SearxngClient searxngClient,
        EmbeddingService embeddingService)
    {
        _searxngClient = searxngClient;
        _embeddingService = embeddingService;
    }

    /// <summary>
    /// Runs the full pipeline and returns the context string, or a short
    /// status message when nothing usable was found.
    /// </summary>
    public async Task<string> ExecuteAsync(string originalQuery, List<string> generatedQueries, int maxResults, int topChunksLimit, Action<string>? onProgress = null)
    {
        var allResults = new List<SearxngResult>();
        foreach (var query in generatedQueries)
        {
            onProgress?.Invoke($"[Searching web for '{query}'...]");
            var results = await _searxngClient.SearchAsync(query, maxResults);
            allResults.AddRange(results);
        }
        // Different queries often surface the same page; keep each URL once.
        var uniqueResults = allResults.DistinctBy(r => r.Url).ToList();
        if (uniqueResults.Count == 0)
            return "No search results found.";
        onProgress?.Invoke($"[Found {uniqueResults.Count} unique results across all queries. Fetching and reading articles...]");
        var chunks = new List<Chunk>();
        foreach (var result in uniqueResults)
        {
            try
            {
                var article = await ArticleService.FetchArticleAsync(result.Url);
                if (!article.IsReadable || string.IsNullOrEmpty(article.TextContent)) continue;
                var textChunks = ChunkingService.ChunkText(article.TextContent);
                chunks.AddRange(textChunks.Select(chunkText => new Chunk(chunkText, result.Url, article.Title)));
            }
            catch (Exception ex)
            {
                // Best effort: a page that fails to download or parse is
                // skipped, but surface it instead of swallowing silently so
                // the user knows why a source is missing.
                onProgress?.Invoke($"[Skipping {result.Url}: {ex.Message}]");
            }
        }
        if (chunks.Count == 0)
            return "Found search results but could not extract readable content.";
        onProgress?.Invoke($"[Extracted {chunks.Count} text chunks. Generating embeddings for semantic search...]");
        var chunkTexts = chunks.Select(c => c.Content).ToList();
        var embeddings = await _embeddingService.GetEmbeddingsAsync(chunkTexts);
        for (var i = 0; i < chunks.Count; i++)
        {
            chunks[i] = chunks[i] with { Embedding = embeddings[i] };
        }
        // Rank against the original question, not the generated queries.
        var queryEmbedding = (await _embeddingService.GetEmbeddingsAsync([originalQuery]))[0];
        foreach (var chunk in chunks)
        {
            chunk.Score = EmbeddingService.CosineSimilarity(queryEmbedding, chunk.Embedding!);
        }
        var topChunks = chunks.OrderByDescending(c => c.Score).Take(topChunksLimit).ToList();
        onProgress?.Invoke($"[Found top {topChunks.Count} most relevant chunks overall. Generating answer...]");
        var context = string.Join("\n\n", topChunks.Select((c, i) =>
            $"[Source {i + 1}: {c.Title ?? "Unknown"}]({c.SourceUrl})\n{c.Content}"));
        return context;
    }

    /// <summary>
    /// Synchronous tool entry point is intentionally unsupported; the search
    /// pipeline is async end-to-end.
    /// </summary>
    public static string Execute(string argumentsJson)
    {
        throw new InvalidOperationException("Use ExecuteAsync instead");
    }
}

21
install.sh Executable file
View File

@@ -0,0 +1,21 @@
#!/bin/bash
# Build the Native AOT binary and install it system-wide (requires sudo).
# Exit on error
set -e
echo "Building OpenQuery with Native AOT..."
dotnet publish -c Release
# NOTE(review): path assumes a linux-x64 host RID; publishing on another
# platform/architecture will place the binary elsewhere — confirm if this
# script needs to support other RIDs.
BINARY_PATH="bin/Release/net10.0/linux-x64/publish/OpenQuery"
# Fail fast if the publish step did not produce the expected binary.
if [ ! -f "$BINARY_PATH" ]; then
echo "Error: Published binary not found at $BINARY_PATH"
echo "Please ensure the project builds successfully."
exit 1
fi
echo "Installing OpenQuery to /usr/bin/openquery..."
sudo cp "$BINARY_PATH" /usr/bin/openquery
sudo chmod +x /usr/bin/openquery
echo "OpenQuery installed successfully! You can now run it using the 'openquery' command."

16
uninstall.sh Executable file
View File

@@ -0,0 +1,16 @@
#!/bin/bash
# Remove the system-wide OpenQuery binary installed by install.sh.
# Exit on error
set -e
INSTALL_PATH="/usr/bin/openquery"
# Nothing to do if it was never installed; exit successfully.
if [ ! -f "$INSTALL_PATH" ]; then
echo "OpenQuery is not installed at $INSTALL_PATH"
exit 0
fi
echo "Removing OpenQuery from $INSTALL_PATH..."
sudo rm "$INSTALL_PATH"
echo "OpenQuery uninstalled successfully."