Files
OpenQuery/Program.cs
TomiEckert b28d8998f7 feat: parallel async processing and compact output mode
Major performance improvements:
- Parallel search execution across all queries
- Parallel article fetching with 10 concurrent limit
- Parallel embeddings with rate limiting (4 concurrent)
- Polly integration for retry resilience

New features:
- Add -v/--verbose flag for detailed output
- Compact single-line status mode with braille spinner
- StatusReporter service for unified output handling
- Query generation and errors hidden in compact mode
- ANSI escape codes for clean line updates

New files:
- Services/RateLimiter.cs - Semaphore-based concurrency control
- Services/StatusReporter.cs - Verbose/compact output handler
- Models/ParallelOptions.cs - Parallel processing configuration

All changes maintain Native AOT compatibility.
2026-03-18 22:16:28 +01:00

194 lines
6.5 KiB
C#

using System.CommandLine;
using OpenQuery;
using OpenQuery.Models;
using OpenQuery.Services;
using OpenQuery.Tools;
// Load persisted configuration once; its values seed the CLI option defaults
// below and serve as the fallback for API key / model in the main handler.
var config = ConfigManager.Load();
// -c/--chunks: total number of top-ranked chunks forwarded to the LLM.
var chunksOption = new Option<int>(
aliases: ["-c", "--chunks"],
getDefaultValue: () => config.DefaultChunks,
description: "Amount of top chunks to pass to the LLM overall"
);
// -r/--results: candidate search results considered per generated query.
var resultsOption = new Option<int>(
aliases: ["-r", "--results"],
getDefaultValue: () => config.DefaultResults,
description: "Amount of search results to choose from per query"
);
// -q/--queries: how many search queries the LLM generates up front.
var queriesOption = new Option<int>(
aliases: ["-q", "--queries"],
getDefaultValue: () => config.DefaultQueries,
description: "Amount of search queries the LLM should generate before starting the searches"
);
// -s/--short and -l/--long select the answer verbosity (mutual exclusion,
// if any, is presumably enforced downstream — TODO confirm in OpenQueryApp).
var shortOption = new Option<bool>(
aliases: ["-s", "--short"],
description: "Give a very short concise answer"
);
var longOption = new Option<bool>(
aliases: ["-l", "--long"],
description: "Give a long elaborate detailed answer"
);
// -v/--verbose: detailed progress output (vs. the compact spinner mode).
var verboseOption = new Option<bool>(
aliases: ["-v", "--verbose"],
description: "Show detailed progress information"
);
// Free-form question words; joined with spaces in the root handler.
var questionArgument = new Argument<string[]>(
name: "question",
description: "The question to ask"
)
{
Arity = ArgumentArity.ZeroOrMore // Changed to ZeroOrMore so 'configure' works without error
};
// 'configure' subcommand: persists API key, model, and default tuning values,
// either interactively (-i) or via individual flags.
var configureCommand = new Command("configure", "Configure OpenQuery settings");
var interactiveOption = new Option<bool>(["-i", "--interactive"], "Interactive configuration");
var keyOption = new Option<string>("--key", "Set API key");
var modelOption = new Option<string>("--model", "Set default model");
var defQueriesOption = new Option<int?>("--queries", "Set default queries");
var defChunksOption = new Option<int?>("--chunks", "Set default chunks");
var defResultsOption = new Option<int?>("--results", "Set default results");
configureCommand.AddOption(interactiveOption);
configureCommand.AddOption(keyOption);
configureCommand.AddOption(modelOption);
configureCommand.AddOption(defQueriesOption);
configureCommand.AddOption(defChunksOption);
configureCommand.AddOption(defResultsOption);
configureCommand.SetHandler((isInteractive, key, model, queries, chunks, results) =>
{
    // Re-load instead of reusing the outer 'config' so values edited here
    // never leak stale state into an in-flight query invocation.
    var cfg = ConfigManager.Load();
    if (isInteractive)
    {
        // Interactive mode: show current value in brackets; empty input keeps it.
        Console.Write($"API Key [{cfg.ApiKey}]: ");
        var k = Console.ReadLine();
        if (!string.IsNullOrWhiteSpace(k)) cfg.ApiKey = k;
        // Model can be picked by menu number or typed as a free-form model id.
        var models = new[] {
            "qwen/qwen3.5-flash-02-23",
            "qwen/qwen3.5-122b-a10b",
            "minimax/minimax-m2.5",
            "google/gemini-3-flash-preview",
            "deepseek/deepseek-v3.2",
            "moonshotai/kimi-k2.5"
        };
        Console.WriteLine("Available models:");
        for (var i = 0; i < models.Length; i++)
        {
            Console.WriteLine($"{i + 1}. {models[i]}");
        }
        Console.Write($"Model [{cfg.Model}]: ");
        var m = Console.ReadLine();
        if (!string.IsNullOrWhiteSpace(m))
        {
            // Bounds check uses models.Length rather than a hardcoded count so
            // the menu and the accepted range cannot drift apart.
            if (int.TryParse(m, out var idx) && idx >= 1 && idx <= models.Length)
            {
                cfg.Model = models[idx - 1];
            }
            else
            {
                cfg.Model = m;
            }
        }
        Console.Write($"Default Queries [{cfg.DefaultQueries}]: ");
        var q = Console.ReadLine();
        if (int.TryParse(q, out var qi)) cfg.DefaultQueries = qi;
        Console.Write($"Default Chunks [{cfg.DefaultChunks}]: ");
        var c = Console.ReadLine();
        if (int.TryParse(c, out var ci)) cfg.DefaultChunks = ci;
        Console.Write($"Default Results [{cfg.DefaultResults}]: ");
        var r = Console.ReadLine();
        if (int.TryParse(r, out var ri)) cfg.DefaultResults = ri;
    }
    else
    {
        // BUG FIX: key/model were previously assigned unconditionally, so a
        // partial update like 'configure --queries 5' wiped the stored API key
        // and model with null. Only overwrite values that were actually given.
        if (!string.IsNullOrWhiteSpace(key)) cfg.ApiKey = key;
        if (!string.IsNullOrWhiteSpace(model)) cfg.Model = model;
        if (queries.HasValue) cfg.DefaultQueries = queries.Value;
        if (chunks.HasValue) cfg.DefaultChunks = chunks.Value;
        if (results.HasValue) cfg.DefaultResults = results.Value;
    }
    ConfigManager.Save(cfg);
    // NOTE(review): this path is informational only; assumes ConfigManager
    // writes to ~/.config/openquery/config — confirm it matches on all OSes.
    Console.WriteLine("Configuration saved to " + Environment.GetFolderPath(Environment.SpecialFolder.UserProfile) + "/.config/openquery/config");
}, interactiveOption, keyOption, modelOption, defQueriesOption, defChunksOption, defResultsOption);
// Root command: wires every global option, the question argument, and the
// 'configure' subcommand onto the application entry point.
var rootCommand = new RootCommand("OpenQuery - AI powered search and answer");
rootCommand.AddOption(chunksOption);
rootCommand.AddOption(resultsOption);
rootCommand.AddOption(queriesOption);
rootCommand.AddOption(shortOption);
rootCommand.AddOption(longOption);
rootCommand.AddOption(verboseOption);
rootCommand.AddArgument(questionArgument);
rootCommand.AddCommand(configureCommand);
// Main query handler: resolves credentials and model (environment variables
// take precedence over saved config), builds the service graph, and runs the
// search/answer pipeline. Exits non-zero on any failure.
rootCommand.SetHandler(async (chunks, results, queries, isShort, isLong, verbose, questionArgs) =>
{
    var question = string.Join(" ", questionArgs);
    if (string.IsNullOrWhiteSpace(question))
    {
        // No question supplied: show usage. FIX: await the async overload
        // instead of blocking the async handler with the synchronous Invoke.
        await rootCommand.InvokeAsync("--help");
        return;
    }
    var options = new OpenQueryOptions(chunks, results, queries, isShort, isLong, verbose, question);

    // API key: environment variable wins, then persisted config; missing key
    // is a hard error since nothing downstream can work without it.
    var apiKey = Environment.GetEnvironmentVariable("OPENROUTER_API_KEY");
    if (string.IsNullOrEmpty(apiKey))
    {
        apiKey = config.ApiKey;
    }
    if (string.IsNullOrEmpty(apiKey))
    {
        Console.Error.WriteLine("Error: API Key is missing. Set OPENROUTER_API_KEY environment variable or run 'configure -i' to set it up.");
        Environment.Exit(1);
    }
    // Model: same precedence order as the API key.
    var model = Environment.GetEnvironmentVariable("OPENROUTER_MODEL");
    if (string.IsNullOrEmpty(model))
    {
        model = config.Model;
    }
    // SearxNG search endpoint; defaults to a local instance on port 8002.
    var searxngUrl = Environment.GetEnvironmentVariable("SEARXNG_URL") ?? "http://localhost:8002";

    // Service graph: OpenRouter LLM client -> embeddings -> search tool.
    var client = new OpenRouterClient(apiKey);
    var searxngClient = new SearxngClient(searxngUrl);
    var embeddingService = new EmbeddingService(client);
    var searchTool = new SearchTool(searxngClient, embeddingService);
    try
    {
        var openQuery = new OpenQueryApp(client, searchTool, model);
        await openQuery.RunAsync(options);
    }
    catch (HttpRequestException ex)
    {
        // Network failures get a dedicated, friendlier message.
        Console.Error.WriteLine($"\n[Error] Network request failed. Details: {ex.Message}");
        Environment.Exit(1);
    }
    catch (Exception ex)
    {
        // Last-resort handler: report and exit non-zero rather than dumping a stack trace.
        Console.Error.WriteLine($"\n[Error] An unexpected error occurred: {ex.Message}");
        Environment.Exit(1);
    }
}, chunksOption, resultsOption, queriesOption, shortOption, longOption, verboseOption, questionArgument);
return await rootCommand.InvokeAsync(args);