diff --git a/AnchorConfig.cs b/AnchorConfig.cs
index 148783f..45ed0c6 100644
--- a/AnchorConfig.cs
+++ b/AnchorConfig.cs
@@ -7,7 +7,8 @@ internal sealed class AnchorConfig
{
public string ApiKey { get; set; } = "";
public string Model { get; set; } = "qwen/qwen3.5-397b-a17b";
-
+ public string Provider { get; set; } = "openrouter";
+ public string Endpoint { get; set; } = "https://openrouter.ai/api/v1";
// ── Persistence ──────────────────────────────────────────────────────
private static string ConfigPath =>
diff --git a/OpenRouter/TokenTracker.cs b/OpenRouter/TokenTracker.cs
index eaf1925..60caa62 100644
--- a/OpenRouter/TokenTracker.cs
+++ b/OpenRouter/TokenTracker.cs
@@ -5,6 +5,7 @@ namespace AnchorCli.OpenRouter;
///
internal sealed class TokenTracker
{
+ public string Provider { get; set; } = "Unknown";
public long SessionInputTokens { get; private set; }
public long SessionOutputTokens { get; private set; }
public int RequestCount { get; private set; }
@@ -23,7 +24,6 @@ internal sealed class TokenTracker
/// Fixed USD per API request.
public decimal RequestPrice { get; set; }
-
///
/// Record usage from one response (may span multiple LLM rounds).
///
diff --git a/Program.cs b/Program.cs
index 5f1b23a..85a7819 100644
--- a/Program.cs
+++ b/Program.cs
@@ -1,4 +1,5 @@
using System.ClientModel;
+using AnchorCli.Providers;
using Microsoft.Extensions.AI;
using OpenAI;
using AnchorCli;
@@ -15,10 +16,11 @@ if (args.Length > 0 && args[0].Equals("setup", StringComparison.OrdinalIgnoreCas
}
// ── Config ──────────────────────────────────────────────────────────────
-const string endpoint = "https://openrouter.ai/api/v1";
var cfg = AnchorConfig.Load();
string apiKey = cfg.ApiKey;
string model = cfg.Model;
+string provider = cfg.Provider ?? "openrouter";
+string endpoint = cfg.Endpoint ?? "https://openrouter.ai/api/v1";
if (string.IsNullOrWhiteSpace(apiKey))
{
@@ -26,28 +28,33 @@ if (string.IsNullOrWhiteSpace(apiKey))
return;
}
-// ── Fetch model pricing from OpenRouter ─────────────────────────────────
-var pricingProvider = new PricingProvider();
-var tokenTracker = new TokenTracker();
+// ── Create token extractor for this provider ───────────────────────────
+var tokenExtractor = ProviderFactory.CreateTokenExtractorForEndpoint(endpoint);
+var tokenTracker = new TokenTracker { Provider = tokenExtractor.ProviderName };
+// ── Fetch model pricing (only for supported providers) ─────────────────
ModelInfo? modelInfo = null;
-await AnsiConsole.Status()
- .Spinner(Spinner.Known.BouncingBar)
- .SpinnerStyle(Style.Parse("cornflowerblue"))
- .StartAsync("Fetching model pricing...", async ctx =>
- {
- try
+if (ProviderFactory.IsOpenRouter(endpoint))
+{
+ await AnsiConsole.Status()
+ .Spinner(Spinner.Known.BouncingBar)
+ .SpinnerStyle(Style.Parse("cornflowerblue"))
+ .StartAsync("Fetching model pricing...", async ctx =>
{
- modelInfo = await pricingProvider.GetModelInfoAsync(model);
- if (modelInfo?.Pricing != null)
+ try
{
- tokenTracker.InputPrice = PricingProvider.ParsePrice(modelInfo.Pricing.Prompt);
- tokenTracker.OutputPrice = PricingProvider.ParsePrice(modelInfo.Pricing.Completion);
- tokenTracker.RequestPrice = PricingProvider.ParsePrice(modelInfo.Pricing.Request);
+ var pricingProvider = new OpenRouterProvider();
+ modelInfo = await pricingProvider.GetModelInfoAsync(model);
+ if (modelInfo?.Pricing != null)
+ {
+ tokenTracker.InputPrice = PricingProvider.ParsePrice(modelInfo.Pricing.Prompt);
+ tokenTracker.OutputPrice = PricingProvider.ParsePrice(modelInfo.Pricing.Completion);
+ tokenTracker.RequestPrice = PricingProvider.ParsePrice(modelInfo.Pricing.Request);
+ }
}
- }
- catch { /* pricing is best-effort */ }
- });
+ catch { /* pricing is best-effort */ }
+ });
+}
// ── Pretty header ───────────────────────────────────────────────────────
AnsiConsole.Write(
@@ -68,9 +75,12 @@ var infoTable = new Table()
.AddColumn(new TableColumn("[dim]Value[/]"));
infoTable.AddRow("[grey]Model[/]", $"[cyan]{Markup.Escape(modelInfo?.Name ?? model)}[/]");
-infoTable.AddRow("[grey]Endpoint[/]", $"[blue]OpenRouter[/]");
+infoTable.AddRow("[grey]Provider[/]", $"[blue]{tokenExtractor.ProviderName}[/]");
+infoTable.AddRow("[grey]Endpoint[/]", $"[dim]{endpoint}[/]");
infoTable.AddRow("[grey]CWD[/]", $"[green]{Markup.Escape(Environment.CurrentDirectory)}[/]");
if (modelInfo?.Pricing != null)
{
var inM = tokenTracker.InputPrice * 1_000_000m;
diff --git a/Providers/GenericTokenExtractor.cs b/Providers/GenericTokenExtractor.cs
new file mode 100644
index 0000000..9330d40
--- /dev/null
+++ b/Providers/GenericTokenExtractor.cs
@@ -0,0 +1,89 @@
+using System.Net.Http.Headers;
+using System.Text.Json;
+
+namespace AnchorCli.Providers;
+
+///
+/// Generic token extractor for any OpenAI-compatible endpoint.
+/// Tries common header names and JSON body parsing.
+///
+internal sealed class GenericTokenExtractor : ITokenExtractor
+{
+ public string ProviderName => "Generic";
+
+ public (int inputTokens, int outputTokens)? ExtractTokens(HttpResponseHeaders headers, string? responseBody)
+ {
+ // Try various common header names
+ var headerNames = new[] {
+ "x-total-tokens",
+ "x-ai-response-tokens",
+ "x-tokens",
+ "x-prompt-tokens",
+ "x-completion-tokens"
+ };
+
+ foreach (var headerName in headerNames)
+ {
+ if (headers.TryGetValues(headerName, out var values))
+ {
+ if (int.TryParse(values.FirstOrDefault(), out var tokens))
+ {
+ // Assume all tokens are output if we can't determine split
+ return (0, tokens);
+ }
+ }
+ }
+
+ // Fallback: try parsing from response body JSON
+ if (!string.IsNullOrEmpty(responseBody))
+ {
+ try
+ {
+ using var doc = JsonDocument.Parse(responseBody);
+ var root = doc.RootElement;
+
+ // Try standard OpenAI format: usage.prompt_tokens, usage.completion_tokens
+ if (root.TryGetProperty("usage", out var usage))
+ {
+ var prompt = usage.TryGetProperty("prompt_tokens", out var p) ? p.GetInt32() : 0;
+ var completion = usage.TryGetProperty("completion_tokens", out var c) ? c.GetInt32() : 0;
+
+ if (prompt > 0 || completion > 0)
+ {
+ return (prompt, completion);
+ }
+ }
+ }
+ catch
+ {
+ // Ignore parsing errors
+ }
+ }
+
+ return null;
+ }
+
+ public int? ExtractLatency(HttpResponseHeaders headers)
+ {
+ // Try various common latency headers
+ var headerNames = new[] {
+ "x-response-time",
+ "x-response-timing",
+ "x-latency-ms",
+ "x-duration-ms"
+ };
+
+ foreach (var headerName in headerNames)
+ {
+ if (headers.TryGetValues(headerName, out var values))
+ {
+ if (int.TryParse(values.FirstOrDefault(), out var latency))
+ {
+ return latency;
+ }
+ }
+ }
+
+ return null;
+ }
+}
diff --git a/Providers/GroqProvider.cs b/Providers/GroqProvider.cs
new file mode 100644
index 0000000..c0517eb
--- /dev/null
+++ b/Providers/GroqProvider.cs
@@ -0,0 +1,61 @@
+using System.Net.Http.Headers;
+
+namespace AnchorCli.Providers;
+
+///
+/// Token extractor for Groq responses.
+///
+internal sealed class GroqTokenExtractor : ITokenExtractor
+{
+ public string ProviderName => "Groq";
+
+ public (int inputTokens, int outputTokens)? ExtractTokens(HttpResponseHeaders headers, string? responseBody)
+ {
+ // Groq provides x-groq-tokens header (format: "n;,n;")
+ if (headers.TryGetValues("x-groq-tokens", out var values))
+ {
+ var tokenStr = values.FirstOrDefault();
+ if (!string.IsNullOrEmpty(tokenStr))
+ {
+ // Parse format: "n;123,n;45" where first is prompt, second is completion
+ var parts = tokenStr.Split(',');
+ if (parts.Length >= 2)
+ {
+ var inputPart = parts[0].Trim();
+ var outputPart = parts[1].Trim();
+
+ // Extract numbers after "n;"
+ if (inputPart.StartsWith("n;") && outputPart.StartsWith("n;"))
+ {
+ if (int.TryParse(inputPart[2..], out var input) &&
+ int.TryParse(outputPart[2..], out var output))
+ {
+ return (input, output);
+ }
+ }
+ }
+ }
+ }
+
+ // Fallback: try parsing from response body
+ if (!string.IsNullOrEmpty(responseBody))
+ {
+ // TODO: Parse usage from JSON body if headers aren't available
+ }
+
+ return null;
+ }
+
+ public int? ExtractLatency(HttpResponseHeaders headers)
+ {
+ if (headers.TryGetValues("x-groq-response-time", out var values))
+ {
+ if (int.TryParse(values.FirstOrDefault(), out var latency))
+ {
+ return latency;
+ }
+ }
+
+ return null;
+ }
+}
diff --git a/Providers/IPricingProvider.cs b/Providers/IPricingProvider.cs
new file mode 100644
index 0000000..9976928
--- /dev/null
+++ b/Providers/IPricingProvider.cs
@@ -0,0 +1,18 @@
+using AnchorCli.OpenRouter;
+namespace AnchorCli.Providers;
+
+///
+/// Interface for fetching model pricing information.
+///
+internal interface IPricingProvider
+{
+ ///
+ /// Fetches pricing info for a specific model.
+ ///
+ Task<ModelInfo?> GetModelInfoAsync(string modelId, CancellationToken ct = default);
+
+ ///
+ /// Fetches all available models with pricing.
+ ///
+ Task<Dictionary<string, ModelInfo>> GetAllModelsAsync(CancellationToken ct = default);
+}
diff --git a/Providers/ITokenExtractor.cs b/Providers/ITokenExtractor.cs
new file mode 100644
index 0000000..1a11b0c
--- /dev/null
+++ b/Providers/ITokenExtractor.cs
@@ -0,0 +1,25 @@
+using System.Net.Http.Headers;
+
+namespace AnchorCli.Providers;
+
+///
+/// Interface for extracting token usage from provider responses.
+///
+internal interface ITokenExtractor
+{
+ ///
+ /// Extracts token usage from response headers and/or body.
+ /// Returns (inputTokens, outputTokens) or null if unavailable.
+ ///
+ (int inputTokens, int outputTokens)? ExtractTokens(HttpResponseHeaders headers, string? responseBody);
+
+ ///
+ /// Gets the latency from response headers (in ms).
+ ///
+ int? ExtractLatency(HttpResponseHeaders headers);
+
+ ///
+ /// Gets the provider name for display purposes.
+ ///
+ string ProviderName { get; }
+}
diff --git a/Providers/OllamaTokenExtractor.cs b/Providers/OllamaTokenExtractor.cs
new file mode 100644
index 0000000..2b97c77
--- /dev/null
+++ b/Providers/OllamaTokenExtractor.cs
@@ -0,0 +1,39 @@
+using System.Net.Http.Headers;
+
+namespace AnchorCli.Providers;
+
+///
+/// Token extractor for Ollama responses.
+/// Ollama doesn't provide official token counts, so we estimate.
+///
+internal sealed class OllamaTokenExtractor : ITokenExtractor
+{
+ public string ProviderName => "Ollama";
+
+ public (int inputTokens, int outputTokens)? ExtractTokens(HttpResponseHeaders headers, string? responseBody)
+ {
+ // Ollama doesn't provide token headers
+ return null;
+ }
+
+ public int? ExtractLatency(HttpResponseHeaders headers)
+ {
+ // Ollama doesn't provide latency headers
+ return null;
+ }
+
+ ///
+ /// Estimates token count from text length (rough approximation).
+ /// Assumes ~4 characters per token on average.
+ ///
+ public static int EstimateTokens(string text)
+ {
+ if (string.IsNullOrEmpty(text))
+ {
+ return 0;
+ }
+
+ // Rough estimate: 4 characters per token
+ return text.Length / 4;
+ }
+}
diff --git a/Providers/OpenRouterProvider.cs b/Providers/OpenRouterProvider.cs
new file mode 100644
index 0000000..c942ec6
--- /dev/null
+++ b/Providers/OpenRouterProvider.cs
@@ -0,0 +1,40 @@
+using System.Net.Http.Json;
+using System.Text.Json;
+using AnchorCli.OpenRouter;
+
+namespace AnchorCli.Providers;
+
+///
+/// Pricing provider for OpenRouter API.
+///
+internal sealed class OpenRouterProvider : IPricingProvider
+{
+ private const string ModelsUrl = "https://openrouter.ai/api/v1/models";
+ private static readonly HttpClient Http = new();
+ private Dictionary<string, ModelInfo>? _models;
+
+ static OpenRouterProvider()
+ {
+ OpenRouterHeaders.ApplyTo(Http);
+ }
+
+ public async Task<Dictionary<string, ModelInfo>> GetAllModelsAsync(CancellationToken ct = default)
+ {
+ if (_models != null) return _models;
+
+ var response = await Http.GetAsync(ModelsUrl, ct);
+ response.EnsureSuccessStatusCode();
+
+ var json = await response.Content.ReadAsStringAsync(ct);
+ var result = JsonSerializer.Deserialize(json, AppJsonContext.Default.ModelsResponse);
+
+ _models = result?.Data?.ToDictionary(m => m.Id) ?? [];
+ return _models;
+ }
+
+ public async Task<ModelInfo?> GetModelInfoAsync(string modelId, CancellationToken ct = default)
+ {
+ var models = await GetAllModelsAsync(ct);
+ return models.GetValueOrDefault(modelId);
+ }
+}
diff --git a/Providers/OpenRouterTokenExtractor.cs b/Providers/OpenRouterTokenExtractor.cs
new file mode 100644
index 0000000..daa34cd
--- /dev/null
+++ b/Providers/OpenRouterTokenExtractor.cs
@@ -0,0 +1,42 @@
+using System.Net.Http.Headers;
+
+namespace AnchorCli.Providers;
+
+///
+/// Token extractor for OpenRouter responses.
+///
+internal sealed class OpenRouterTokenExtractor : ITokenExtractor
+{
+ public string ProviderName => "OpenRouter";
+
+ public (int inputTokens, int outputTokens)? ExtractTokens(HttpResponseHeaders headers, string? responseBody)
+ {
+ // OpenRouter provides x-total-tokens header
+ if (headers.TryGetValues("x-total-tokens", out var values))
+ {
+ // Note: OpenRouter only provides total tokens, not split
+ // We'll estimate split based on typical ratios if needed
+ if (long.TryParse(values.FirstOrDefault(), out var total))
+ {
+ // For now, return total as output (placeholder until we have better splitting)
+ // In practice, you'd need to track input separately from the request
+ return (0, (int)total);
+ }
+ }
+
+ return null;
+ }
+
+ public int? ExtractLatency(HttpResponseHeaders headers)
+ {
+ if (headers.TryGetValues("x-response-timing", out var values))
+ {
+ if (int.TryParse(values.FirstOrDefault(), out var latency))
+ {
+ return latency;
+ }
+ }
+
+ return null;
+ }
+}
diff --git a/Providers/ProviderFactory.cs b/Providers/ProviderFactory.cs
new file mode 100644
index 0000000..c9b77e9
--- /dev/null
+++ b/Providers/ProviderFactory.cs
@@ -0,0 +1,70 @@
+namespace AnchorCli.Providers;
+
+///
+/// Factory for creating provider instances based on endpoint or provider name.
+///
+internal static class ProviderFactory
+{
+ ///
+ /// Creates a token extractor based on the provider name.
+ ///
+ public static ITokenExtractor CreateTokenExtractor(string providerName)
+ {
+ return providerName.ToLowerInvariant() switch
+ {
+ "openrouter" => new OpenRouterTokenExtractor(),
+ "groq" => new GroqTokenExtractor(),
+ "ollama" => new OllamaTokenExtractor(),
+ _ => new GenericTokenExtractor()
+ };
+ }
+
+ ///
+ /// Creates a token extractor by auto-detecting from the endpoint URL.
+ ///
+ public static ITokenExtractor CreateTokenExtractorForEndpoint(string endpoint)
+ {
+ if (string.IsNullOrEmpty(endpoint))
+ {
+ return new GenericTokenExtractor();
+ }
+
+ var url = endpoint.ToLowerInvariant();
+
+ if (url.Contains("openrouter"))
+ {
+ return new OpenRouterTokenExtractor();
+ }
+
+ if (url.Contains("groq"))
+ {
+ return new GroqTokenExtractor();
+ }
+
+ if (url.Contains("ollama") || url.Contains("localhost") || url.Contains("127.0.0.1"))
+ {
+ return new OllamaTokenExtractor();
+ }
+
+ return new GenericTokenExtractor();
+ }
+
+ ///
+ /// Creates a pricing provider based on the provider name.
+ /// Only OpenRouter has a pricing API currently.
+ ///
+ public static IPricingProvider? CreatePricingProvider(string providerName)
+ {
+ return providerName.ToLowerInvariant() switch
+ {
+ "openrouter" => new OpenRouterProvider(),
+ _ => null // Other providers don't have pricing APIs yet
+ };
+ }
+
+ ///
+ /// Determines if an endpoint is OpenRouter.
+ ///
+ public static bool IsOpenRouter(string endpoint) =>
+ !string.IsNullOrEmpty(endpoint) && endpoint.Contains("openrouter", StringComparison.OrdinalIgnoreCase);
+}
diff --git a/SetupTui.cs b/SetupTui.cs
index 6b9ed7d..9b81df4 100644
--- a/SetupTui.cs
+++ b/SetupTui.cs
@@ -27,17 +27,93 @@ internal static class SetupTui
AnsiConsole.WriteLine();
+ // ── Provider ────────────────────────────────────────────────────
+ var providers = new List<(string Value, string Description)>
+ {
+ ("openrouter", "default, pricing support"),
+ ("groq", "high-speed inference"),
+ ("ollama", "local, no auth required"),
+ ("openai", "official OpenAI API"),
+ ("custom", "generic OpenAI-compatible endpoint")
+ };
+
+ string currentProvider = config.Provider ?? "openrouter";
+ AnsiConsole.MarkupLine($" Current provider: [cyan]{Markup.Escape(currentProvider)}[/]");
+
+ var selectedProviderChoice = AnsiConsole.Prompt(
+ new SelectionPrompt<(string Value, string Description)>()
+ .Title(" Select a provider:")
+ .UseConverter(p => p.Value + (string.IsNullOrEmpty(p.Description) ? "" : $" [dim]({p.Description})[/]"))
+ .AddChoices(providers));
+
+ config.Provider = selectedProviderChoice.Value;
+
+ if (config.Provider == "custom")
+ {
+ string customEndpoint = AnsiConsole.Prompt(
+ new TextPrompt<string>(" Enter endpoint URL:")
+ .DefaultValue(config.Endpoint)
+ .AllowEmpty());
+
+ if (!string.IsNullOrWhiteSpace(customEndpoint))
+ {
+ config.Endpoint = customEndpoint.Trim();
+ }
+ }
+ else
+ {
+ config.Endpoint = config.Provider.ToLowerInvariant() switch
+ {
+ "openrouter" => "https://openrouter.ai/api/v1",
+ "groq" => "https://api.groq.com/openai/v1",
+ "ollama" => "http://localhost:11434/v1",
+ "openai" => "https://api.openai.com/v1",
+ _ => config.Endpoint
+ };
+ }
+
+ AnsiConsole.WriteLine();
+
// ── Model ─────────────────────────────────────────────────────
AnsiConsole.MarkupLine($" Current model: [cyan]{Markup.Escape(config.Model)}[/]");
- var models = new List<(string Value, string Description)>
+ var models = config.Provider.ToLowerInvariant() switch
{
- ("qwen/qwen3.5-397b-a17b", "smart, expensive"),
- ("qwen/qwen3.5-122b-a10b", "faster"),
- ("qwen/qwen3.5-27b", "fast"),
- ("qwen/qwen3.5-flash-02-23", "cloud, fast"),
- ("qwen/qwen3.5-plus-02-15", "cloud, smart"),
- ("Custom...", "")
+ "groq" => new List<(string Value, string Description)>
+ {
+ ("llama-3.3-70b-versatile", "fast, powerful"),
+ ("llama-3.1-8b-instant", "very fast"),
+ ("mixtral-8x7b-32768", "sparse MoE"),
+ ("gemma2-9b-it", "Google's Gemma"),
+ ("Custom...", "")
+ },
+ "ollama" => new List<(string Value, string Description)>
+ {
+ ("llama3.2", "Meta's Llama 3.2"),
+ ("qwen2.5", "Alibaba Qwen"),
+ ("mistral", "Mistral AI"),
+ ("codellama", "code-focused"),
+ ("Custom...", "")
+ },
+ "openai" => new List<(string Value, string Description)>
+ {
+ ("gpt-4o", "most capable"),
+ ("gpt-4o-mini", "fast, affordable"),
+ ("o1-preview", "reasoning model"),
+ ("Custom...", "")
+ },
+ _ => new List<(string Value, string Description)>
+ {
+ ("qwen/qwen3.5-397b-a17b", "smart, expensive"),
+ ("qwen/qwen3.5-122b-a10b", "faster"),
+ ("qwen/qwen3.5-27b", "fast"),
+ ("qwen/qwen3.5-flash-02-23", "cloud, fast"),
+ ("qwen/qwen3.5-plus-02-15", "cloud, smart"),
+ ("Custom...", "")
+ }
};
string selectedModel = AnsiConsole.Prompt(