1
0

feat: Introduce a pluggable LLM provider system with token extraction, pricing, and updated setup configuration.

This commit is contained in:
2026-03-05 22:02:22 +01:00
parent 4476cc7f15
commit c7e7976d9d
12 changed files with 499 additions and 28 deletions

View File

@@ -0,0 +1,42 @@
using System.Globalization;
using System.Linq;
using System.Net.Http.Headers;
namespace AnchorCli.Providers;
/// <summary>
/// Token extractor for OpenRouter responses. OpenRouter reports usage and timing
/// via custom response headers rather than in the response body.
/// </summary>
internal sealed class OpenRouterTokenExtractor : ITokenExtractor
{
    public string ProviderName => "OpenRouter";

    /// <summary>
    /// Extracts token usage from the <c>x-total-tokens</c> response header.
    /// </summary>
    /// <param name="headers">HTTP response headers from an OpenRouter call.</param>
    /// <param name="responseBody">Raw response body; unused — OpenRouter reports usage in headers.</param>
    /// <returns>
    /// <c>(0, total)</c> when the header is present and parses as a non-negative integer;
    /// otherwise <c>null</c>. OpenRouter only provides a combined total, not an
    /// input/output split, so the input portion is reported as 0 until the caller
    /// tracks input tokens separately from the request side.
    /// </returns>
    public (int inputTokens, int outputTokens)? ExtractTokens(HttpResponseHeaders headers, string? responseBody)
    {
        if (!headers.TryGetValues("x-total-tokens", out var values))
        {
            return null;
        }

        // Headers are machine-generated: parse with the invariant culture (CA1305).
        // Reject negative values and clamp to int range so an oversized total
        // cannot overflow the narrowing cast into a garbage/negative count.
        if (long.TryParse(values.FirstOrDefault(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var total)
            && total >= 0)
        {
            return (0, (int)Math.Min(total, int.MaxValue));
        }

        return null;
    }

    /// <summary>
    /// Extracts response latency from the <c>x-response-timing</c> header.
    /// </summary>
    /// <param name="headers">HTTP response headers from an OpenRouter call.</param>
    /// <returns>The parsed latency value, or <c>null</c> when the header is absent or malformed.</returns>
    public int? ExtractLatency(HttpResponseHeaders headers)
    {
        // Invariant culture for machine-generated header text (CA1305).
        if (headers.TryGetValues("x-response-timing", out var values)
            && int.TryParse(values.FirstOrDefault(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var latency))
        {
            return latency;
        }

        return null;
    }
}