feat: Introduce a pluggable LLM provider system with token extraction, pricing, and updated setup configuration.
This commit is contained in:
39
Providers/OllamaTokenExtractor.cs
Normal file
39
Providers/OllamaTokenExtractor.cs
Normal file
@@ -0,0 +1,39 @@
|
||||
using System.Net.Http.Headers;
|
||||
|
||||
namespace AnchorCli.Providers;
|
||||
|
||||
/// <summary>
/// Token extractor for Ollama responses.
/// Ollama doesn't provide official token counts, so we estimate.
/// </summary>
internal sealed class OllamaTokenExtractor : ITokenExtractor
{
    /// <summary>Name of the provider this extractor handles.</summary>
    public string ProviderName => "Ollama";

    /// <summary>
    /// Extracts token usage from an HTTP response. Ollama exposes no
    /// token-count headers, so this always returns <see langword="null"/>;
    /// callers should fall back to <see cref="EstimateTokens"/>.
    /// </summary>
    /// <param name="headers">Response headers (unused — Ollama sends no token headers).</param>
    /// <param name="responseBody">Raw response body, if available (unused).</param>
    /// <returns>Always <see langword="null"/>.</returns>
    public (int inputTokens, int outputTokens)? ExtractTokens(HttpResponseHeaders headers, string? responseBody)
    {
        // Ollama doesn't provide token headers
        return null;
    }

    /// <summary>
    /// Extracts server-reported latency. Ollama exposes no latency
    /// headers, so this always returns <see langword="null"/>.
    /// </summary>
    /// <param name="headers">Response headers (unused — Ollama sends no latency headers).</param>
    /// <returns>Always <see langword="null"/>.</returns>
    public int? ExtractLatency(HttpResponseHeaders headers)
    {
        // Ollama doesn't provide latency headers
        return null;
    }

    /// <summary>
    /// Estimates token count from text length (rough approximation).
    /// Assumes ~4 characters per token on average.
    /// </summary>
    /// <param name="text">Text to estimate; <see langword="null"/> or empty yields 0.</param>
    /// <returns>Estimated token count; at least 1 for any non-empty text.</returns>
    public static int EstimateTokens(string text)
    {
        if (string.IsNullOrEmpty(text))
        {
            return 0;
        }

        // Ceiling division (~4 chars per token): plain truncation would
        // undercount short non-empty strings (1-3 chars) as 0 tokens.
        return (text.Length + 3) / 4;
    }
}
|
||||
Reference in New Issue
Block a user