1
0

feat: Introduce a pluggable LLM provider system with token extraction, pricing, and updated setup configuration.

This commit is contained in:
2026-03-05 22:02:22 +01:00
parent 4476cc7f15
commit c7e7976d9d
12 changed files with 499 additions and 28 deletions

View File

@@ -0,0 +1,40 @@
using System.Net.Http.Json;
using System.Text.Json;
using AnchorCli.OpenRouter;
namespace AnchorCli.Providers;
/// <summary>
/// Pricing provider backed by the OpenRouter models API.
/// Fetches the model catalog once per instance and serves lookups from the cache.
/// </summary>
internal sealed class OpenRouterProvider : IPricingProvider
{
    private const string ModelsUrl = "https://openrouter.ai/api/v1/models";

    // One shared client for the lifetime of the process; creating a client
    // per request exhausts sockets. Headers are applied once in the cctor.
    private static readonly HttpClient Http = new();

    // Lazily-populated cache of the model catalog, keyed by model id.
    // NOTE(review): reads/writes are not synchronized — concurrent first calls
    // may each issue a fetch; the last writer wins. Harmless for identical
    // responses, but confirm callers don't require a single fetch.
    private Dictionary<string, ModelInfo>? _models;

    static OpenRouterProvider()
    {
        OpenRouterHeaders.ApplyTo(Http);
    }

    /// <summary>
    /// Returns all models known to OpenRouter, keyed by model id.
    /// The catalog is downloaded on first use and cached thereafter.
    /// </summary>
    /// <param name="ct">Token to cancel the HTTP request.</param>
    /// <exception cref="HttpRequestException">The API returned a non-success status.</exception>
    public async Task<Dictionary<string, ModelInfo>> GetAllModelsAsync(CancellationToken ct = default)
    {
        if (_models != null) return _models;

        // GetFromJsonAsync performs the success-status check and deserializes
        // straight from the response stream using the source-generated type
        // info, instead of buffering the whole body as a string first.
        var result = await Http.GetFromJsonAsync(ModelsUrl, AppJsonContext.Default.ModelsResponse, ct);

        _models = result?.Data?.ToDictionary(m => m.Id) ?? [];
        return _models;
    }

    /// <summary>
    /// Looks up a single model by id, or returns null when the id is unknown.
    /// </summary>
    /// <param name="modelId">OpenRouter model identifier (e.g. vendor/model-name).</param>
    /// <param name="ct">Token to cancel the underlying catalog fetch, if one is needed.</param>
    public async Task<ModelInfo?> GetModelInfoAsync(string modelId, CancellationToken ct = default)
    {
        var models = await GetAllModelsAsync(ct);
        return models.GetValueOrDefault(modelId);
    }
}