1
0

initial commit

This commit is contained in:
2026-03-04 07:59:35 +01:00
commit 3ceb0e4884
27 changed files with 2280 additions and 0 deletions

48
OpenRouter/ModelInfo.cs Normal file
View File

@@ -0,0 +1,48 @@
using System.Text.Json.Serialization;
namespace AnchorCli.OpenRouter;
/// <summary>
/// Represents the response from OpenRouter's /api/v1/models endpoint.
/// </summary>
/// <summary>
/// Envelope type for OpenRouter's /api/v1/models endpoint; the model list
/// arrives under a top-level "data" key.
/// </summary>
internal sealed class ModelsResponse
{
    /// <summary>All models known to OpenRouter; empty when none were returned.</summary>
    [JsonPropertyName("data")]
    public List<ModelInfo> Data { get; set; } = new();
}
/// <summary>
/// A single model entry from the OpenRouter API.
/// </summary>
/// <summary>
/// One model entry as returned by the OpenRouter models API.
/// </summary>
internal sealed class ModelInfo
{
    /// <summary>Model identifier (e.g. "vendor/model-name"); used as the lookup key.</summary>
    [JsonPropertyName("id")]
    public string Id { get; set; } = string.Empty;

    /// <summary>Human-readable display name.</summary>
    [JsonPropertyName("name")]
    public string Name { get; set; } = string.Empty;

    /// <summary>Per-token/per-request pricing; null when the API omits it.</summary>
    [JsonPropertyName("pricing")]
    public ModelPricing? Pricing { get; set; }

    /// <summary>Maximum context window in tokens; 0 when absent from the payload.</summary>
    [JsonPropertyName("context_length")]
    public int ContextLength { get; set; }
}
/// <summary>
/// Pricing info for a model. All values are USD per token (as strings).
/// </summary>
/// <summary>
/// Pricing for one model. OpenRouter serializes every price as a decimal
/// string; <see cref="Prompt"/> and <see cref="Completion"/> are USD per
/// token, <see cref="Request"/> is a flat USD charge per API call.
/// </summary>
internal sealed class ModelPricing
{
    /// <summary>USD per input (prompt) token, as a string; defaults to "0".</summary>
    [JsonPropertyName("prompt")]
    public string Prompt { get; set; } = "0";

    /// <summary>USD per output (completion) token, as a string; defaults to "0".</summary>
    [JsonPropertyName("completion")]
    public string Completion { get; set; } = "0";

    /// <summary>Fixed USD cost added to every request, as a string; defaults to "0".</summary>
    [JsonPropertyName("request")]
    public string Request { get; set; } = "0";
}

View File

@@ -0,0 +1,52 @@
using System.Globalization;
using System.Net.Http.Json;
using System.Text.Json;
namespace AnchorCli.OpenRouter;
/// <summary>
/// Fetches and caches model pricing from the OpenRouter API.
/// </summary>
/// <summary>
/// Fetches and caches model pricing from the OpenRouter API.
/// </summary>
internal sealed class PricingProvider
{
    private const string ModelsUrl = "https://openrouter.ai/api/v1/models";

    // One shared client for the process; creating HttpClient per call
    // exhausts sockets.
    private static readonly HttpClient Http = new();

    // Null until the first successful fetch, then reused for the session.
    private Dictionary<string, ModelInfo>? _models;

    /// <summary>
    /// Fetches the full model list from OpenRouter (cached after first call).
    /// </summary>
    /// <param name="ct">Cancels the HTTP request and body read.</param>
    /// <returns>Models keyed by their OpenRouter id; empty if the payload had none.</returns>
    /// <exception cref="HttpRequestException">The endpoint returned a non-success status.</exception>
    public async Task<Dictionary<string, ModelInfo>> GetAllModelsAsync(
        CancellationToken ct = default)
    {
        if (_models != null) return _models;

        // Fix: dispose the response message so the connection/body buffers are
        // released deterministically (the original leaked it to the GC).
        using var response = await Http.GetAsync(ModelsUrl, ct);
        response.EnsureSuccessStatusCode();

        var json = await response.Content.ReadAsStringAsync(ct);
        var result = JsonSerializer.Deserialize(json, AppJsonContext.Default.ModelsResponse);

        // NOTE(review): cache fill is not thread-safe; concurrent first calls
        // may each fetch once. Fine for a single-user CLI — confirm if this
        // ever runs on multiple threads.
        _models = result?.Data?.ToDictionary(m => m.Id) ?? [];
        return _models;
    }

    /// <summary>
    /// Looks up pricing for a specific model ID. Returns null if not found.
    /// </summary>
    /// <param name="modelId">Exact OpenRouter model id (case-sensitive dictionary key).</param>
    /// <param name="ct">Cancels an underlying fetch if the cache is cold.</param>
    public async Task<ModelInfo?> GetModelInfoAsync(
        string modelId, CancellationToken ct = default)
    {
        var models = await GetAllModelsAsync(ct);
        return models.GetValueOrDefault(modelId);
    }

    /// <summary>
    /// Parses a pricing string (USD per token) to decimal. Returns 0 on failure
    /// (including null input), using invariant culture so "0.001" parses the
    /// same regardless of machine locale.
    /// </summary>
    public static decimal ParsePrice(string? priceStr) =>
        decimal.TryParse(priceStr, NumberStyles.Float, CultureInfo.InvariantCulture, out var v)
            ? v
            : 0m;
}

View File

@@ -0,0 +1,80 @@
namespace AnchorCli.OpenRouter;
/// <summary>
/// Tracks token usage and calculates costs for the session.
/// </summary>
/// <summary>
/// Accumulates token usage across the session and turns it into USD costs
/// using per-token prices set by the caller.
/// </summary>
internal sealed class TokenTracker
{
    /// <summary>Input tokens accumulated over the whole session.</summary>
    public long SessionInputTokens { get; private set; }

    /// <summary>Output tokens accumulated over the whole session.</summary>
    public long SessionOutputTokens { get; private set; }

    /// <summary>How many responses have been recorded via <see cref="AddUsage"/>.</summary>
    public int RequestCount { get; private set; }

    /// <summary>Maximum context window for the model (tokens). 0 = unknown.</summary>
    public int ContextLength { get; set; }

    /// <summary>Input tokens of the most recent response — a proxy for current context size.</summary>
    public int LastInputTokens { get; private set; }

    /// <summary>USD per input token.</summary>
    public decimal InputPrice { get; set; }

    /// <summary>USD per output token.</summary>
    public decimal OutputPrice { get; set; }

    /// <summary>Fixed USD per API request.</summary>
    public decimal RequestPrice { get; set; }

    /// <summary>
    /// Record usage from one response (may span multiple LLM rounds).
    /// </summary>
    public void AddUsage(int inputTokens, int outputTokens)
    {
        SessionInputTokens += inputTokens;
        SessionOutputTokens += outputTokens;
        LastInputTokens = inputTokens;
        RequestCount++;
    }

    /// <summary>
    /// Returns true once the context is large enough to warrant compaction.
    /// The trigger point is min(75% of the model context, 100K tokens); with
    /// no known context length, the flat 100K cap applies.
    /// </summary>
    public bool ShouldCompact()
    {
        if (LastInputTokens <= 0)
        {
            return false;
        }

        const int HardCap = 100_000;
        var limit = HardCap;
        if (ContextLength > 0)
        {
            limit = Math.Min((int)(ContextLength * 0.75), HardCap);
        }
        return LastInputTokens >= limit;
    }

    /// <summary>Context usage as a percentage (0-100). Returns -1 if context length is unknown.</summary>
    public double ContextUsagePercent
    {
        get
        {
            if (ContextLength <= 0 || LastInputTokens <= 0)
            {
                return -1;
            }
            return (double)LastInputTokens / ContextLength * 100.0;
        }
    }

    /// <summary>
    /// Cost of a single response: per-token charges plus the flat request fee.
    /// </summary>
    public decimal CalculateCost(int inputTokens, int outputTokens)
    {
        var inputCost = inputTokens * InputPrice;
        var outputCost = outputTokens * OutputPrice;
        return inputCost + outputCost + RequestPrice;
    }

    /// <summary>
    /// Total session cost across every recorded response.
    /// </summary>
    public decimal SessionCost
    {
        get
        {
            var perToken = SessionInputTokens * InputPrice + SessionOutputTokens * OutputPrice;
            return perToken + RequestCount * RequestPrice;
        }
    }

    /// <summary>Formats a token count compactly: "850" below 1000, "12.3k" above.</summary>
    public static string FormatTokens(long count)
    {
        if (count < 1_000)
        {
            return count.ToString("N0");
        }
        return $"{count / 1_000.0:F1}k";
    }

    /// <summary>Formats a USD cost: four decimals under a cent, else two.</summary>
    public static string FormatCost(decimal cost)
    {
        if (cost < 0.01m)
        {
            return $"${cost:F4}";
        }
        return $"${cost:F2}";
    }
}