1
0

feat: implement compact JSON serialization for history entries, improve history loading robustness, and add a new LLM model option

This commit is contained in:
2026-02-28 14:35:30 +01:00
parent ac4ef78c02
commit dcb2ab3968
4 changed files with 23 additions and 7 deletions

View File

@@ -26,8 +26,8 @@ public static class OnboardCommand
config.LlmModel = AnsiConsole.Prompt( config.LlmModel = AnsiConsole.Prompt(
new SelectionPrompt<string>() new SelectionPrompt<string>()
.Title("Select [green]LLM Model[/]:") .Title("Select [green]LLM Model[/]:")
.AddChoices(new[] { "openai/gpt-oss-20b", "llama-3.1-8b-instant" }) .AddChoices(new[] { "openai/gpt-oss-20b", "llama-3.1-8b-instant", "llama-3.3-70b-versatile" })
.UseConverter(c => c == "openai/gpt-oss-20b" ? "openai/gpt-oss-20b (Fastest)" : "llama-3.1-8b-instant (Cheapest)")); .UseConverter(c => c == "openai/gpt-oss-20b" ? "openai/gpt-oss-20b (Fastest)" : c == "llama-3.1-8b-instant" ? "llama-3.1-8b-instant (Cheapest)" : "llama-3.3-70b-versatile (More Accurate)"));
if (config.LlmModel.Contains(" ")) config.LlmModel = config.LlmModel.Split(' ')[0]; if (config.LlmModel.Contains(" ")) config.LlmModel = config.LlmModel.Split(' ')[0];

View File

@@ -38,7 +38,7 @@ public static class StatsCommand
.Select(g => g.Key) .Select(g => g.Key)
.ToList(); .ToList();
AnsiConsole.MarkupLine("[bold blue]Toak Usage Statistics[/]"); AnsiConsole.MarkupLine("\n[bold blue]Toak Usage Statistics[/]");
AnsiConsole.MarkupLine($"[dim]Total recordings:[/] {totalCount}"); AnsiConsole.MarkupLine($"[dim]Total recordings:[/] {totalCount}");
AnsiConsole.MarkupLine($"[dim]Total duration:[/] {totalDuration.TotalMinutes:F1}m"); AnsiConsole.MarkupLine($"[dim]Total duration:[/] {totalDuration.TotalMinutes:F1}m");
AnsiConsole.MarkupLine($"[dim]Average processing latency:[/] {avgDuration.TotalSeconds:F2}s"); AnsiConsole.MarkupLine($"[dim]Average processing latency:[/] {avgDuration.TotalSeconds:F2}s");

View File

@@ -30,7 +30,7 @@ public static class HistoryManager
DurationMs = durationMs DurationMs = durationMs
}; };
var json = JsonSerializer.Serialize(entry, AppJsonSerializerContext.Default.HistoryEntry); var json = JsonSerializer.Serialize(entry, CompactJsonSerializerContext.Default.HistoryEntry);
// Thread-safe append // Thread-safe append
lock (HistoryFile) lock (HistoryFile)
@@ -60,15 +60,25 @@ public static class HistoryManager
foreach (var line in lines) foreach (var line in lines)
{ {
if (string.IsNullOrWhiteSpace(line)) continue; if (string.IsNullOrWhiteSpace(line)) continue;
var entry = JsonSerializer.Deserialize(line, AppJsonSerializerContext.Default.HistoryEntry); if (!line.Trim().StartsWith("{") || !line.Trim().EndsWith("}")) continue; // Skip malformed old multiline json entries
if (entry != null)
try
{ {
entries.Add(entry); var entry = JsonSerializer.Deserialize(line, CompactJsonSerializerContext.Default.HistoryEntry);
if (entry != null)
{
entries.Add(entry);
}
}
catch
{
// Skip entry if deserialization fails
} }
} }
} }
catch (Exception ex) catch (Exception ex)
{ {
Console.WriteLine($"CRASH IN LOAD ENTRIES: {ex}");
Logger.LogDebug($"Failed to load history: {ex.Message}"); Logger.LogDebug($"Failed to load history: {ex.Message}");
} }

View File

@@ -23,3 +23,9 @@ namespace Toak.Serialization;
internal partial class AppJsonSerializerContext : JsonSerializerContext internal partial class AppJsonSerializerContext : JsonSerializerContext
{ {
} }
[JsonSourceGenerationOptions(WriteIndented = false, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(Toak.Core.HistoryEntry))]
internal partial class CompactJsonSerializerContext : JsonSerializerContext
{
}