1
0

Compare commits

...

4 Commits

7 changed files with 282 additions and 465 deletions

View File

@@ -71,10 +71,10 @@ internal sealed partial class ContextCompactor(IChatClient client)
string reason = ""; string reason = "";
// Rule 1: Deduplication. If we have already seen this file in a newer message (since we are walking backward), redact this one. // Rule 1: Deduplication. If we have already seen this file in a newer message (since we are walking backward), redact this one.
if (filesRead.TryGetValue(filePath, out int count) && count >= 3) if (filesRead.TryGetValue(filePath, out int count) && count >= 5)
{ {
shouldRedact = true; shouldRedact = true;
reason = "deduplication — you read this file 3 or more times later"; reason = "deduplication — you read this file 5 or more times later";
} }
// Rule 2: TTL. If this was read 2 or more user turns ago, redact it. // Rule 2: TTL. If this was read 2 or more user turns ago, redact it.
else if (userTurnsSeen >= 2) else if (userTurnsSeen >= 2)

View File

@@ -12,23 +12,18 @@ internal static class ToolRegistry
return new List<AITool> return new List<AITool>
{ {
AIFunctionFactory.Create(FileTools.ReadFile, serializerOptions: jsonOptions), AIFunctionFactory.Create(FileTools.ReadFile, serializerOptions: jsonOptions),
AIFunctionFactory.Create(FileTools.GrepFile, serializerOptions: jsonOptions), AIFunctionFactory.Create(FileTools.Grep, serializerOptions: jsonOptions),
AIFunctionFactory.Create(FileTools.ListDir, serializerOptions: jsonOptions), AIFunctionFactory.Create(FileTools.ListDir, serializerOptions: jsonOptions),
AIFunctionFactory.Create(EditTools.ReplaceLines, serializerOptions: jsonOptions), AIFunctionFactory.Create(EditTools.ReplaceLines, serializerOptions: jsonOptions),
AIFunctionFactory.Create(EditTools.InsertAfter, serializerOptions: jsonOptions),
AIFunctionFactory.Create(EditTools.DeleteRange, serializerOptions: jsonOptions), AIFunctionFactory.Create(EditTools.DeleteRange, serializerOptions: jsonOptions),
AIFunctionFactory.Create(EditTools.CreateFile, serializerOptions: jsonOptions), AIFunctionFactory.Create(EditTools.Delete, serializerOptions: jsonOptions),
AIFunctionFactory.Create(EditTools.DeleteFile, serializerOptions: jsonOptions),
AIFunctionFactory.Create(EditTools.RenameFile, serializerOptions: jsonOptions),
AIFunctionFactory.Create(EditTools.CopyFile, serializerOptions: jsonOptions),
AIFunctionFactory.Create(DirTools.CreateDir, serializerOptions: jsonOptions),
AIFunctionFactory.Create(DirTools.RenameDir, serializerOptions: jsonOptions),
AIFunctionFactory.Create(DirTools.DeleteDir, serializerOptions: jsonOptions),
AIFunctionFactory.Create(FileTools.FindFiles, serializerOptions: jsonOptions), AIFunctionFactory.Create(FileTools.FindFiles, serializerOptions: jsonOptions),
AIFunctionFactory.Create(FileTools.GrepRecursive, serializerOptions: jsonOptions),
AIFunctionFactory.Create(FileTools.GetFileInfo, serializerOptions: jsonOptions), AIFunctionFactory.Create(FileTools.GetFileInfo, serializerOptions: jsonOptions),
AIFunctionFactory.Create(EditTools.AppendToFile, serializerOptions: jsonOptions), AIFunctionFactory.Create(EditTools.WriteToFile, serializerOptions: jsonOptions),
AIFunctionFactory.Create(CommandTool.ExecuteCommand, serializerOptions: jsonOptions), AIFunctionFactory.Create(CommandTool.ExecuteCommand, serializerOptions: jsonOptions),
AIFunctionFactory.Create(EditTools.MoveFile, serializerOptions: jsonOptions),
AIFunctionFactory.Create(DirTools.RenameDir, serializerOptions: jsonOptions),
AIFunctionFactory.Create(DirTools.CreateDir, serializerOptions: jsonOptions),
}; };
} }
} }

View File

@@ -39,27 +39,6 @@ internal static class DirTools
} }
} }
[Description("Delete a directory and all its contents permanently.")]
public static string DeleteDir(
    [Description("Path to the directory to delete.")] string path,
    [Description("If true, delete recursively. Defaults to true.")] bool recursive = true)
{
    // Normalize the caller-supplied path before touching the disk.
    path = ResolvePath(path);
    Log($"Deleting directory: {path}");

    if (!Directory.Exists(path))
        return $"ERROR: Directory not found: {path}";

    try
    {
        // Directory.Delete throws if recursive is false and the directory is non-empty.
        Directory.Delete(path, recursive);
    }
    catch (Exception ex)
    {
        return $"ERROR deleting directory '{path}': {ex.Message}";
    }
    return $"OK: Directory deleted: '{path}'";
}
[Description("Create a new directory. Creates parent directories if they don't exist. Returns OK on success, or an error message if the directory already exists or creation fails.")] [Description("Create a new directory. Creates parent directories if they don't exist. Returns OK on success, or an error message if the directory already exists or creation fails.")]
public static string CreateDir( public static string CreateDir(
[Description("Path to the directory to create.")] string path) [Description("Path to the directory to create.")] string path)

View File

@@ -90,44 +90,6 @@ internal static partial class EditTools
} }
} }
[Description("Insert lines after the specified line:hash anchor.")]
public static string InsertAfter(
    [Description("Path to the file.")] string path,
    [Description("line:hash anchor to insert after (e.g. '3:0e').")] string anchor,
    [Description("Raw source code to insert. Do NOT include 'lineNumber:hash|' prefixes.")] string[] newLines)
{
    // Strip any accidental "line:hash|" prefixes echoed back by the model.
    newLines = SanitizeNewLines(newLines);
    path = FileTools.ResolvePath(path);
    Log($"INSERT_AFTER: {path}");
    Log($" Anchor: {anchor}");
    Log($" Inserting {newLines.Length} lines after line {anchor.Split(':')[0]}");

    if (!File.Exists(path))
        return $"ERROR: File not found: {path}";

    try
    {
        string[] original = File.ReadAllLines(path);
        // The anchor must match the file's current content; a stale anchor is rejected here.
        if (!HashlineValidator.TryResolve(anchor, original, out int idx, out string error))
            return $"ERROR: {error}";

        // Splice the new lines in directly after the anchored line.
        var updated = new List<string>(original);
        updated.InsertRange(idx + 1, newLines);
        File.WriteAllLines(path, updated);
        return $"OK fp:{HashlineEncoder.FileFingerprint([.. updated])}";
    }
    catch (Exception ex)
    {
        return $"ERROR modifying '{path}': {ex.Message}";
    }
}
[Description("Delete a range of lines.")] [Description("Delete a range of lines.")]
public static string DeleteRange( public static string DeleteRange(
@@ -162,153 +124,162 @@ internal static partial class EditTools
} }
} }
[Description("Create a new file (parents auto-created). Max initial lines: 200. Alternatively, append lines later.")]
public static string CreateFile(
[Description("Path to the new file to create.")] string path, [Description("Delete a file or directory. Use mode='file' to delete a file, mode='dir' to delete a directory.")]
[Description("Optional initial raw source code. Do NOT include 'lineNumber:hash|' prefixes.")] string[]? initialLines = null) public static string Delete(
[Description("Path to the file or directory to delete.")] string path,
[Description("Type of deletion: 'file' or 'dir'. Defaults to 'file'.")] string mode = "file")
{ {
path = FileTools.ResolvePath(path); path = FileTools.ResolvePath(path);
Log($"Creating file: {path}"); string targetType = mode.ToLower() == "dir" ? "directory" : "file";
Log($"Deleting {targetType}: {path}");
if (File.Exists(path)) if (mode.ToLower() == "dir")
return $"ERROR: File already exists: {path}";
try
{ {
if (initialLines is not null) if (!Directory.Exists(path))
initialLines = SanitizeNewLines(initialLines); return $"ERROR: Directory not found: {path}";
string? dir = Path.GetDirectoryName(path);
if (!string.IsNullOrWhiteSpace(dir) && !Directory.Exists(dir))
Directory.CreateDirectory(dir);
if (initialLines is not null && initialLines.Length > 0) try
File.WriteAllLines(path, initialLines); {
else Directory.Delete(path, true);
File.WriteAllText(path, ""); return $"OK: Directory deleted: '{path}'";
}
return $"OK fp:{HashlineEncoder.FileFingerprint(initialLines ?? [])}"; catch (Exception ex)
{
return $"ERROR deleting directory '{path}': {ex.Message}";
}
} }
catch (Exception ex) else
{ {
return $"ERROR creating '{path}': {ex.Message}";
}
}
[Description("Delete a file permanently.")]
public static string DeleteFile(
    [Description("Path to the file to delete.")] string path)
{
    // Resolve relative input to the workspace path before any checks.
    path = FileTools.ResolvePath(path);
    Log($"Deleting file: {path}");

    if (!File.Exists(path))
        return $"ERROR: File not found: {path}";

    try
    {
        File.Delete(path);
    }
    catch (Exception ex)
    {
        return $"ERROR deleting '{path}': {ex.Message}";
    }
    return $"OK (deleted)";
}
[Description("Rename or move a file. Auto-creates target dirs.")]
public static string RenameFile(
    [Description("Current path to the file.")] string sourcePath,
    [Description("New path for the file.")] string destinationPath)
{
    sourcePath = FileTools.ResolvePath(sourcePath);
    destinationPath = FileTools.ResolvePath(destinationPath);
    Log($"Renaming file: {sourcePath} -> {destinationPath}");

    // Guard clauses: the source must exist and we never clobber an existing target.
    if (!File.Exists(sourcePath))
        return $"ERROR: Source file not found: {sourcePath}";
    if (File.Exists(destinationPath))
        return $"ERROR: Destination file already exists: {destinationPath}";

    try
    {
        // Create the destination's parent directory on demand.
        string? targetDir = Path.GetDirectoryName(destinationPath);
        if (!string.IsNullOrWhiteSpace(targetDir) && !Directory.Exists(targetDir))
            Directory.CreateDirectory(targetDir);

        File.Move(sourcePath, destinationPath);
        return $"OK (moved to {destinationPath})";
    }
    catch (Exception ex)
    {
        return $"ERROR moving file: {ex.Message}";
    }
}
[Description("Copy a file to a new location.")]
public static string CopyFile(
    [Description("Path to the existing file.")] string sourcePath,
    [Description("Path for the copy.")] string destinationPath)
{
    sourcePath = FileTools.ResolvePath(sourcePath);
    destinationPath = FileTools.ResolvePath(destinationPath);
    Log($"Copying file: {sourcePath} -> {destinationPath}");

    // Guard clauses: require an existing source and refuse to overwrite the target.
    if (!File.Exists(sourcePath))
        return $"ERROR: Source file not found: {sourcePath}";
    if (File.Exists(destinationPath))
        return $"ERROR: Destination file already exists: {destinationPath}";

    try
    {
        // Create the destination's parent directory on demand.
        string? targetDir = Path.GetDirectoryName(destinationPath);
        if (!string.IsNullOrWhiteSpace(targetDir) && !Directory.Exists(targetDir))
            Directory.CreateDirectory(targetDir);

        File.Copy(sourcePath, destinationPath);
        return $"OK (copied to {destinationPath})";
    }
    catch (Exception ex)
    {
        return $"ERROR copying file: {ex.Message}";
    }
}
[Description("Append lines to EOF (auto-creating the file if missing).")]
public static string AppendToFile(
[Description("Path to the file to append to.")] string path,
[Description("Raw source code to append. Do NOT include 'lineNumber:hash|' prefixes.")] string[] lines)
{
lines = SanitizeNewLines(lines);
path = FileTools.ResolvePath(path);
Log($"Appending to file: {path}");
Log($" Appending {lines.Length} lines");
try
{
string? dir = Path.GetDirectoryName(path);
if (!string.IsNullOrWhiteSpace(dir) && !Directory.Exists(dir))
Directory.CreateDirectory(dir);
if (!File.Exists(path)) if (!File.Exists(path))
{ return $"ERROR: File not found: {path}";
File.WriteAllText(path, "");
Log($" (created new file)");
}
using (var writer = new System.IO.StreamWriter(path, true)) try
{ {
foreach (var line in lines) File.Delete(path);
{ return $"OK (deleted)";
writer.WriteLine(line);
}
} }
catch (Exception ex)
{
return $"ERROR deleting '{path}': {ex.Message}";
}
}
}
string[] allLines = File.ReadAllLines(path); [Description("Move or copy a file to a new location.")]
return $"OK fp:{HashlineEncoder.FileFingerprint([.. allLines])}"; public static string MoveFile(
[Description("Current path to the file.")] string sourcePath,
[Description("New path for the file.")] string destinationPath,
[Description("If true, copy the file instead of moving it. Defaults to false.")] bool copy = false)
{
sourcePath = FileTools.ResolvePath(sourcePath);
destinationPath = FileTools.ResolvePath(destinationPath);
string action = copy ? "Copying" : "Moving";
Log($"{action} file: {sourcePath} -> {destinationPath}");
if (!File.Exists(sourcePath))
return $"ERROR: Source file not found: {sourcePath}";
if (File.Exists(destinationPath))
return $"ERROR: Destination file already exists: {destinationPath}";
try
{
string? dir = Path.GetDirectoryName(destinationPath);
if (!string.IsNullOrWhiteSpace(dir) && !Directory.Exists(dir))
Directory.CreateDirectory(dir);
if (copy)
File.Copy(sourcePath, destinationPath);
else
File.Move(sourcePath, destinationPath);
return copy ? $"OK (copied to {destinationPath})" : $"OK (moved to {destinationPath})";
} }
catch (Exception ex) catch (Exception ex)
{ {
return $"ERROR appending to '{path}': {ex.Message}"; return $"ERROR {action.ToLower()} file: {ex.Message}";
}
}
[Description("Write to a file with different modes: create, append, or insert.")]
public static string WriteToFile(
    [Description("Path to the file.")] string path,
    [Description("Content to write.")] string[] content,
    [Description("Write mode: 'create' (error if exists), 'append' (creates if missing), 'insert' (requires anchor)")] string mode = "create",
    [Description("line:hash anchor to insert after (required for mode='insert', e.g. '3:0e').")] string? anchor = null)
{
    // Strip any accidental "line:hash|" prefixes echoed back by the model.
    content = SanitizeNewLines(content);
    path = FileTools.ResolvePath(path);
    Log($"WRITE_TO_FILE: {path}");
    Log($" Mode: {mode}");
    Log($" Writing {content.Length} lines");
    try
    {
        // Parent directories are auto-created for every mode.
        string? dir = Path.GetDirectoryName(path);
        if (!string.IsNullOrWhiteSpace(dir) && !Directory.Exists(dir))
            Directory.CreateDirectory(dir);
        // FIX: use invariant lowercasing. Culture-sensitive ToLower() breaks mode
        // matching under e.g. the Turkish locale, where "INSERT" lowercases to
        // "ınsert" (dotless ı) and would fall through to the error branch.
        switch (mode.ToLowerInvariant())
        {
            case "create":
                // Refuse to overwrite; callers use 'append'/'insert' for existing files.
                if (File.Exists(path))
                    return $"ERROR: File already exists: {path}";
                if (content.Length > 0)
                    File.WriteAllLines(path, content);
                else
                    File.WriteAllText(path, "");
                return $"OK fp:{HashlineEncoder.FileFingerprint(content)}";
            case "append":
                if (!File.Exists(path))
                {
                    File.WriteAllText(path, "");
                    Log($" (created new file)");
                }
                using (var writer = new System.IO.StreamWriter(path, true))
                {
                    foreach (var line in content)
                    {
                        writer.WriteLine(line);
                    }
                }
                // Re-read after the writer is disposed so the fingerprint reflects
                // the fully flushed file.
                string[] appendedLines = File.ReadAllLines(path);
                return $"OK fp:{HashlineEncoder.FileFingerprint([.. appendedLines])}";
            case "insert":
                if (!File.Exists(path))
                    return $"ERROR: File not found: {path}";
                if (string.IsNullOrEmpty(anchor))
                    return "ERROR: mode='insert' requires an anchor parameter";
                string[] lines = File.ReadAllLines(path);
                // The anchor must match the file's current content; stale anchors are rejected.
                if (!HashlineValidator.TryResolve(anchor, lines, out int idx, out string error))
                    return $"ERROR: {error}";
                var result = new List<string>(lines.Length + content.Length);
                result.AddRange(lines[..(idx + 1)]);
                result.AddRange(content);
                result.AddRange(lines[(idx + 1)..]);
                File.WriteAllLines(path, result);
                return $"OK fp:{HashlineEncoder.FileFingerprint([.. result])}";
            default:
                return $"ERROR: Unknown mode '{mode}'. Valid modes: create, append, insert";
        }
    }
    catch (Exception ex)
    {
        return $"ERROR writing to '{path}': {ex.Message}";
    }
}

View File

@@ -56,54 +56,6 @@ internal static class FileTools
} }
} }
[Description("Search a file for a regex pattern. Returns matches with line:hash| anchors.")]
public static string GrepFile(
    [Description("Path to the file to search.")] string path,
    [Description("Regex pattern.")] string pattern)
{
    path = ResolvePath(path);
    Log($"Searching file: {path}");

    if (!File.Exists(path))
        return $"ERROR: File not found: {path}";

    // Compile the pattern up front so an invalid regex is reported distinctly
    // from file-read failures.
    Regex regex;
    try
    {
        regex = new Regex(pattern, RegexOptions.Compiled | RegexOptions.IgnoreCase);
    }
    catch (Exception ex)
    {
        return $"ERROR: Invalid regex pattern '{pattern}': {ex.Message}";
    }

    try
    {
        var output = new System.Text.StringBuilder();
        int hits = 0;
        int lineNumber = 0;
        foreach (string line in File.ReadAllLines(path))
        {
            lineNumber++;
            if (!regex.IsMatch(line))
                continue;
            // Emit "line:hash|content" so the model can reuse the anchor in edit tools.
            string hash = HashlineEncoder.ComputeHash(line.AsSpan(), lineNumber);
            output.Append(lineNumber).Append(':').Append(hash).Append('|').AppendLine(line);
            hits++;
        }
        return hits == 0
            ? $"(no matches for '{pattern}' in {path})"
            : output.ToString();
    }
    catch (Exception ex)
    {
        return $"ERROR searching '{path}': {ex.Message}";
    }
}
[Description("List files and subdirectories.")] [Description("List files and subdirectories.")]
public static string ListDir( public static string ListDir(
@@ -174,75 +126,136 @@ internal static class FileTools
} }
} }
[Description("Recursive regex search across all files. Returns matches with file:line:hash| format.")]
public static string GrepRecursive( [Description("Consolidated grep operation for single file or recursive directory search.")]
[Description("Directory to search.")] string path, public static string Grep(
[Description("Directory to search (for recursive mode) or file path (for file mode).")] string path,
[Description("Regex pattern.")] string pattern, [Description("Regex pattern.")] string pattern,
[Description("Optional glob to filter files (e.g. '*.cs').")] string? filePattern = null) [Description("Mode: 'file' for single file, 'recursive' for directory search.")] string mode = "recursive",
[Description("Optional glob to filter files in recursive mode (e.g. '*.cs').")] string? filePattern = null)
{ {
path = ResolvePath(path); path = ResolvePath(path);
Log($"Recursive grep: {pattern} in {path}" + (filePattern != null ? $" (files: {filePattern})" : "")); mode = mode.ToLowerInvariant();
if (!Directory.Exists(path)) if (mode == "file")
return $"ERROR: Directory not found: {path}";
Regex regex;
try
{ {
regex = new Regex(pattern, RegexOptions.Compiled | RegexOptions.IgnoreCase); Log($"Searching file: {path}");
}
catch (Exception ex)
{
return $"ERROR: Invalid regex pattern '{pattern}': {ex.Message}";
}
try if (!File.Exists(path))
{ if (Directory.Exists(path))
string globPattern = filePattern?.Replace("**/", "") ?? "*"; return $"ERROR: {path} is a directory, not a file.";
var sb = new System.Text.StringBuilder(); else
int totalMatches = 0; return $"ERROR: File not found: {path}";
foreach (var file in EnumerateFilesRecursive(path, globPattern)) Regex regex;
try
{ {
try regex = new Regex(pattern, RegexOptions.Compiled | RegexOptions.IgnoreCase);
{ }
// Skip binary files: check first 512 bytes for null chars catch (Exception ex)
using var probe = new StreamReader(file); {
var buf = new char[512]; return $"ERROR: Invalid regex pattern '{pattern}': {ex.Message}";
int read = probe.Read(buf, 0, buf.Length);
if (new ReadOnlySpan<char>(buf, 0, read).Contains('\0'))
continue;
}
catch { continue; }
try
{
string[] lines = File.ReadAllLines(file);
for (int i = 0; i < lines.Length; i++)
{
if (regex.IsMatch(lines[i]))
{
int lineNumber = i + 1;
string hash = HashlineEncoder.ComputeHash(lines[i].AsSpan(), lineNumber);
sb.Append(file).Append(':').Append(lineNumber).Append(':').Append(hash).Append('|').AppendLine(lines[i]);
totalMatches++;
}
}
}
catch
{
// Skip files that can't be read
}
} }
if (totalMatches == 0) try
return $"(no matches for '{pattern}' in {path})"; {
string[] lines = File.ReadAllLines(path);
var sb = new System.Text.StringBuilder();
int matchCount = 0;
return $"Found {totalMatches} match(es):\n" + sb.ToString(); for (int i = 0; i < lines.Length; i++)
{
if (regex.IsMatch(lines[i]))
{
int lineNumber = i + 1;
string hash = HashlineEncoder.ComputeHash(lines[i].AsSpan(), lineNumber);
sb.Append(lineNumber).Append(':').Append(hash).Append('|').AppendLine(lines[i]);
matchCount++;
}
}
if (matchCount == 0)
return $"(no matches for '{pattern}' in {path})";
return sb.ToString();
}
catch (Exception ex)
{
return $"ERROR searching '{path}': {ex.Message}\nThis is a bug, tell the user about it.";
}
} }
catch (Exception ex) else if (mode == "recursive")
{ {
return $"ERROR in recursive grep: {ex.Message}"; Log($"Recursive grep: {pattern} in {path}" + (filePattern != null ? $" (files: {filePattern})" : ""));
if (!Directory.Exists(path))
if (File.Exists(path))
return $"ERROR: {path} is a file, not a directory.";
else
return $"ERROR: Directory not found: {path}";
Regex regex;
try
{
regex = new Regex(pattern, RegexOptions.Compiled | RegexOptions.IgnoreCase);
}
catch (Exception ex)
{
return $"ERROR: Invalid regex pattern '{pattern}': {ex.Message}";
}
try
{
string globPattern = filePattern?.Replace("**/", "") ?? "*";
var sb = new System.Text.StringBuilder();
int totalMatches = 0;
foreach (var file in EnumerateFilesRecursive(path, globPattern))
{
try
{
// Skip binary files: check first 512 bytes for null chars
using var probe = new StreamReader(file);
var buf = new char[512];
int read = probe.Read(buf, 0, buf.Length);
if (new ReadOnlySpan<char>(buf, 0, read).Contains('\0'))
continue;
}
catch { continue; }
try
{
string[] lines = File.ReadAllLines(file);
for (int i = 0; i < lines.Length; i++)
{
if (regex.IsMatch(lines[i]))
{
int lineNumber = i + 1;
string hash = HashlineEncoder.ComputeHash(lines[i].AsSpan(), lineNumber);
sb.Append(file).Append(':').Append(lineNumber).Append(':').Append(hash).Append('|').AppendLine(lines[i]);
totalMatches++;
}
}
}
catch
{
// Skip files that can't be read
}
}
if (totalMatches == 0)
return $"(no matches for '{pattern}' in {path})";
return $"Found {totalMatches} match(es):\n" + sb.ToString();
}
catch (Exception ex)
{
return $"ERROR in recursive grep: {ex.Message}.\nThis is a bug, tell the user about it.";
}
}
else
{
return $"ERROR: Invalid mode '{mode}'. Use 'file' or 'recursive'.";
} }
} }

View File

@@ -1,134 +0,0 @@
# Advanced AI Agent CLI System Design
This document outlines the architecture for a completely new, built-from-scratch AI Agent Command Line Interface system, inspired by the lessons learned from the `Anchor CLI` refactoring.
## 1. Core Principles
* **Event-Driven UI & Decoupled State:** The UI and display layers communicate exclusively through an asynchronous Event Bus.
* **Explicit Control Flow:** Core agent execution utilizes a Mediator pattern (Request/Response) for predictable, traceable control flow rather than pure event spaghetti.
* **Dependency Injection:** A robust IoC container manages lifecycles and dependencies.
* **Pluggable Architecture:** Everything—from the LLM provider to the UI renderer and memory storage—is an injectable plugin.
* **Stateless Components:** Services maintain minimal internal state. State is managed centrally in a session or context store with immutable snapshots.
* **Test-First Design:** Complete absence of static delegates and global mutable state ensures every component is unit-testable in isolation.
* **Pervasive Cancellation:** Every asynchronous operation accepts a `CancellationToken` for graceful termination.
## 2. High-Level Architecture & Project Structure (AOT-Ready)
The system is structurally divided into three distinct C# projects to enforce decoupling, testability, and future-proof design, while maintaining strict compatibility with **.NET Native AOT** compilation for single-file, zero-dependency distribution on Linux/Windows.
### 2.1 Project: `Anchor.AgentFramework` (Class Library)
The core logic and abstractions. It has **no knowledge** of the console, the file system, or specific LLM SDKs.
* **Contains:** Interfaces (`IEventBus`, `IMediator`, `IAgentAvatar`), Memory Management (`ISessionManager`), Execution Loop (`ChatCoordinator`), and the `ToolRunner`.
* **Responsibilities:** Orchestrating the agent's thought process, managing state, and firing events.
### 2.2 Project: `Anchor.Providers` (Class Library)
The vendor-specific implementations for Language Models.
* **Contains:** `OpenAIAvatar`, `AnthropicAvatar`.
* **Responsibilities:** Translating the framework's semantic requests into vendor-specific API calls (e.g., mapping `ToolResult` to OpenAI's tool response format) via SDKs like `Azure.AI.OpenAI`.
### 2.3 Project: `Anchor.Cli` (Console Application)
The "Hosting Shell" and the physical "Senses/Hands" of the application.
* **Contains:** `Program.cs` (Composition Root), `RichConsoleRenderer`, `ConsoleInputDispatcher`, and concrete Tool implementations (e.g., `FileSystemTool`, `CmdTool`).
* **Responsibilities:** Wiring up Dependency Injection, reading from stdin, rendering UI/spinners to stdout, and executing side-effects on the host OS.
### 2.4 Logical Layers
Across these projects, the system operates in five primary layers:
1. **Hosting & Lifecycle (The Host)**
2. **Event & Messaging Backbone (The Bus)**
3. **State & Memory Management (The Brain)**
4. **I/O & User Interface (The Senses & Voice)**
5. **Execution & Tooling (The Hands)**
### 2.5 Dependency Injection Graph
```text
Anchor.Cli (Composition Root - Program.cs)
├── IEventBus → AsyncEventBus
├── IMemoryStore → VectorMemoryStore / SQLiteMemoryStore
├── ISessionManager → ContextAwareSessionManager
│ └── ICompactionStrategy → SemanticCompactionStrategy
├── IUserInputDispatcher → ConsoleInputDispatcher
├── ICommandRegistry → DynamicCommandRegistry
├── IAgentAvatar (LLM Interface) → AnthropicAvatar / OpenAIAvatar
├── IResponseStreamer → TokenAwareResponseStreamer
├── IUiRenderer → RichConsoleRenderer
│ ├── ISpinnerManager → AsyncSpinnerManager
│ └── IStreamingRenderer → ConsoleStreamingRenderer
└── IToolRegistry → DynamicToolRegistry
└── (Injected Tools: FileSystemTool, CmdTool, WebSearchTool)
```
## 3. Component Details
### 3.1 The Messaging Backbone: `IEventBus` and `IMediator` (AOT Safe)
The system utilizes a dual-messaging approach to prevent "event spaghetti":
* **Publish-Subscribe (Events):** Used for things that *happened* and might have multiple or zero listeners (e.g., UI updates, diagnostics).
* `EventBus.PublishAsync(EventBase @event)`
* **Request-Response (Commands):** Used for linear, required actions with a return value.
* `Mediator.Send(IRequest<TResponse> request)`
> [!WARNING]
> Standard `MediatR` relies heavily on runtime reflection for handler discovery, making it **incompatible with Native AOT**. We must use an AOT-safe source-generated alternative, such as the [Mediator](https://github.com/martinothamar/Mediator) library, or implement a simple, source-generated Event/Command bus internally.
**Key Events (Pub/Sub):**
* `UserInputReceived`: Triggered when the user hits Enter.
* `LLMStreamDeltaReceived`: Emitted for token-by-token streaming to the UI.
* `ToolExecutionStarted` / `ToolExecutionCompleted`: Emitted for UI spinners and logging.
* `ContextLimitWarning`: High token usage indicator.
**Key Commands (Request/Response):**
* `ExecuteToolCommand`: Sent from the Avatar to the Tool Runner, returns a `ToolResult`.
### 3.2 The Brain: `ISessionManager` & Memory
Instead of just a simple list of messages, the new system uses a multi-tiered memory architecture with thread-safe access.
* **Short-Term Memory (Context Window):** The active conversation. Must yield **Immutable Context Snapshots** to prevent collection modification exceptions when tools/LLM run concurrently with background tasks.
* **Long-Term Memory (Vector DB):** Indexed facts, summaries, and user preferences.
* **ICompactionStrategy:**
Instead of implicitly using an LLM on the critical path, the system uses tiered, deterministic strategies:
1. **Sliding Window:** Automatically drop the oldest user/assistant message pairs.
2. **Tool Output Truncation:** Remove large file reads from old turns.
3. **LLM Summarization (Optional):** As a last resort, explicitly lock state and summarize old context into a "Context Digest".
### 3.3 The Senses & Voice: Event-Driven CLI UI
The UI is strictly separated from business logic, which is an ideal architecture for a dedicated CLI tool. The `RichConsoleRenderer` only listens to the `IEventBus`.
* **Input Loop:** `IUserInputDispatcher` sits in a loop reading stdin. When input is received, it fires `UserInputReceived`. It captures `Ctrl+C` to trigger a global `CancellationToken`.
* **Output Loop:** `IUiRenderer` subscribes to `LLMStreamDeltaReceived` and renders tokens. It subscribes to `ToolExecutionStarted` and spins up a dedicated UI spinner, preventing async console output from overwriting the active prompt.
* **Headless CLI Mode:** For CI/CD environments or scripting, the system can run non-interactively by simply swapping the `RichConsoleRenderer` with a `BasicLoggingRenderer`—the core agent logic remains untouched.
### 3.4 The Hands: Plugins and Tooling
Tools are no longer hardcoded.
* **IToolRegistry:** Discovers tools at startup. Note: reflection-based assembly scanning conflicts with the Native AOT constraints laid out in Section 5 — an AOT build must instead register tools through a source-generated registry.
* **Tool Execution:** When the LLM API returns a `tool_calls` stop reason, the `IAgentAvatar` iteratively or concurrently sends an `ExecuteToolCommand` via the Mediator. It directly awaits the results, appends them to the context snapshot, and resumes the LLM generation. This provides explicit, traceable control flow.
* **Cancellation:** Every async method across the entire system accepts a `CancellationToken` to allow graceful termination of infinite loops or runaway processes.
## 4. Execution Flow (Anatomy of a User Turn)
1. **Input:** User types "Find the bug in main.py".
2. **Dispatch:** `ConsoleInputDispatcher` reads it and publishes `UserInputReceived`.
3. **Routing:** Built-in command handler (if applicable) checks if it's a structural command (`/clear`, `/exit`). Otherwise `SessionManager` adds it to the active context.
4. **Inference:** A `ChatCoordinator` service reacts to the updated context and asks the `IAgentAvatar` for a response.
5. **Streaming:** The Avatar calls the Anthropic/OpenAI API. As tokens arrive, it publishes `LLMStreamDeltaReceived`.
6. **Rendering:** `RichConsoleRenderer` receives the deltas and prints them to the terminal.
7. **Tool Request:** The LLM API returns a tool call. The Avatar dispatches an `ExecuteToolCommand` via the Mediator. The EventBus also publishes a `ToolExecutionStarted` event for the UI spinner.
8. **Execution & Feedback:** `ToolRunner` handles the command, runs it safely with the `CancellationToken`, and returns the result back to the Avatar. The Avatar feeds this back to the LLM API automatically.
9. **Completion:** The turn ends. The `SessionManager` checks token bounds and runs compaction if necessary.
## 5. Conclusion (Native AOT Focus)
While `ARCHITECTURE_REFACTOR.md` focuses on migrating a legacy "God Class", this new design assumes a green-field, **AOT-first** approach.
To achieve true Native AOT, we must strictly avoid runtime reflection. This means:
1. Using `CreateSlimBuilder()` instead of `CreateDefaultBuilder()` in `Microsoft.Extensions.Hosting`.
2. Using Source Generators for Dependency Injection setup.
3. Using Source Generators for JSON Serialization (`System.Text.Json.Serialization.JsonSerializableAttribute`).
4. Replacing reflection-heavy libraries like `MediatR` and `Scrutor` with AOT-friendly source-generated alternatives.
By adhering to these constraints, the resulting single-binary Linux executable will have near-instant startup time and a dramatically reduced memory footprint compared to a standard JIT-compiled .NET application.

View File

@@ -49,7 +49,7 @@ public static string MoveFile(
- Both create parent directories - Both create parent directories
- Similar error handling patterns - Similar error handling patterns
## 3. Grep Operations ## 4. Grep Operations ✅ DONE
**Current tools:** `GrepFile`, `GrepRecursive` **Current tools:** `GrepFile`, `GrepRecursive`
@@ -59,13 +59,13 @@ public static string MoveFile(
public static string Grep( public static string Grep(
string path, string path,
string pattern, string pattern,
bool recursive = false, string mode = "recursive",
string? filePattern = null) string? filePattern = null)
``` ```
**Behavior:** **Behavior:**
- `recursive=false` - Searches single file (current GrepFile) - `mode="file"` - Searches single file (current GrepFile)
- `recursive=true` - Searches directory recursively (current GrepRecursive) - `mode="recursive"` - Searches directory recursively (current GrepRecursive)
- `filePattern` - Optional glob to filter files when recursive - `filePattern` - Optional glob to filter files when recursive
**Benefits:** **Benefits:**
@@ -73,7 +73,7 @@ public static string Grep(
- Reduces 2 tools to 1 - Reduces 2 tools to 1
- Cleaner API for LLM - Cleaner API for LLM
## 4. Delete Operations ## 5. Delete Operations ✅ DONE
**Current tools:** `DeleteFile`, `DeleteDir` **Current tools:** `DeleteFile`, `DeleteDir`
@@ -82,31 +82,24 @@ public static string Grep(
```csharp ```csharp
public static string Delete( public static string Delete(
string path, string path,
bool recursive = true) string mode = "file")
``` ```
**Behavior:** **Behavior:**
- Auto-detects if path is file or directory - `mode="file"` - Deletes a file
- `recursive=true` - Delete directory and all contents - `mode="dir"` - Deletes a directory (recursive)
- `recursive=false` - Only matters for directories (error if not empty)
**Benefits:** **Benefits:**
- Auto-detects file vs directory - Unified interface for all deletion
- Similar error handling patterns - Similar error handling patterns
- Reduces 2 tools to 1 - Reduces 2 tools to 1
## Summary These consolidations reduced the tool count from 17 to 13 tools (4 completed), making the API simpler and easier for the LLM to use effectively.
These consolidations would reduce the tool count from 17 to 13 tools, making the API simpler and easier for the LLM to use effectively. **Completed merges**:
1. ✅ File Move Operations (2 → 1) - **DONE**
2. ✅ File Write Operations (3 → 1) - **DONE**
3. ✅ Delete Operations (2 → 1) - **DONE**
4. ✅ Grep Operations (2 → 1) - **DONE**
**High priority merges:** **All high priority merges completed!**
1. ✅ File Write Operations (3 → 1)
2. ✅ File Move Operations (2 → 1)
3. ✅ Grep Operations (2 → 1)
4. ✅ Delete Operations (2 → 1)
**Kept separate:**
- `ReadFile` - distinct read-only operation
- `ListDir`, `FindFiles`, `GetFileInfo` - different purposes
- `CreateDir` - simple enough to keep standalone
- `ReplaceLines`, `InsertAfter`, `DeleteRange` - too complex to merge without confusing LLM