using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using AnchorCli.Hashline;
using Spectre.Console;

namespace AnchorCli.Tools;

/// <summary>
/// Read-only file tools exposed to the LLM as AIFunctions.
/// All methods are static with primitive parameters for AOT compatibility.
/// </summary>
internal static class FileTools
{
    /// <summary>Sink for per-call activity lines; defaults to stdout.</summary>
    public static Action<string> Log { get; set; } = Console.WriteLine;

    /// <summary>
    /// Optional callback invoked after each successful ReadFile call, with the resolved path.
    /// Set by ReplLoop to trigger deduplication compaction while the tool loop is still active.
    /// </summary>
    public static Action<string>? OnFileRead { get; set; }

    /// <summary>Maximum number of lines a single ReadFile call may return.</summary>
    private const int MaxReadLines = 200;

    [Description("Read a file. Max 200 lines per call. Returns lines with line:hash| anchors. IMPORTANT: Call GrepFile first (pattern: 'public|class|func|interface|enum|def') to get a structural outline and target startLine/endLine before calling this.")]
    public static string ReadFile(
        [Description("Path to the file.")] string path,
        [Description("First line to return (inclusive). Defaults to 1.")] int startLine = 1,
        [Description("Last line to return (inclusive). Use 0 for EOF. Defaults to 0.")] int endLine = 0)
    {
        path = ResolvePath(path);
        Log($" ● read_file: {path} {startLine}:{endLine}L");
        if (!File.Exists(path)) return $"ERROR: File not found: {path}";
        try
        {
            string[] lines = File.ReadAllLines(path);
            if (lines.Length == 0) return $"(empty file: {path})";

            // Clamp the requested window to the file's bounds before enforcing the cap.
            int actualEnd = endLine <= 0 ? lines.Length : Math.Min(endLine, lines.Length);
            int start = Math.Max(1, startLine);
            if (actualEnd - start + 1 > MaxReadLines)
            {
                return $"ERROR: File too large to read at once ({lines.Length} lines). Provide startLine and endLine to read a chunk of max 200 lines. \nUse GrepFile to get an outline (grep 'public') and find the line numbers.";
            }

            // NOTE(review): Encode receives the raw startLine/endLine, not the clamped
            // start/actualEnd — presumably HashlineEncoder clamps internally; confirm.
            string result = HashlineEncoder.Encode(lines, startLine, endLine);
            OnFileRead?.Invoke(path);
            return result;
        }
        catch (Exception ex)
        {
            return $"ERROR reading '{path}': {ex.Message}";
        }
    }

    [Description("List files and subdirectories.")]
    public static string ListDir(
        [Description("Path to the directory.")] string path = ".")
    {
        path = ResolvePath(path);
        Log($" ● list_dir: {path}");
        if (!Directory.Exists(path)) return $"ERROR: Directory not found: {path}";
        try
        {
            var sb = new StringBuilder();
            sb.AppendLine($"Directory: {path}");
            foreach (string dir in Directory.GetDirectories(path))
                sb.AppendLine($" [dir] {Path.GetFileName(dir)}/");
            foreach (string file in Directory.GetFiles(path))
            {
                var info = new FileInfo(file);
                sb.AppendLine($" [file] {info.Name} ({info.Length} bytes)");
            }
            return sb.ToString();
        }
        catch (Exception ex)
        {
            return $"ERROR listing '{path}': {ex.Message}";
        }
    }

    [Description("Find files matching a glob pattern (e.g. '*.cs', '**/*.json').")]
    public static string FindFiles(
        [Description("Directory to start search.")] string path,
        [Description("Glob pattern (supports * and **).")] string pattern)
    {
        path = ResolvePath(path);
        Log($" ● find_files: {pattern} in {path}");
        if (!Directory.Exists(path)) return $"ERROR: Directory not found: {path}";
        try
        {
            // '**' requests a recursive search; strip the '**/' prefix so the
            // remainder is a plain per-directory glob.
            bool recursive = pattern.Contains("**");
            string glob = pattern.Replace("**/", "");

            // Bug fix: Directory.GetFiles(..., AllDirectories) throws on the first
            // permission-denied subdirectory, killing the whole search. Use the
            // tolerant walker (same one recursive grep uses), which also skips
            // noise directories like .git/bin/obj.
            string[] files = recursive
                ? EnumerateFilesRecursive(path, glob).ToArray()
                : Directory.GetFiles(path, glob);

            if (files.Length == 0) return $"(no files matching '{pattern}' in {path})";
            var sb = new StringBuilder();
            sb.AppendLine($"Found {files.Length} file(s) matching '{pattern}':");
            foreach (var file in files)
            {
                sb.AppendLine($" {file}");
            }
            return sb.ToString();
        }
        catch (Exception ex)
        {
            return $"ERROR searching for files: {ex.Message}";
        }
    }

    [Description("Consolidated grep operation for single file or recursive directory search.")]
    public static string Grep(
        [Description("Directory to search (for recursive mode) or file path (for file mode).")] string path,
        [Description("Regex pattern.")] string pattern,
        [Description("Mode: 'file' for single file, 'recursive' for directory search.")] string mode = "recursive",
        [Description("Optional glob to filter files in recursive mode (e.g. '*.cs').")] string? filePattern = null)
    {
        path = ResolvePath(path);
        mode = mode.ToLowerInvariant();
        return mode switch
        {
            "file" => GrepSingleFile(path, pattern),
            "recursive" => GrepRecursive(path, pattern, filePattern),
            _ => $"ERROR: Invalid mode '{mode}'. Use 'file' or 'recursive'.",
        };
    }

    /// <summary>
    /// File-mode grep: returns each matching line as a line:hash|text anchor.
    /// </summary>
    private static string GrepSingleFile(string path, string pattern)
    {
        Log($" ● grep_file: {path}");
        if (!File.Exists(path))
        {
            return Directory.Exists(path)
                ? $"ERROR: {path} is a directory, not a file."
                : $"ERROR: File not found: {path}";
        }

        Regex regex;
        try
        {
            regex = new Regex(pattern, RegexOptions.Compiled | RegexOptions.IgnoreCase);
        }
        catch (Exception ex)
        {
            return $"ERROR: Invalid regex pattern '{pattern}': {ex.Message}";
        }

        try
        {
            string[] lines = File.ReadAllLines(path);
            var sb = new StringBuilder();
            int matchCount = 0;
            for (int i = 0; i < lines.Length; i++)
            {
                if (!regex.IsMatch(lines[i])) continue;
                int lineNumber = i + 1;
                string hash = HashlineEncoder.ComputeHash(lines[i].AsSpan(), lineNumber);
                sb.Append(lineNumber).Append(':').Append(hash).Append('|').AppendLine(lines[i]);
                matchCount++;
            }
            if (matchCount == 0) return $"(no matches for '{pattern}' in {path})";
            return sb.ToString();
        }
        catch (Exception ex)
        {
            return $"ERROR searching '{path}': {ex.Message}\nThis is a bug, tell the user about it.";
        }
    }

    /// <summary>
    /// Recursive-mode grep: walks the tree (skipping binary and inaccessible files)
    /// and returns file:line:hash|text anchors for every match.
    /// </summary>
    private static string GrepRecursive(string path, string pattern, string? filePattern)
    {
        Log($" ● grep_recursive: {pattern} in {path}" + (filePattern != null ? $" (files: {filePattern})" : ""));
        if (!Directory.Exists(path))
        {
            return File.Exists(path)
                ? $"ERROR: {path} is a file, not a directory."
                : $"ERROR: Directory not found: {path}";
        }

        Regex regex;
        try
        {
            regex = new Regex(pattern, RegexOptions.Compiled | RegexOptions.IgnoreCase);
        }
        catch (Exception ex)
        {
            return $"ERROR: Invalid regex pattern '{pattern}': {ex.Message}";
        }

        try
        {
            string globPattern = filePattern?.Replace("**/", "") ?? "*";
            var sb = new StringBuilder();
            int totalMatches = 0;
            foreach (var file in EnumerateFilesRecursive(path, globPattern))
            {
                if (LooksBinary(file)) continue;
                try
                {
                    string[] lines = File.ReadAllLines(file);
                    for (int i = 0; i < lines.Length; i++)
                    {
                        if (!regex.IsMatch(lines[i])) continue;
                        int lineNumber = i + 1;
                        string hash = HashlineEncoder.ComputeHash(lines[i].AsSpan(), lineNumber);
                        sb.Append(file).Append(':').Append(lineNumber).Append(':').Append(hash).Append('|').AppendLine(lines[i]);
                        totalMatches++;
                    }
                }
                catch
                {
                    // Skip files that can't be read (locked, permissions, encoding).
                }
            }
            if (totalMatches == 0) return $"(no matches for '{pattern}' in {path})";
            return $"Found {totalMatches} match(es):\n" + sb.ToString();
        }
        catch (Exception ex)
        {
            return $"ERROR in recursive grep: {ex.Message}.\nThis is a bug, tell the user about it.";
        }
    }

    /// <summary>
    /// Heuristic binary check: decodes up to the first 512 chars and looks for a NUL.
    /// (Note: this reads decoded chars via StreamReader, not raw bytes.)
    /// Files that cannot be opened are treated as binary so the caller skips them.
    /// </summary>
    private static bool LooksBinary(string file)
    {
        try
        {
            using var probe = new StreamReader(file);
            var buf = new char[512];
            int read = probe.Read(buf, 0, buf.Length);
            return new ReadOnlySpan<char>(buf, 0, read).Contains('\0');
        }
        catch
        {
            return true; // unreadable → skip, same as the original 'catch { continue; }'
        }
    }

    /// <summary>Directory names never worth walking: VCS metadata, build output, caches.</summary>
    private static readonly HashSet<string> SkipDirs = new(StringComparer.OrdinalIgnoreCase)
    {
        ".git", "bin", "obj", "node_modules", ".vs", "publish", ".svn", "__pycache__"
    };

    /// <summary>
    /// Safely enumerates files recursively, skipping inaccessible and non-useful directories.
    /// Unlike Directory.GetFiles(..., AllDirectories), this doesn't crash on the first
    /// permission-denied directory — it just skips it and continues.
    /// </summary>
    private static IEnumerable<string> EnumerateFilesRecursive(string dir, string pattern)
    {
        string[] files;
        try { files = Directory.GetFiles(dir, pattern); }
        catch { yield break; } // inaccessible directory: skip silently
        foreach (var f in files) yield return f;

        string[] subdirs;
        try { subdirs = Directory.GetDirectories(dir); }
        catch { yield break; }
        foreach (var sub in subdirs)
        {
            if (SkipDirs.Contains(Path.GetFileName(sub))) continue;
            foreach (var f in EnumerateFilesRecursive(sub, pattern)) yield return f;
        }
    }

    [Description("Get detailed file info (size, last modified, etc).")]
    public static string GetFileInfo(
        [Description("Path to the file.")] string path)
    {
        path = ResolvePath(path);
        Log($" ● get_file_info: {path}");
        if (!File.Exists(path)) return $"ERROR: File not found: {path}";
        try
        {
            var info = new FileInfo(path);
            var sb = new StringBuilder();
            sb.AppendLine($"File: {path}");
            sb.AppendLine($" Name: {info.Name}");
            sb.AppendLine($" Size: {info.Length} bytes ({info.Length / 1024f:F1} KB)");
            // Fix: the original called .Replace(".", "") on the whole interpolated
            // line; trimming the extension's leading dot expresses the intent directly.
            sb.AppendLine($" Type: {info.Extension.TrimStart('.')}");
            sb.AppendLine($" Created: {info.CreationTime}");
            sb.AppendLine($" Modified: {info.LastWriteTime}");
            sb.AppendLine($" Accessed: {info.LastAccessTime}");
            sb.AppendLine($" IsReadOnly: {info.IsReadOnly}");
            return sb.ToString();
        }
        catch (Exception ex)
        {
            return $"ERROR getting file info: {ex.Message}";
        }
    }

    // ---------------------------------------------------------------------------
    // Helpers
    // ---------------------------------------------------------------------------

    /// <summary>
    /// Resolves a path relative to the current working directory.
    /// Does NOT restrict to a sandbox — anchor is a local tool running as the user.
    /// </summary>
    internal static string ResolvePath(string path) =>
        Path.IsPathRooted(path) ? path : Path.GetFullPath(path, Environment.CurrentDirectory);
}