feat: Implement a modular skill system with hotword detection, streaming text output, and enhanced logging.
This commit is contained in:
@@ -4,6 +4,7 @@ using System.Text.Json.Serialization;
|
||||
|
||||
using Toak.Api.Models;
|
||||
using Toak.Serialization;
|
||||
using Toak.Core;
|
||||
|
||||
namespace Toak.Api;
|
||||
|
||||
@@ -39,7 +40,9 @@ public class GroqApiClient
|
||||
content.Add(new StringContent(firstLang), "language");
|
||||
}
|
||||
|
||||
Logger.LogDebug($"Sending Whisper API request ({modelToUse})...");
|
||||
var response = await _httpClient.PostAsync("audio/transcriptions", content);
|
||||
Logger.LogDebug($"Whisper API response status: {response.StatusCode}");
|
||||
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
@@ -67,7 +70,9 @@ public class GroqApiClient
|
||||
|
||||
var jsonContent = new StringContent(JsonSerializer.Serialize(requestBody, AppJsonSerializerContext.Default.LlamaRequest), System.Text.Encoding.UTF8, "application/json");
|
||||
|
||||
Logger.LogDebug($"Sending Llama API request (model: {requestBody.Model})...");
|
||||
var response = await _httpClient.PostAsync("chat/completions", jsonContent);
|
||||
Logger.LogDebug($"Llama API response status: {response.StatusCode}");
|
||||
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
@@ -80,4 +85,55 @@ public class GroqApiClient
|
||||
|
||||
return result?.Choices?.FirstOrDefault()?.Message?.Content ?? string.Empty;
|
||||
}
|
||||
|
||||
/// <summary>
/// Streams refined text from the Llama chat/completions endpoint as server-sent
/// events, yielding each content delta as soon as it arrives.
/// </summary>
/// <param name="rawTranscript">Raw transcript text; wrapped in &lt;transcript&gt; tags before being sent as the user message.</param>
/// <param name="systemPrompt">System prompt instructing the model how to refine the transcript.</param>
/// <param name="model">Model identifier; falls back to "openai/gpt-oss-20b" when null or whitespace.</param>
/// <returns>An async sequence of non-empty content fragments, in arrival order.</returns>
/// <exception cref="Exception">Thrown when the API responds with a non-success status code.</exception>
public async IAsyncEnumerable<string> RefineTextStreamAsync(string rawTranscript, string systemPrompt, string model = "openai/gpt-oss-20b")
{
    var requestBody = new LlamaRequest
    {
        // Guard against callers explicitly passing null/empty for the optional parameter.
        Model = string.IsNullOrWhiteSpace(model) ? "openai/gpt-oss-20b" : model,
        Temperature = 0.0,
        Stream = true,
        Messages = new[]
        {
            new LlamaRequestMessage { Role = "system", Content = systemPrompt },
            new LlamaRequestMessage { Role = "user", Content = $"<transcript>{rawTranscript}</transcript>" }
        }
    };

    var jsonContent = new StringContent(JsonSerializer.Serialize(requestBody, AppJsonSerializerContext.Default.LlamaRequest), System.Text.Encoding.UTF8, "application/json");

    using var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions") { Content = jsonContent };
    request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("text/event-stream"));

    Logger.LogDebug($"Sending Llama Stream API request (model: {requestBody.Model})...");
    // ResponseHeadersRead lets us begin consuming the SSE body before the full response completes.
    using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
    Logger.LogDebug($"Llama Stream API response status: {response.StatusCode}");

    if (!response.IsSuccessStatusCode)
    {
        var error = await response.Content.ReadAsStringAsync();
        throw new Exception($"Llama API Error: {response.StatusCode} - {error}");
    }

    using var stream = await response.Content.ReadAsStreamAsync();
    using var reader = new StreamReader(stream);

    // SSE framing: payload lines look like "data: {json}", terminated by "data: [DONE]".
    string? line;
    while ((line = await reader.ReadLineAsync()) != null)
    {
        if (string.IsNullOrWhiteSpace(line)) continue;
        // Ordinal comparison: the SSE prefix is a protocol token, not linguistic text (CA1310).
        if (line.StartsWith("data: ", StringComparison.Ordinal))
        {
            var data = line.Substring("data: ".Length).Trim();
            if (data == "[DONE]") break;

            var chunk = JsonSerializer.Deserialize(data, AppJsonSerializerContext.Default.LlamaStreamResponse);
            var content = chunk?.Choices?.FirstOrDefault()?.Delta?.Content;
            if (!string.IsNullOrEmpty(content))
            {
                yield return content;
            }
        }
    }
}
|
||||
}
|
||||
|
||||
@@ -18,6 +18,8 @@ public class LlamaRequest
|
||||
public LlamaRequestMessage[] Messages { get; set; } = Array.Empty<LlamaRequestMessage>();
|
||||
[JsonPropertyName("temperature")]
|
||||
public double Temperature { get; set; } = 0.0;
|
||||
[JsonPropertyName("stream")]
|
||||
public bool? Stream { get; set; }
|
||||
}
|
||||
|
||||
public class LlamaResponse
|
||||
@@ -31,3 +33,22 @@ public class LlamaChoice
|
||||
[JsonPropertyName("message")]
|
||||
public LlamaRequestMessage Message { get; set; } = new();
|
||||
}
|
||||
|
||||
/// <summary>One server-sent-event chunk of a streaming chat/completions response.</summary>
public class LlamaStreamResponse
{
    /// <summary>Choice deltas carried by this chunk; empty array when the chunk has none.</summary>
    [JsonPropertyName("choices")] public LlamaStreamChoice[] Choices { get; set; } = Array.Empty<LlamaStreamChoice>();
}
|
||||
|
||||
/// <summary>A single choice entry within a streaming response chunk.</summary>
public class LlamaStreamChoice
{
    /// <summary>Incremental message delta for this choice; defaults to an empty delta.</summary>
    [JsonPropertyName("delta")] public LlamaStreamDelta Delta { get; set; } = new();
}
|
||||
|
||||
/// <summary>Incremental payload of a streaming choice; carries the next text fragment.</summary>
public class LlamaStreamDelta
{
    /// <summary>Text fragment for this delta; null when the event carries no content (e.g. role-only deltas).</summary>
    [JsonPropertyName("content")] public string? Content { get; set; }
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user