initial commit

This commit is contained in:
2026-03-22 02:25:16 +01:00
commit eb72820ce9
42 changed files with 2506 additions and 0 deletions
+9
View File
@@ -0,0 +1,9 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <!-- Class library targeting .NET 10 with implicit usings and
         nullable reference types enabled project-wide. -->
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>
</Project>
@@ -0,0 +1,19 @@
namespace Hush.Providers.Interfaces;
/// <summary>
/// Interface for audio-to-text transcription functionality.
/// </summary>
public interface IAudioToTextProvider
{
    /// <summary>
    /// Transcribes audio from a stream to text.
    /// </summary>
    /// <param name="audioStream">The audio stream to transcribe</param>
    /// <param name="modelName">The model name to use for transcription (e.g., whisper-large-v3)</param>
    /// <param name="cancellationToken">Token used to cancel the operation; defaults to <see cref="CancellationToken.None"/></param>
    /// <returns>A task that resolves to the transcribed text</returns>
    Task<string> TranscribeAsync(
        Stream audioStream,
        string modelName,
        CancellationToken cancellationToken = default);
}
@@ -0,0 +1,31 @@
namespace Hush.Providers.Interfaces;
/// <summary>
/// Interface for text generation with both synchronous and streaming capabilities.
/// </summary>
public interface ITextStreamingProvider
{
    /// <summary>
    /// Generates text completion for a given prompt, returning the full
    /// completion once generation has finished.
    /// </summary>
    /// <param name="prompt">The input prompt</param>
    /// <param name="modelName">The model name to use (e.g., llama-3.3-70b-versatile)</param>
    /// <param name="cancellationToken">Token used to cancel the operation; defaults to <see cref="CancellationToken.None"/></param>
    /// <returns>A task that resolves to the generated text</returns>
    Task<string> CompleteTextAsync(
        string prompt,
        string modelName,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Streams text generation for a given prompt, yielding chunks as they
    /// become available instead of waiting for the whole completion.
    /// </summary>
    /// <param name="prompt">The input prompt</param>
    /// <param name="modelName">The model name to use (e.g., llama-3.3-70b-versatile)</param>
    /// <param name="cancellationToken">Token used to cancel the stream; defaults to <see cref="CancellationToken.None"/></param>
    /// <returns>Async enumerable of text chunks</returns>
    IAsyncEnumerable<string> StreamTextAsync(
        string prompt,
        string modelName,
        CancellationToken cancellationToken = default);
}
@@ -0,0 +1,75 @@
using System.Text.Json.Serialization;
namespace Hush.Providers.Models.Request;
/// <summary>
/// Request model for Groq chat completion API.
/// </summary>
/// <remarks>
/// Serialized with System.Text.Json; <see cref="JsonPropertyNameAttribute"/>
/// maps each property onto the OpenAI-compatible wire field names.
/// </remarks>
public record ChatCompletionRequest
{
    /// <summary>
    /// A list of messages comprising the conversation so far.
    /// </summary>
    [JsonPropertyName("messages")]
    public required List<Message> Messages { get; init; }

    /// <summary>
    /// ID of the model to use.
    /// </summary>
    [JsonPropertyName("model")]
    public required string Model { get; init; }

    /// <summary>
    /// Whether to stream the response as server-sent events. Defaults to false.
    /// </summary>
    [JsonPropertyName("stream")]
    public bool Stream { get; init; } = false;

    /// <summary>
    /// Sampling temperature (0 to 2). Defaults to 1.0.
    /// </summary>
    [JsonPropertyName("temperature")]
    public float? Temperature { get; init; } = 1.0f;

    /// <summary>
    /// Nucleus sampling cutoff (0 to 1). Defaults to 1.0.
    /// </summary>
    [JsonPropertyName("top_p")]
    public float? TopP { get; init; } = 1.0f;

    /// <summary>
    /// Maximum number of tokens to generate. Null leaves the limit to the API.
    /// </summary>
    [JsonPropertyName("max_completion_tokens")]
    public int? MaxCompletionTokens { get; init; }

    /// <summary>
    /// Up to 4 sequences where the API stops generating tokens.
    /// </summary>
    [JsonPropertyName("stop")]
    public string[]? Stop { get; init; }

    /// <summary>
    /// Unique identifier representing your end-user.
    /// </summary>
    [JsonPropertyName("user")]
    public string? User { get; init; }
}
/// <summary>
/// A message in the chat conversation (request side).
/// </summary>
public record Message
{
    /// <summary>
    /// The role of the message author (e.g., "user" as sent by the provider;
    /// other roles such as "system"/"assistant" presumably valid — confirm
    /// against the Groq API docs).
    /// </summary>
    [JsonPropertyName("role")]
    public required string Role { get; init; }

    /// <summary>
    /// The content of the message.
    /// </summary>
    [JsonPropertyName("content")]
    public required string Content { get; init; }
}
@@ -0,0 +1,39 @@
using System.Text.Json.Serialization;
namespace Hush.Providers.Models.Request;
/// <summary>
/// Request model for Groq audio transcription API.
/// </summary>
/// <remarks>
/// NOTE(review): the transcription endpoint takes multipart form data, so the
/// provider sends these values as individual form fields; the
/// <see cref="JsonPropertyNameAttribute"/> names double as the form field names.
/// </remarks>
public record TranscriptionRequest
{
    /// <summary>
    /// The model to use for transcription.
    /// </summary>
    [JsonPropertyName("model")]
    public required string Model { get; init; }

    /// <summary>
    /// The language of the audio (ISO-639-1 format). Null omits the field.
    /// </summary>
    [JsonPropertyName("language")]
    public string? Language { get; init; }

    /// <summary>
    /// Text to guide the model's style or context. Null omits the field.
    /// </summary>
    [JsonPropertyName("prompt")]
    public string? Prompt { get; init; }

    /// <summary>
    /// Response format (json, text, verbose_json). Defaults to "json".
    /// </summary>
    [JsonPropertyName("response_format")]
    public string? ResponseFormat { get; init; } = "json";

    /// <summary>
    /// Sampling temperature (0 to 1). Defaults to 0.0 (deterministic).
    /// </summary>
    [JsonPropertyName("temperature")]
    public float? Temperature { get; init; } = 0.0f;
}
@@ -0,0 +1,153 @@
using System.Text.Json.Serialization;
namespace Hush.Providers.Models.Response;
/// <summary>
/// Response model for Groq chat completion API.
/// </summary>
/// <remarks>
/// All properties are marked <c>required</c>, so System.Text.Json
/// deserialization throws if the API omits any of them.
/// NOTE(review): confirm the API always sends <c>x_groq</c>; if it can be
/// absent, <see cref="XGroq"/> should become optional/nullable.
/// </remarks>
public record ChatCompletionResponse
{
    /// <summary>
    /// Unique identifier for the completion.
    /// </summary>
    [JsonPropertyName("id")]
    public required string Id { get; init; }

    /// <summary>
    /// Object type, always "chat.completion".
    /// </summary>
    [JsonPropertyName("object")]
    public required string Object { get; init; }

    /// <summary>
    /// Unix timestamp of creation.
    /// </summary>
    [JsonPropertyName("created")]
    public required long Created { get; init; }

    /// <summary>
    /// Model used.
    /// </summary>
    [JsonPropertyName("model")]
    public required string Model { get; init; }

    /// <summary>
    /// List of completion choices.
    /// </summary>
    [JsonPropertyName("choices")]
    public required List<Choice> Choices { get; init; }

    /// <summary>
    /// Usage statistics.
    /// </summary>
    [JsonPropertyName("usage")]
    public required Usage Usage { get; init; }

    /// <summary>
    /// Groq-specific metadata.
    /// </summary>
    [JsonPropertyName("x_groq")]
    public required GroqMetadata XGroq { get; init; }
}
/// <summary>
/// A completion choice within a <see cref="ChatCompletionResponse"/>.
/// </summary>
public record Choice
{
    /// <summary>
    /// Index of the choice within the response's choices list.
    /// </summary>
    [JsonPropertyName("index")]
    public required int Index { get; init; }

    /// <summary>
    /// The message content.
    /// </summary>
    [JsonPropertyName("message")]
    public required Message Message { get; init; }

    /// <summary>
    /// Reason the model stopped generating tokens.
    /// </summary>
    [JsonPropertyName("finish_reason")]
    public required string FinishReason { get; init; }
}
/// <summary>
/// A message in the response (distinct from the request-side Message type;
/// the two are disambiguated in the serializer context via TypeInfoPropertyName).
/// </summary>
public record Message
{
    /// <summary>
    /// The role of the message author.
    /// </summary>
    [JsonPropertyName("role")]
    public required string Role { get; init; }

    /// <summary>
    /// The content of the message.
    /// </summary>
    [JsonPropertyName("content")]
    public required string Content { get; init; }
}
/// <summary>
/// Usage statistics for the completion. Token counts are required; the timing
/// fields are nullable and may be absent from the payload.
/// </summary>
public record Usage
{
    /// <summary>
    /// Time spent in queue, in seconds (presumably — confirm units against the API docs).
    /// </summary>
    [JsonPropertyName("queue_time")]
    public double? QueueTime { get; init; }

    /// <summary>
    /// Number of tokens in the prompt.
    /// </summary>
    [JsonPropertyName("prompt_tokens")]
    public required int PromptTokens { get; init; }

    /// <summary>
    /// Time spent processing the prompt.
    /// </summary>
    [JsonPropertyName("prompt_time")]
    public double? PromptTime { get; init; }

    /// <summary>
    /// Number of tokens in the completion.
    /// </summary>
    [JsonPropertyName("completion_tokens")]
    public required int CompletionTokens { get; init; }

    /// <summary>
    /// Time spent generating the completion.
    /// </summary>
    [JsonPropertyName("completion_time")]
    public double? CompletionTime { get; init; }

    /// <summary>
    /// Total number of tokens (prompt + completion).
    /// </summary>
    [JsonPropertyName("total_tokens")]
    public required int TotalTokens { get; init; }

    /// <summary>
    /// Total time for the request.
    /// </summary>
    [JsonPropertyName("total_time")]
    public double? TotalTime { get; init; }
}
/// <summary>
/// Groq-specific metadata carried in the "x_groq" field of API responses.
/// </summary>
public record GroqMetadata
{
    /// <summary>
    /// Request ID assigned by Groq.
    /// </summary>
    [JsonPropertyName("id")]
    public required string Id { get; init; }
}
@@ -0,0 +1,21 @@
using System.Text.Json.Serialization;
namespace Hush.Providers.Models.Response;
/// <summary>
/// Response model for Groq audio transcription API.
/// </summary>
public record TranscriptionResponse
{
/// <summary>
/// The transcribed text.
/// </summary>
[JsonPropertyName("text")]
public required string Text { get; init; }
/// <summary>
/// Groq-specific metadata.
/// </summary>
[JsonPropertyName("x_groq")]
public required GroqMetadata XGroq { get; init; }
}
@@ -0,0 +1,208 @@
using System.Net.Http.Headers;
using System.Text;
using System.Text.Json;
using Hush.Providers.Interfaces;
using Hush.Providers.Models.Request;
using Hush.Providers.Models.Response;
using Hush.Providers.Serialization;
namespace Hush.Providers.Providers;
/// <summary>
/// Implementation of LLM provider for the Groq API. Supports audio
/// transcription plus synchronous and streaming chat completion over the
/// OpenAI-compatible HTTP endpoints.
/// </summary>
public class GroqProvider : IAudioToTextProvider, ITextStreamingProvider
{
    private const string ChatCompletionEndpoint = "https://api.groq.com/openai/v1/chat/completions";
    private const string TranscriptionEndpoint = "https://api.groq.com/openai/v1/audio/transcriptions";

    /// <summary>Length of the <c>"data: "</c> prefix on server-sent-event lines.</summary>
    private const int SseDataPrefixLength = 6;

    private readonly HttpClient _httpClient;
    private readonly string _apiKey;

    /// <summary>
    /// Initializes a new instance of the GroqProvider class.
    /// </summary>
    /// <param name="apiKey">The Groq API key</param>
    /// <param name="httpClient">Optional HttpClient instance (for testing)</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="apiKey"/> is null</exception>
    public GroqProvider(string apiKey, HttpClient? httpClient = null)
    {
        _apiKey = apiKey ?? throw new ArgumentNullException(nameof(apiKey));
        // NOTE: when no client is injected, the created HttpClient is never
        // disposed; callers creating many providers should pass a shared client.
        _httpClient = httpClient ?? new HttpClient();
    }

    /// <inheritdoc />
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="audioStream"/> is null</exception>
    /// <exception cref="ArgumentException">Thrown when <paramref name="modelName"/> is null, empty, or whitespace</exception>
    /// <exception cref="HttpRequestException">Thrown when the API returns a non-success status code</exception>
    /// <exception cref="InvalidOperationException">Thrown when the response body cannot be deserialized</exception>
    public async Task<string> TranscribeAsync(
        Stream audioStream,
        string modelName,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(audioStream);
        if (string.IsNullOrWhiteSpace(modelName))
            throw new ArgumentException("Model name is required", nameof(modelName));

        var request = new TranscriptionRequest { Model = modelName };

        // The transcription endpoint expects multipart form data, not JSON.
        // NOTE: disposing the multipart content also disposes the StreamContent
        // and therefore the caller's audioStream.
        using var content = new MultipartFormDataContent();
        content.Add(new StreamContent(audioStream), "file", "audio.wav");
        content.Add(new StringContent(request.Model), "model");
        if (request.ResponseFormat != null)
            content.Add(new StringContent(request.ResponseFormat), "response_format");
        if (request.Language != null)
            content.Add(new StringContent(request.Language), "language");
        if (request.Prompt != null)
            content.Add(new StringContent(request.Prompt), "prompt");
        if (request.Temperature.HasValue)
            content.Add(new StringContent(request.Temperature.Value.ToString(System.Globalization.CultureInfo.InvariantCulture)), "temperature");

        using var httpRequest = CreateAuthenticatedPost(TranscriptionEndpoint, content);
        using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        response.EnsureSuccessStatusCode();

        var responseContent = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
        var result = JsonSerializer.Deserialize(
            responseContent,
            JsonSourceGeneration.Default.TranscriptionResponse);
        if (result == null)
            throw new InvalidOperationException("Failed to deserialize transcription response");
        return result.Text;
    }

    /// <inheritdoc />
    /// <exception cref="ArgumentException">Thrown when <paramref name="prompt"/> or <paramref name="modelName"/> is null, empty, or whitespace</exception>
    /// <exception cref="HttpRequestException">Thrown when the API returns a non-success status code</exception>
    /// <exception cref="InvalidOperationException">Thrown when the response body cannot be deserialized or contains no choices</exception>
    public async Task<string> CompleteTextAsync(
        string prompt,
        string modelName,
        CancellationToken cancellationToken = default)
    {
        if (string.IsNullOrWhiteSpace(prompt))
            throw new ArgumentException("Prompt is required", nameof(prompt));
        if (string.IsNullOrWhiteSpace(modelName))
            throw new ArgumentException("Model name is required", nameof(modelName));

        using var httpRequest = CreateAuthenticatedPost(
            ChatCompletionEndpoint,
            BuildChatContent(prompt, modelName, stream: false));
        using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        response.EnsureSuccessStatusCode();

        var responseContent = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
        var result = JsonSerializer.Deserialize(responseContent, JsonSourceGeneration.Default.ChatCompletionResponse);
        if (result == null || result.Choices.Count == 0)
            throw new InvalidOperationException("Failed to deserialize chat completion response");
        return result.Choices[0].Message.Content;
    }

    /// <inheritdoc />
    /// <exception cref="ArgumentException">Thrown when <paramref name="prompt"/> or <paramref name="modelName"/> is null, empty, or whitespace</exception>
    public IAsyncEnumerable<string> StreamTextAsync(
        string prompt,
        string modelName,
        CancellationToken cancellationToken = default)
    {
        // Validate eagerly so callers get argument exceptions at the call site
        // rather than deferred until the async iterator is first enumerated.
        if (string.IsNullOrWhiteSpace(prompt))
            throw new ArgumentException("Prompt is required", nameof(prompt));
        if (string.IsNullOrWhiteSpace(modelName))
            throw new ArgumentException("Model name is required", nameof(modelName));

        return StreamTextCoreAsync(prompt, modelName, cancellationToken);
    }

    /// <summary>
    /// Core async iterator for <see cref="StreamTextAsync"/>: sends the
    /// streaming request and yields content chunks parsed from the SSE body.
    /// </summary>
    private async IAsyncEnumerable<string> StreamTextCoreAsync(
        string prompt,
        string modelName,
        [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        using var httpRequest = CreateAuthenticatedPost(
            ChatCompletionEndpoint,
            BuildChatContent(prompt, modelName, stream: true));
        // ResponseHeadersRead lets us start consuming the SSE body before the
        // whole response has been buffered.
        using var response = await _httpClient.SendAsync(httpRequest, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
        response.EnsureSuccessStatusCode();

        using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
        using var reader = new StreamReader(stream);

        string? line;
        while ((line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false)) != null)
        {
            if (string.IsNullOrWhiteSpace(line) || !line.StartsWith("data: ", StringComparison.Ordinal))
                continue;
            var data = line.Substring(SseDataPrefixLength).Trim(); // Remove "data: " prefix
            if (data == "[DONE]")
                break;
            var text = ParseTextFromStreamData(data);
            if (!string.IsNullOrEmpty(text))
                yield return text;
        }
    }

    /// <summary>
    /// Serializes a single-user-message chat completion request to JSON content.
    /// </summary>
    private static StringContent BuildChatContent(string prompt, string modelName, bool stream)
    {
        var request = new ChatCompletionRequest
        {
            Model = modelName,
            Stream = stream,
            Messages = new List<Hush.Providers.Models.Request.Message>
            {
                new Hush.Providers.Models.Request.Message { Role = "user", Content = prompt }
            }
        };
        return new StringContent(
            JsonSerializer.Serialize(request, JsonSourceGeneration.Default.ChatCompletionRequest),
            Encoding.UTF8,
            "application/json");
    }

    /// <summary>
    /// Builds a POST request for the given endpoint with the Bearer auth header set.
    /// </summary>
    private HttpRequestMessage CreateAuthenticatedPost(string endpoint, HttpContent content)
    {
        var request = new HttpRequestMessage(HttpMethod.Post, endpoint) { Content = content };
        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", _apiKey);
        return request;
    }

    /// <summary>
    /// Extracts the content delta (or "text" fallback) from one SSE JSON chunk.
    /// Returns null for chunks without usable content; malformed or
    /// unexpectedly-shaped chunks are skipped rather than surfaced as errors.
    /// </summary>
    private static string? ParseTextFromStreamData(string data)
    {
        try
        {
            using var jsonDoc = JsonDocument.Parse(data);
            // Guard explicitly: GetProperty/indexing would throw
            // KeyNotFoundException / IndexOutOfRangeException on valid JSON
            // lacking choices, which catch (JsonException) would NOT swallow.
            if (!jsonDoc.RootElement.TryGetProperty("choices", out var choices) ||
                choices.ValueKind != JsonValueKind.Array ||
                choices.GetArrayLength() == 0)
            {
                return null;
            }
            var choice = choices[0];
            if (choice.TryGetProperty("delta", out var delta))
            {
                if (delta.TryGetProperty("content", out var content))
                {
                    return content.GetString();
                }
            }
            else if (choice.TryGetProperty("text", out var text))
            {
                return text.GetString();
            }
        }
        catch (JsonException)
        {
            // Skip malformed JSON chunks
        }
        return null;
    }
}
@@ -0,0 +1,19 @@
using System.Text.Json.Serialization;
namespace Hush.Providers.Serialization;
/// <summary>
/// Source generation context for JSON serialization of the Groq request and
/// response models (AOT/trimming-friendly, no runtime reflection).
/// </summary>
/// <remarks>
/// Both the Request and Response namespaces declare a <c>Message</c> type, so
/// <c>TypeInfoPropertyName</c> assigns each a distinct property name
/// (<c>RequestMessage</c> / <c>ResponseMessage</c>) on the generated context.
/// </remarks>
[JsonSerializable(typeof(Models.Response.ChatCompletionResponse))]
[JsonSerializable(typeof(Models.Response.TranscriptionResponse))]
[JsonSerializable(typeof(Models.Response.Choice))]
[JsonSerializable(typeof(Models.Response.Message), TypeInfoPropertyName = "ResponseMessage")]
[JsonSerializable(typeof(Models.Response.Usage))]
[JsonSerializable(typeof(Models.Response.GroqMetadata))]
[JsonSerializable(typeof(Models.Request.ChatCompletionRequest))]
[JsonSerializable(typeof(Models.Request.TranscriptionRequest))]
[JsonSerializable(typeof(Models.Request.Message), TypeInfoPropertyName = "RequestMessage")]
public partial class JsonSourceGeneration : JsonSerializerContext
{
}