-
Notifications
You must be signed in to change notification settings - Fork 8
feat: Local Chat for development #1018
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 11 commits
ea1305e
146e1be
583859d
18c40f4
b206140
67cb7fd
edbac68
6cbcf05
0a37ade
afe7714
418eeac
ffa3d53
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,36 @@ | ||
namespace EssentialCSharp.Chat;

/// <summary>Identifies which AI backend (if any) the chat feature is wired to.</summary>
public enum AIServiceMode
{
    Disabled,
    Local,
    Azure
}

/// <summary>
/// Immutable snapshot of the chat AI configuration, derived once from <c>AIOptions</c>.
/// </summary>
/// <param name="Mode">The backend the application resolved at startup.</param>
public sealed record AIConfigurationState(AIServiceMode Mode)
{
    /// <summary>User-facing message shown when chat is requested but no backend is configured.</summary>
    public const string DevelopmentUnavailableMessage =
        "AI chat is unavailable for this local run. Start the site with Aspire local AI or configure Azure AI to enable chat.";

    /// <summary>True when any backend (local or Azure) is configured.</summary>
    public bool IsAvailable => Mode is AIServiceMode.Local or AIServiceMode.Azure;
    /// <summary>True when no backend is configured at all.</summary>
    public bool IsDisabled => Mode == AIServiceMode.Disabled;
    /// <summary>True when the local (e.g. Ollama) backend is in use.</summary>
    public bool UsesLocalAI => Mode == AIServiceMode.Local;
    /// <summary>True when the Azure backend is in use.</summary>
    public bool UsesAzureAI => Mode == AIServiceMode.Azure;

    /// <summary>
    /// Maps raw options onto a mode. A non-blank Azure endpoint always wins over the
    /// local-AI flag; <see langword="null"/> options fall back to defaults.
    /// </summary>
    /// <param name="options">Bound configuration, or <see langword="null"/> when the section is absent.</param>
    /// <returns>The resolved configuration state; never <see langword="null"/>.</returns>
    public static AIConfigurationState From(AIOptions? options)
    {
        var effective = options ?? new AIOptions();

        // Azure takes precedence: an explicit endpoint implies deliberate production configuration.
        if (!string.IsNullOrWhiteSpace(effective.Endpoint))
        {
            return new(AIServiceMode.Azure);
        }

        return effective.UseLocalAI
            ? new(AIServiceMode.Local)
            : new(AIServiceMode.Disabled);
    }
}
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,3 @@ | ||
namespace EssentialCSharp.Chat.Common.Services;

/// <summary>
/// Thrown by <c>IAIChatService</c> implementations when chat is invoked while no AI
/// backend is configured, so callers can distinguish "feature disabled" from other
/// <see cref="InvalidOperationException"/> failures and surface a clear error.
/// </summary>
public sealed class AIChatUnavailableException(string message) : InvalidOperationException(message);
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,31 @@ | ||
using ModelContextProtocol.Client;
using OpenAI.Responses;

namespace EssentialCSharp.Chat.Common.Services;

/// <summary>
/// Abstraction over a chat-completion backend. Implementations include the Azure path
/// and <c>LocalAIChatService</c>; some parameters (MCP client, tools, reasoning effort,
/// contextual search) are only honored by backends that support them.
/// </summary>
public interface IAIChatService
{
    /// <summary>
    /// Produces a complete (non-streaming) response for <paramref name="prompt"/>.
    /// </summary>
    /// <param name="prompt">The user's message.</param>
    /// <param name="systemPrompt">Optional system prompt; implementations may fall back to a configured default.</param>
    /// <param name="previousResponseId">Id returned by an earlier call, used to continue that conversation.</param>
    /// <param name="mcpClient">Optional MCP client for tool access — backend-specific; may be ignored.</param>
    /// <param name="tools">OpenAI Responses tools (preview API) — backend-specific; may be ignored.</param>
    /// <param name="reasoningEffortLevel">Reasoning effort (preview API) — backend-specific; may be ignored.</param>
    /// <param name="enableContextualSearch">Enables vector-search grounding where supported.</param>
    /// <param name="cancellationToken">Cancels the request.</param>
    /// <returns>The response text and an id usable as <paramref name="previousResponseId"/> on the next call.</returns>
    Task<(string response, string responseId)> GetChatCompletion(
        string prompt,
        string? systemPrompt = null,
        string? previousResponseId = null,
        IMcpClient? mcpClient = null,
#pragma warning disable OPENAI001
        IEnumerable<ResponseTool>? tools = null,
        ResponseReasoningEffortLevel? reasoningEffortLevel = null,
#pragma warning restore OPENAI001
        bool enableContextualSearch = false,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Streams a response incrementally. Text chunks are yielded with a
    /// <see langword="null"/> response id; the conversation id arrives in a final item
    /// (implementations may pair it with empty text). Parameters mirror
    /// <see cref="GetChatCompletion"/>.
    /// </summary>
    IAsyncEnumerable<(string text, string? responseId)> GetChatCompletionStream(
        string prompt,
        string? systemPrompt = null,
        string? previousResponseId = null,
        IMcpClient? mcpClient = null,
#pragma warning disable OPENAI001
        IEnumerable<ResponseTool>? tools = null,
        ResponseReasoningEffortLevel? reasoningEffortLevel = null,
#pragma warning restore OPENAI001
        bool enableContextualSearch = false,
        CancellationToken cancellationToken = default);
}
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,137 @@ | ||
using System.Collections.Concurrent;
using System.Runtime.CompilerServices;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using ModelContextProtocol.Client;
using OpenAI.Responses;

namespace EssentialCSharp.Chat.Common.Services;

/// <summary>
/// Local AI chat service using IChatClient (e.g. Ollama via CommunityToolkit.Aspire.OllamaSharp).
/// Compared to the Azure path: conversation history is in-memory only (lost on restart),
/// ResponseTool/ReasoningEffortLevel params are silently ignored, and vector search (RAG)
/// is disabled. Intended for local development without Azure credentials.
/// </summary>
public class LocalAIChatService : IAIChatService
{
    private readonly IChatClient _chatClient;
    private readonly AIOptions _options;
    private readonly ILogger<LocalAIChatService> _logger;

    // Synthetic conversation history keyed by GUID responseId.
    // In-memory only — not shared across instances and lost on restart.
    // ConcurrentDictionary prevents crashes from parallel requests (e.g., two chat tabs).
    // NOTE(review): only the immediate predecessor entry is evicted (see SaveConversation);
    // abandoned conversations are never removed, so memory can still grow over a long run —
    // consider a size cap or TTL if this service outlives short dev sessions.
    private readonly ConcurrentDictionary<string, List<ChatMessage>> _conversations = new();

    /// <summary>
    /// Wires up the configured options, the underlying chat client, and logging.
    /// </summary>
    /// <param name="options">AI options; only <c>SystemPrompt</c> is read by this class.</param>
    /// <param name="chatClient">The local model client all completions are delegated to.</param>
    /// <param name="logger">Used to warn when Azure-only features are requested.</param>
    public LocalAIChatService(
        IOptions<AIOptions> options,
        IChatClient chatClient,
        ILogger<LocalAIChatService> logger)
    {
        _options = options.Value;
        _chatClient = chatClient;
        _logger = logger;
    }

    /// <summary>
    /// Non-streaming completion: builds the message list (system prompt + any stored
    /// history + the new user prompt), calls the local model once, and records the
    /// exchange under a fresh response id.
    /// </summary>
    /// <returns>The full response text and the new conversation id.</returns>
    public async Task<(string response, string responseId)> GetChatCompletion(
        string prompt,
        string? systemPrompt = null,
        string? previousResponseId = null,
        IMcpClient? mcpClient = null,
#pragma warning disable OPENAI001
        IEnumerable<ResponseTool>? tools = null,
        ResponseReasoningEffortLevel? reasoningEffortLevel = null,
#pragma warning restore OPENAI001
        bool enableContextualSearch = false,
        CancellationToken cancellationToken = default)
    {
        // Azure-only parameters are accepted (interface compatibility) but only logged.
        WarnUnsupportedFeatures(tools, reasoningEffortLevel, enableContextualSearch);

        var messages = BuildMessages(prompt, systemPrompt, previousResponseId);
        var response = await _chatClient.GetResponseAsync(messages, cancellationToken: cancellationToken);
        var responseText = response.Text ?? string.Empty;
        var responseId = SaveConversation(messages, responseText, previousResponseId);
        return (responseText, responseId);
    }

    /// <summary>
    /// Streaming completion: yields each non-empty text chunk with a null id while
    /// accumulating the full text, then yields one final (empty-text, responseId) item
    /// after the stream completes so the caller can continue the conversation.
    /// If the stream faults or is cancelled mid-way, no conversation entry is saved.
    /// </summary>
    public async IAsyncEnumerable<(string text, string? responseId)> GetChatCompletionStream(
        string prompt,
        string? systemPrompt = null,
        string? previousResponseId = null,
        IMcpClient? mcpClient = null,
#pragma warning disable OPENAI001
        IEnumerable<ResponseTool>? tools = null,
        ResponseReasoningEffortLevel? reasoningEffortLevel = null,
#pragma warning restore OPENAI001
        bool enableContextualSearch = false,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        WarnUnsupportedFeatures(tools, reasoningEffortLevel, enableContextualSearch);

        var messages = BuildMessages(prompt, systemPrompt, previousResponseId);
        var fullResponse = new System.Text.StringBuilder();

        await foreach (var update in _chatClient.GetStreamingResponseAsync(messages, cancellationToken: cancellationToken))
        {
            if (!string.IsNullOrEmpty(update.Text))
            {
                fullResponse.Append(update.Text);
                yield return (update.Text, null);
            }
        }

        // Sentinel item: empty text, non-null id — consumers use it to pick up the id.
        var responseId = SaveConversation(messages, fullResponse.ToString(), previousResponseId);
        yield return (string.Empty, responseId);
    }

    // Logs (once per call) when callers pass features this local backend cannot honor.
#pragma warning disable OPENAI001
    private void WarnUnsupportedFeatures(
        IEnumerable<ResponseTool>? tools,
        ResponseReasoningEffortLevel? reasoningEffortLevel,
        bool enableContextualSearch)
#pragma warning restore OPENAI001
    {
        if (tools is not null || reasoningEffortLevel is not null)
        {
            _logger.LogWarning("LocalAIChatService: ResponseTool and ReasoningEffortLevel are Azure-specific and are ignored in local mode.");
        }

        if (enableContextualSearch)
        {
            _logger.LogWarning("LocalAIChatService: Vector search (RAG) is disabled in local mode (Phase 1). Run in Azure mode to enable contextual search.");
        }
    }

    // Assembles the message list sent to the model: optional system prompt (explicit
    // argument wins over _options.SystemPrompt), then stored history for the referenced
    // conversation (if any — an unknown/expired id silently starts fresh), then the
    // new user prompt.
    private List<ChatMessage> BuildMessages(string prompt, string? systemPrompt, string? previousResponseId)
    {
        var messages = new List<ChatMessage>();

        var sys = string.IsNullOrWhiteSpace(systemPrompt) ? _options.SystemPrompt : systemPrompt;
        if (!string.IsNullOrWhiteSpace(sys))
            messages.Add(new ChatMessage(ChatRole.System, sys));

        if (previousResponseId is not null && _conversations.TryGetValue(previousResponseId, out var history))
            messages.AddRange(history);

        messages.Add(new ChatMessage(ChatRole.User, prompt));
        return messages;
    }

    // Stores the completed exchange (user/assistant turns only — the system prompt is
    // re-derived on each request by BuildMessages) under a new GUID id and returns it.
    private string SaveConversation(List<ChatMessage> messages, string assistantResponse, string? previousResponseId)
    {
        var history = messages.Where(m => m.Role != ChatRole.System).ToList();
        history.Add(new ChatMessage(ChatRole.Assistant, assistantResponse));

        var newId = Guid.NewGuid().ToString("N");
        _conversations[newId] = history;

        // Remove previous entry to avoid unbounded memory growth.
        // TryRemove is safe on ConcurrentDictionary.
        // NOTE(review): if two in-flight requests share the same previousResponseId,
        // whichever finishes second will have already lost the shared entry — confirm
        // that concurrent continuation of one conversation isn't a supported scenario.
        if (previousResponseId is not null)
            _conversations.TryRemove(previousResponseId, out _);

        return newId;
    }
}
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,46 @@ | ||
using ModelContextProtocol.Client;
using OpenAI.Responses;

namespace EssentialCSharp.Chat.Common.Services;

/// <summary>
/// Fallback <c>IAIChatService</c> registered when no AI backend is configured.
/// Every call fails with an <see cref="AIChatUnavailableException"/> carrying
/// <c>AIConfigurationState.DevelopmentUnavailableMessage</c>, so callers get a
/// clear "chat disabled" error instead of a DI resolution failure.
/// </summary>
public sealed class UnavailableAIChatService : IAIChatService
{
    // Single construction point so both entry points raise an identical error.
    private static AIChatUnavailableException Unavailable() =>
        new(AIConfigurationState.DevelopmentUnavailableMessage);

    /// <summary>Always yields a faulted task; never contacts any backend.</summary>
    public Task<(string response, string responseId)> GetChatCompletion(
        string prompt,
        string? systemPrompt = null,
        string? previousResponseId = null,
        IMcpClient? mcpClient = null,
#pragma warning disable OPENAI001
        IEnumerable<ResponseTool>? tools = null,
        ResponseReasoningEffortLevel? reasoningEffortLevel = null,
#pragma warning restore OPENAI001
        bool enableContextualSearch = false,
        CancellationToken cancellationToken = default)
    {
        // Faulted task (not a synchronous throw) so awaiting callers observe the
        // exception exactly as they would a failed remote call.
        return Task.FromException<(string response, string responseId)>(Unavailable());
    }

    /// <summary>
    /// Async iterator that throws on the first <c>MoveNextAsync</c> — the exception is
    /// deferred to enumeration time, matching how a real streaming backend would fail.
    /// </summary>
    public async IAsyncEnumerable<(string text, string? responseId)> GetChatCompletionStream(
        string prompt,
        string? systemPrompt = null,
        string? previousResponseId = null,
        IMcpClient? mcpClient = null,
#pragma warning disable OPENAI001
        IEnumerable<ResponseTool>? tools = null,
        ResponseReasoningEffortLevel? reasoningEffortLevel = null,
#pragma warning restore OPENAI001
        bool enableContextualSearch = false,
        [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        // Cancellation beats the unavailable error, mirroring a real backend.
        cancellationToken.ThrowIfCancellationRequested();
        throw Unavailable();
#pragma warning disable CS0162 // unreachable yield keeps this a valid iterator body
        yield break;
#pragma warning restore CS0162
    }
}
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
`AddAIServices()` can exit without registering any `IAIChatService` (development + `AIOptions.Endpoint` empty + `UseLocalAI = false`). Since `ChatController` now requires `IAIChatService` via DI, hitting `/api/chat/*` will result in a runtime 500 (service resolution failure), not a "graceful degradation". Consider registering a no-op `IAIChatService` that returns a clear 503/feature-disabled error, or conditionally disable the chat endpoints/widget when AI isn't configured.