-
Notifications
You must be signed in to change notification settings - Fork 8
Expand file tree
/
Copy path AIChatService.cs
More file actions
354 lines (317 loc) · 19.3 KB
/
AIChatService.cs
File metadata and controls
354 lines (317 loc) · 19.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
using Azure.AI.OpenAI;
using Microsoft.Extensions.Options;
using ModelContextProtocol.Client;
using ModelContextProtocol.Protocol;
using OpenAI.Responses;
namespace EssentialCSharp.Chat.Common.Services;
/// <summary>
/// Service for handling AI chat completions using the OpenAI Responses API
/// </summary>
public class AIChatService : IAIChatService
{
    private readonly AIOptions _Options;
    private readonly AzureOpenAIClient _AzureClient;
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
    private readonly OpenAIResponseClient _ResponseClient;
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
    private readonly AISearchService _SearchService;

    public AIChatService(IOptions<AIOptions> options, AISearchService searchService, AzureOpenAIClient azureClient)
    {
        _Options = options.Value;
        _SearchService = searchService;
        // Initialize Azure OpenAI client and get the Response Client from it
        _AzureClient = azureClient;
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        _ResponseClient = _AzureClient.GetOpenAIResponseClient(_Options.ChatDeploymentName);
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
    }

    /// <summary>
    /// Gets a single chat completion response with all optional features
    /// </summary>
    /// <param name="prompt">The user's input prompt</param>
    /// <param name="systemPrompt">Optional system prompt to override the default</param>
    /// <param name="previousResponseId">Previous response ID to maintain conversation context</param>
    /// <param name="mcpClient">Optional MCP client whose tools are exposed to the model and invoked on function calls</param>
    /// <param name="tools">Optional tools for the AI to use</param>
    /// <param name="reasoningEffortLevel">Optional reasoning effort level for reasoning models</param>
    /// <param name="enableContextualSearch">Enable vector search for contextual information</param>
    /// <param name="cancellationToken">Cancellation token</param>
    /// <returns>The AI response text and response ID for conversation continuity</returns>
    public async Task<(string response, string responseId)> GetChatCompletion(
        string prompt,
        string? systemPrompt = null,
        string? previousResponseId = null,
        IMcpClient? mcpClient = null,
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        IEnumerable<ResponseTool>? tools = null,
        ResponseReasoningEffortLevel? reasoningEffortLevel = null,
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        bool enableContextualSearch = false,
        CancellationToken cancellationToken = default)
    {
        var responseOptions = await CreateResponseOptionsAsync(previousResponseId, tools, reasoningEffortLevel, mcpClient: mcpClient, cancellationToken: cancellationToken);
        var enrichedPrompt = await EnrichPromptWithContext(prompt, enableContextualSearch, cancellationToken);
        return await GetChatCompletionCore(enrichedPrompt, responseOptions, systemPrompt, cancellationToken);
    }

    /// <summary>
    /// Gets a streaming chat completion response with all optional features
    /// </summary>
    /// <param name="prompt">The user's input prompt</param>
    /// <param name="systemPrompt">Optional system prompt to override the default</param>
    /// <param name="previousResponseId">Previous response ID to maintain conversation context</param>
    /// <param name="mcpClient">Optional MCP client whose tools are exposed to the model and invoked on function calls</param>
    /// <param name="tools">Optional tools for the AI to use</param>
    /// <param name="reasoningEffortLevel">Optional reasoning effort level for reasoning models</param>
    /// <param name="enableContextualSearch">Enable vector search for contextual information</param>
    /// <param name="cancellationToken">Cancellation token</param>
    /// <returns>An async enumerable of response text chunks and final response ID</returns>
    public async IAsyncEnumerable<(string text, string? responseId)> GetChatCompletionStream(
        string prompt,
        string? systemPrompt = null,
        string? previousResponseId = null,
        IMcpClient? mcpClient = null,
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        IEnumerable<ResponseTool>? tools = null,
        ResponseReasoningEffortLevel? reasoningEffortLevel = null,
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        bool enableContextualSearch = false,
        [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        var responseOptions = await CreateResponseOptionsAsync(previousResponseId, tools, reasoningEffortLevel, mcpClient: mcpClient, cancellationToken: cancellationToken);
        var enrichedPrompt = await EnrichPromptWithContext(prompt, enableContextualSearch, cancellationToken);

        // Build the input items (system context + user message) and start the streaming response.
        var responseItems = BuildResponseItems(enrichedPrompt, systemPrompt);
        var streamingUpdates = _ResponseClient.CreateResponseStreamingAsync(
            responseItems,
            options: responseOptions,
            cancellationToken: cancellationToken);

        await foreach (var result in ProcessStreamingUpdatesAsync(streamingUpdates, responseOptions, mcpClient, cancellationToken))
        {
            yield return result;
        }
    }

    /// <summary>
    /// Builds the input items for a Responses API call: the user message, optionally preceded by a
    /// system message. When <paramref name="systemPrompt"/> is null/whitespace, the configured
    /// default system prompt (<see cref="AIOptions.SystemPrompt"/>) is used; when neither exists,
    /// only the user message is sent.
    /// </summary>
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
    private List<ResponseItem> BuildResponseItems(string prompt, string? systemPrompt)
    {
        var systemContext = !string.IsNullOrWhiteSpace(systemPrompt) ? systemPrompt : _Options.SystemPrompt;
        return systemContext is not null
            ? [ResponseItem.CreateSystemMessageItem(systemContext), ResponseItem.CreateUserMessageItem(prompt)]
            : [ResponseItem.CreateUserMessageItem(prompt)];
    }
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.

    /// <summary>
    /// Enriches the user prompt with contextual information from vector search.
    /// Returns the prompt unchanged when contextual search is disabled or yields no results,
    /// so the model is never shown an empty "Contextual Information" section.
    /// </summary>
    private async Task<string> EnrichPromptWithContext(string prompt, bool enableContextualSearch, CancellationToken cancellationToken)
    {
        if (!enableContextualSearch)
        {
            return prompt;
        }

        var searchResults = await _SearchService.ExecuteVectorSearch(prompt, cancellationToken: cancellationToken);

        // Collect the retrieved chunks first so we can skip the header entirely when nothing matched.
        var contextSections = new System.Text.StringBuilder();
        bool hasResults = false;
        foreach (var result in searchResults)
        {
            hasResults = true;
            contextSections.AppendLine(System.Globalization.CultureInfo.InvariantCulture, $"**From: {result.Record.Heading}**");
            contextSections.AppendLine(result.Record.ChunkText);
            contextSections.AppendLine();
        }

        if (!hasResults)
        {
            return prompt;
        }

        var contextualInfo = new System.Text.StringBuilder();
        contextualInfo.AppendLine("## Contextual Information");
        contextualInfo.AppendLine("The following information might be relevant to your question:");
        contextualInfo.AppendLine();
        contextualInfo.Append(contextSections);
        contextualInfo.AppendLine("## User Question");
        contextualInfo.AppendLine(prompt);
        return contextualInfo.ToString();
    }

    /// <summary>
    /// Processes streaming updates from the OpenAI Responses API, handling both regular responses and function calls.
    /// Yields (delta text, null) for each text chunk and (empty, responseId) when a response completes.
    /// </summary>
    private async IAsyncEnumerable<(string text, string? responseId)> ProcessStreamingUpdatesAsync(
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        IAsyncEnumerable<StreamingResponseUpdate> streamingUpdates,
        ResponseCreationOptions responseOptions,
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        IMcpClient? mcpClient,
        [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        // Declared outside the loop so the ID captured on the "created" update actually survives
        // across iterations (previously it was re-declared per iteration and immediately lost).
        string? responseId = null;

        await foreach (var update in streamingUpdates.WithCancellation(cancellationToken))
        {
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
            if (update is StreamingResponseCreatedUpdate created)
            {
                // Remember the response ID for later function calls
                responseId = created.Response.Id;
            }
            else if (update is StreamingResponseOutputItemDoneUpdate itemDone)
            {
                // Check if this is a function call that needs to be executed
                if (itemDone.Item is FunctionCallResponseItem functionCallItem && mcpClient != null)
                {
                    // Execute the function call and stream its response
                    await foreach (var functionResult in ExecuteFunctionCallAsync(functionCallItem, responseOptions, mcpClient, cancellationToken))
                    {
                        if (functionResult.responseId != null)
                        {
                            responseId = functionResult.responseId;
                        }
                        yield return functionResult;
                    }
                }
            }
            else if (update is StreamingResponseOutputTextDeltaUpdate deltaUpdate)
            {
                yield return (deltaUpdate.Delta.ToString(), null);
            }
            else if (update is StreamingResponseCompletedUpdate completedUpdate)
            {
                yield return (string.Empty, responseId: completedUpdate.Response.Id); // Signal completion with response ID
            }
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        }
    }

    /// <summary>
    /// Executes a function call via the MCP client and streams the follow-up model response.
    /// The original function-call item plus its textual result are sent back to the Responses API,
    /// mirroring the pattern of appending both the tool call and its output.
    /// </summary>
    private async IAsyncEnumerable<(string text, string? responseId)> ExecuteFunctionCallAsync(
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        FunctionCallResponseItem functionCallItem,
        ResponseCreationOptions responseOptions,
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        IMcpClient mcpClient,
        [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        // A dictionary of arguments to pass to the tool. Each key represents a parameter name, and its associated value represents the argument value.
        Dictionary<string, object?> arguments = [];

        // example JsonResponse:
        // "{\"question\":\"Azure OpenAI Responses API (Preview)\"}"
        var jsonResponse = functionCallItem.FunctionArguments.ToString();

        // Guard against an empty arguments payload: Deserialize would throw a JsonException on "".
        var jsonArguments = string.IsNullOrWhiteSpace(jsonResponse)
            ? new Dictionary<string, object?>()
            : System.Text.Json.JsonSerializer.Deserialize<Dictionary<string, object?>>(jsonResponse) ?? new Dictionary<string, object?>();

        // Convert JsonElement values to their actual types
        foreach (var kvp in jsonArguments)
        {
            if (kvp.Value is System.Text.Json.JsonElement jsonElement)
            {
                arguments[kvp.Key] = jsonElement.ValueKind switch
                {
                    System.Text.Json.JsonValueKind.String => jsonElement.GetString(),
                    // GetDecimal() throws for numbers outside decimal range, so fall back to double.
                    System.Text.Json.JsonValueKind.Number => jsonElement.TryGetDecimal(out var dec) ? (object)dec : jsonElement.GetDouble(),
                    System.Text.Json.JsonValueKind.True => true,
                    System.Text.Json.JsonValueKind.False => false,
                    System.Text.Json.JsonValueKind.Null => null,
                    _ => jsonElement.ToString()
                };
            }
            else
            {
                arguments[kvp.Key] = kvp.Value;
            }
        }

        // Execute the function call using the MCP client
        var toolResult = await mcpClient.CallToolAsync(
            functionCallItem.FunctionName,
            arguments: arguments,
            cancellationToken: cancellationToken);

        // Create input items with both the function call and the result
        // This matches the Python pattern: append both tool_call and result
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        var inputItems = new List<ResponseItem>
        {
            functionCallItem, // The original function call
            new FunctionCallOutputResponseItem(functionCallItem.CallId, string.Join("", toolResult.Content.Where(x => x.Type == "text").OfType<TextContentBlock>().Select(x => x.Text)))
        };
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.

        // Stream the function call response using the same processing logic
        var functionResponseStream = _ResponseClient.CreateResponseStreamingAsync(
            inputItems,
            responseOptions,
            cancellationToken);
        await foreach (var result in ProcessStreamingUpdatesAsync(functionResponseStream, responseOptions, mcpClient, cancellationToken))
        {
            yield return result;
        }
    }

    /// <summary>
    /// Creates response options with optional features: conversation continuity, explicit tools,
    /// MCP-discovered tools, and reasoning effort.
    /// </summary>
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
    private static async Task<ResponseCreationOptions> CreateResponseOptionsAsync(
        string? previousResponseId = null,
        IEnumerable<ResponseTool>? tools = null,
        ResponseReasoningEffortLevel? reasoningEffortLevel = null,
        IMcpClient? mcpClient = null,
        CancellationToken cancellationToken = default
    )
    {
        var options = new ResponseCreationOptions();
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        // Add conversation context if available
        if (!string.IsNullOrEmpty(previousResponseId))
        {
            options.PreviousResponseId = previousResponseId;
        }

        // Add tools if provided
        if (tools != null)
        {
            foreach (var tool in tools)
            {
                options.Tools.Add(tool);
            }
        }

        // Expose each MCP-discovered tool to the model as a function tool with its JSON schema.
        if (mcpClient is not null)
        {
            await foreach (McpClientTool tool in mcpClient.EnumerateToolsAsync(cancellationToken: cancellationToken))
            {
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
                options.Tools.Add(ResponseTool.CreateFunctionTool(tool.Name, functionDescription: tool.Description, strictModeEnabled: true, functionParameters: BinaryData.FromString(tool.JsonSchema.GetRawText())));
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
            }
        }

        // Add reasoning options if specified
        if (reasoningEffortLevel.HasValue)
        {
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
            options.ReasoningOptions = new ResponseReasoningOptions()
            {
                ReasoningEffortLevel = reasoningEffortLevel.Value
            };
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        }
        return options;
    }

    /// <summary>
    /// Core method for getting chat completions with configurable response options.
    /// Sends the prompt (plus system context) and extracts the first non-empty assistant message.
    /// </summary>
    private async Task<(string response, string responseId)> GetChatCompletionCore(
        string prompt,
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        ResponseCreationOptions responseOptions,
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        string? systemPrompt = null,
        CancellationToken cancellationToken = default)
    {
        // Build the input items (system context + user message).
        var responseItems = BuildResponseItems(prompt, systemPrompt);

        // Create the response using the Responses API
        var response = await _ResponseClient.CreateResponseAsync(
            responseItems,
            options: responseOptions,
            cancellationToken: cancellationToken);

        // Extract the message content and response ID
        string responseText = string.Empty;
        string responseId = response.Value.Id;
#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        var assistantMessage = response.Value.OutputItems
            .OfType<MessageResponseItem>()
            .FirstOrDefault(m => m.Role == MessageRole.Assistant &&
                !string.IsNullOrEmpty(m.Content?.FirstOrDefault()?.Text));
        if (assistantMessage is not null)
        {
            responseText = assistantMessage.Content?.FirstOrDefault()?.Text ?? string.Empty;
        }
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
        return (responseText, responseId);
    }

    // TODO: Look into using UserSecurityContext (https://learn.microsoft.com/en-us/azure/defender-for-cloud/gain-end-user-context-ai)
}