Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions EssentialCSharp.Chat.Shared/Models/BookContentChunk.cs
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,13 @@ public sealed class BookContentChunk
[VectorStoreData]
public int? ChapterNumber { get; set; }

/// <summary>
/// Zero-based ordinal of this chunk within its source file.
/// Together with FileName, forms the basis for the deterministic Id.
/// </summary>
[VectorStoreData]
public int ChunkIndex { get; set; }

/// <summary>
/// SHA256 hash of the chunk content for change detection
/// </summary>
Expand Down
25 changes: 22 additions & 3 deletions EssentialCSharp.Chat.Shared/Services/AISearchService.cs
Original file line number Diff line number Diff line change
Expand Up @@ -31,11 +31,30 @@ public async Task<IReadOnlyList<VectorSearchResult<BookContentChunk>>> ExecuteVe
{
try
{
var results = new List<VectorSearchResult<BookContentChunk>>();
await foreach (var result in collection.SearchAsync(searchVector, options: vectorSearchOptions, top: 3, cancellationToken: cancellationToken))
// Fetch more candidates than needed so we can deduplicate by heading.
// Multiple chunks from the same section share the same Heading; without dedup
// all top-N results could come from one long section, reducing context diversity.
const int candidates = 9;
const int maxDistinctResults = 3;

var candidates_list = new List<VectorSearchResult<BookContentChunk>>();
await foreach (var result in collection.SearchAsync(searchVector, options: vectorSearchOptions, top: candidates, cancellationToken: cancellationToken))
{
results.Add(result);
candidates_list.Add(result);
}
Comment on lines +40 to 44
Copy link

Copilot AI Apr 26, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Local variable name candidates_list uses snake_case, which is inconsistent with the surrounding camelCase naming in this method (e.g., vectorSearchOptions, searchVector). Rename to candidatesList (or similar) for consistency and readability.

Copilot uses AI. Check for mistakes.

// Keep only the highest-scoring chunk per unique heading, then take the globally
// top-N by score. GroupBy on a materialized list preserves insertion (score desc)
// order, but we make the ordering explicit via OrderByDescending so the result
// is correct regardless of provider sort guarantees.
// MaxBy on a non-empty IGrouping never returns null; ! asserts this invariant.
var results = candidates_list
.GroupBy(r => r.Record.Heading)
.Select(g => g.MaxBy(r => r.Score)!)
.OrderByDescending(r => r.Score)
.Take(maxDistinctResults)
.ToList();

return results;
}
catch (PostgresException ex) when (ex.SqlState == "28000" && attempt == 0)
Expand Down
30 changes: 9 additions & 21 deletions EssentialCSharp.Chat.Shared/Services/ChunkingResultExtensions.cs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
using System.Security.Cryptography;
using System.Text;
using System.Linq;
using EssentialCSharp.Chat.Common.Models;

namespace EssentialCSharp.Chat.Common.Services;
Expand All @@ -12,16 +11,17 @@ public static List<BookContentChunk> ToBookContentChunks(this FileChunkingResult
int? chapterNumber = ExtractChapterNumber(result.FileName);

var chunks = result.Chunks
.Select(chunkText =>
.Select((markdownChunk, index) =>
{
var contentHash = ComputeSha256Hash(chunkText);
var contentHash = ComputeSha256Hash(markdownChunk.ChunkText);
return new BookContentChunk
{
Id = Guid.NewGuid().ToString(),
Id = $"{result.FileName}_{index}",
FileName = result.FileName,
Heading = ExtractHeading(chunkText),
ChunkText = chunkText,
Heading = markdownChunk.Heading,
ChunkText = markdownChunk.ChunkText,
ChapterNumber = chapterNumber,
ChunkIndex = index,
ContentHash = contentHash
};
})
Expand All @@ -30,25 +30,13 @@ public static List<BookContentChunk> ToBookContentChunks(this FileChunkingResult
return chunks;
}

private static string ExtractHeading(string chunkText)
private static int? ExtractChapterNumber(string fileName)
{
// get characters until the first " - " or newline
var firstLine = chunkText.Split(["\r\n", "\r", "\n"], StringSplitOptions.None)[0];
var headingParts = firstLine.Split([" - "], StringSplitOptions.None);
return headingParts.Length > 0 ? headingParts[0].Trim() : string.Empty;
}

private static int ExtractChapterNumber(string fileName)
{
// Example: "Chapter01.md" -> 1
// Regex: Chapter(?<ChapterNumber>[0-9]{2})
// Example: "Chapter01.md" -> 1; non-chapter files return null.
var match = ChapterNumberRegex().Match(fileName);
if (match.Success && int.TryParse(match.Groups["ChapterNumber"].Value, out int chapterNumber))

{
return chapterNumber;
}
throw new InvalidOperationException($"File name '{fileName}' does not contain a valid chapter number in the expected format.");
return null;
}

private static string ComputeSha256Hash(string text)
Expand Down
111 changes: 91 additions & 20 deletions EssentialCSharp.Chat.Shared/Services/EmbeddingService.cs
Original file line number Diff line number Diff line change
@@ -1,51 +1,122 @@
using EssentialCSharp.Chat.Common.Models;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.VectorData;
using Npgsql;

namespace EssentialCSharp.Chat.Common.Services;

/// <summary>
/// Service for generating embeddings for markdown chunks using Azure OpenAI
/// Service for generating embeddings for markdown chunks using Azure OpenAI and uploading
/// them to a PostgreSQL vector store via a staging-then-swap pattern to avoid downtime.
/// </summary>
public class EmbeddingService(VectorStore vectorStore, IEmbeddingGenerator<string, Embedding<float>> embeddingGenerator)
public class EmbeddingService(
VectorStore vectorStore,
IEmbeddingGenerator<string, Embedding<float>> embeddingGenerator,
NpgsqlDataSource? dataSource = null)
{
public static string CollectionName { get; } = "markdown_chunks";

/// <summary>
/// Generate an embedding for the given text.
/// </summary>
/// <param name="text">The text to generate an embedding for.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>A search vector as ReadOnlyMemory&lt;float&gt;.</returns>
public async Task<ReadOnlyMemory<float>> GenerateEmbeddingAsync(string text, CancellationToken cancellationToken = default)
{
var embedding = await embeddingGenerator.GenerateAsync(text, cancellationToken: cancellationToken);
return embedding.Vector;
}

/// <summary>
/// Generate an embedding for each text paragraph and upload it to the specified collection.
/// Generate embeddings for all chunks in a single batch call and upload them to the vector
/// store using a staging-then-atomic-swap pattern so the live collection stays queryable
/// throughout the rebuild.
///
/// Steps:
/// 1. Create a staging collection ({collectionName}_staging).
/// 2. Embed all chunks in one batch API call (Azure OpenAI supports up to 2048 inputs).
/// 3. Batch-upsert all chunks into staging.
/// 4. Atomically swap tables in a single transaction: drop any leftover backup, then
///    two SQL RENAMEs (live → old, staging → live). PostgreSQL ALTER TABLE acquires
///    AccessExclusiveLock automatically; no explicit LOCK TABLE is needed. The
///    transaction ensures no reader sees an intermediate state.
/// 5. Drop the old live backup table.
Comment on lines +37 to +40
Copy link

Copilot AI Apr 26, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The method documentation says the swap uses "three SQL RENAMEs", but the implementation performs two RENAME operations (live→old, staging→live) plus DROP TABLE statements. Update the comments so they accurately describe the actual DDL being executed.

Suggested change
/// 4. Atomically swap staging → live via three SQL RENAMEs in a single transaction.
/// PostgreSQL ALTER TABLE acquires AccessExclusiveLock automatically; no explicit
/// LOCK TABLE is needed. The transaction ensures no reader sees an intermediate state.
/// 5. Drop the old live backup table.
/// 4. Atomically swap tables in a single transaction using two SQL RENAME operations
/// (live → old, staging → live). PostgreSQL ALTER TABLE acquires
/// AccessExclusiveLock automatically; no explicit LOCK TABLE is needed. The
/// transaction ensures no reader sees an intermediate state.
/// 5. Drop the old live backup table with DROP TABLE.

Copilot uses AI. Check for mistakes.
///
/// If an error occurs before the swap, only the staging table is affected — the live
/// collection is untouched.
/// </summary>
/// <param name="collectionName">The name of the collection to upload the text paragraphs to.</param>
/// <returns>An async task.</returns>
public async Task GenerateBookContentEmbeddingsAndUploadToVectorStore(IEnumerable<BookContentChunk> bookContents, CancellationToken cancellationToken, string? collectionName = null)
public async Task GenerateBookContentEmbeddingsAndUploadToVectorStore(
IEnumerable<BookContentChunk> bookContents,
CancellationToken cancellationToken,
string? collectionName = null)
{
collectionName ??= CollectionName;
string stagingName = $"{collectionName}_staging";
string oldName = $"{collectionName}_old";

var collection = vectorStore.GetCollection<string, BookContentChunk>(collectionName);
await collection.EnsureCollectionDeletedAsync(cancellationToken);
await collection.EnsureCollectionExistsAsync(cancellationToken);
if (dataSource is null)
throw new InvalidOperationException("NpgsqlDataSource is required for the staging swap. Ensure it is registered in DI.");

int uploadedCount = 0;
// ── Step 1: Prepare staging collection ────────────────────────────────────────
var staging = vectorStore.GetCollection<string, BookContentChunk>(stagingName);
await staging.EnsureCollectionDeletedAsync(cancellationToken);
await staging.EnsureCollectionExistsAsync(cancellationToken);

foreach (var chunk in bookContents)
// ── Step 2: Batch-embed all chunks in a single API call ───────────────────────
// IEmbeddingGenerator.GenerateAsync natively accepts IEnumerable<string>.
// The single-string overload used previously is a convenience extension method
// that wraps one item and calls this same method.
var chunkList = bookContents.ToList();
var texts = chunkList.Select(c => c.ChunkText).ToList();

GeneratedEmbeddings<Embedding<float>> embeddings =
await embeddingGenerator.GenerateAsync(texts, cancellationToken: cancellationToken);

if (embeddings.Count != chunkList.Count)
throw new InvalidOperationException(
$"Embedding count mismatch: expected {chunkList.Count}, got {embeddings.Count}.");

for (int i = 0; i < chunkList.Count; i++)
{
chunkList[i].TextEmbedding = embeddings[i].Vector;
Comment on lines +62 to +78
Copy link

Copilot AI Apr 26, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The embedding generator call batches all chunks into a single request, but Azure OpenAI embeddings have an input-count limit (noted in the comment as 2048). If the book produces more than that many chunks, this will fail at runtime. Consider chunking texts into batches (<= provider limit) and merging the returned vectors back into chunkList (or throw a clear exception when the limit is exceeded).

Suggested change
// ── Step 2: Batch-embed all chunks in a single API call ───────────────────────
// IEmbeddingGenerator.GenerateAsync natively accepts IEnumerable<string>.
// The single-string overload used previously is a convenience extension method
// that wraps one item and calls this same method.
var chunkList = bookContents.ToList();
var texts = chunkList.Select(c => c.ChunkText).ToList();
GeneratedEmbeddings<Embedding<float>> embeddings =
await embeddingGenerator.GenerateAsync(texts, cancellationToken: cancellationToken);
if (embeddings.Count != chunkList.Count)
throw new InvalidOperationException(
$"Embedding count mismatch: expected {chunkList.Count}, got {embeddings.Count}.");
for (int i = 0; i < chunkList.Count; i++)
{
chunkList[i].TextEmbedding = embeddings[i].Vector;
// ── Step 2: Batch-embed all chunks in provider-safe API calls ─────────────────
// Azure OpenAI embeddings impose an input-count limit per request.
// Process the texts in batches and merge the returned vectors back into the
// original chunk list to preserve ordering.
var chunkList = bookContents.ToList();
var texts = chunkList.Select(c => c.ChunkText).ToList();
const int maxEmbeddingBatchSize = 2048;
for (int batchStart = 0; batchStart < texts.Count; batchStart += maxEmbeddingBatchSize)
{
int batchSize = Math.Min(maxEmbeddingBatchSize, texts.Count - batchStart);
List<string> batchTexts = texts.GetRange(batchStart, batchSize);
GeneratedEmbeddings<Embedding<float>> embeddings =
await embeddingGenerator.GenerateAsync(batchTexts, cancellationToken: cancellationToken);
if (embeddings.Count != batchSize)
throw new InvalidOperationException(
$"Embedding count mismatch for batch starting at index {batchStart}: expected {batchSize}, got {embeddings.Count}.");
for (int i = 0; i < batchSize; i++)
{
chunkList[batchStart + i].TextEmbedding = embeddings[i].Vector;
}

Copilot uses AI. Check for mistakes.
}

// ── Step 3: Batch-upsert all chunks into staging ──────────────────────────────
await staging.UpsertAsync(chunkList, cancellationToken);
Console.WriteLine($"Uploaded {chunkList.Count} chunks to staging collection '{stagingName}'.");
Comment on lines +45 to +83
Copy link

Copilot AI Apr 26, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

GenerateBookContentEmbeddingsAndUploadToVectorStore now contains significant new behavior (batch embedding + staging/swap workflow) but there are no unit/integration tests covering it in the Chat test suite. Adding tests that at least verify batching semantics (e.g., embedding generator called with N inputs and staging collection UpsertAsync invoked once with embeddings assigned) would help prevent regressions; the swap step can be isolated behind an interface to make it testable without a live database.

Copilot generated this review using guidance from repository custom instructions.

// ── Step 4: Atomic swap — staging → live ──────────────────────────────────────
// One DROP TABLE plus two ALTER TABLE RENAME statements in one transaction.
// Each RENAME auto-acquires AccessExclusiveLock on its table; the transaction
// guarantees both renames are visible atomically to other sessions.
await using var conn = await dataSource.OpenConnectionAsync(cancellationToken);
await using var tx = await conn.BeginTransactionAsync(cancellationToken);

await using (var cmd = conn.CreateCommand())
{
cancellationToken.ThrowIfCancellationRequested();
chunk.TextEmbedding = await GenerateEmbeddingAsync(chunk.ChunkText, cancellationToken);
await collection.UpsertAsync(chunk, cancellationToken);
Console.WriteLine($"Uploaded chunk '{chunk.Id}' to collection '{collectionName}' for file '{chunk.FileName}' with heading '{chunk.Heading}'.");
uploadedCount++;
cmd.Transaction = tx;

// Drop any leftover backup from a previous run
cmd.CommandText = $"DROP TABLE IF EXISTS \"{oldName}\"";
await cmd.ExecuteNonQueryAsync(cancellationToken);

// Rename live → old. IF EXISTS is a no-op on first run when no live table exists.
// Using ALTER TABLE IF EXISTS avoids PL/pgSQL string interpolation entirely.
cmd.CommandText = $"ALTER TABLE IF EXISTS \"{collectionName}\" RENAME TO \"{oldName}\"";
await cmd.ExecuteNonQueryAsync(cancellationToken);

// Rename staging → live
cmd.CommandText = $"ALTER TABLE \"{stagingName}\" RENAME TO \"{collectionName}\"";
await cmd.ExecuteNonQueryAsync(cancellationToken);
Comment on lines +96 to +107
Copy link

Copilot AI Apr 26, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These SQL statements interpolate collectionName/derived names directly into identifier-quoted SQL. If collectionName can be influenced outside trusted code, this becomes identifier-injection (quotes can be escaped/broken). Consider restricting collectionName to a safe identifier regex (e.g., letters/digits/underscore) before composing SQL, and use Npgsql's identifier-quoting helper to build the final identifiers consistently.

Copilot uses AI. Check for mistakes.
}

await tx.CommitAsync(cancellationToken);
Console.WriteLine($"Swapped '{stagingName}' → '{collectionName}' atomically.");

// ── Step 5: Drop the old backup ───────────────────────────────────────────────
await using (var cmd = conn.CreateCommand())
{
cmd.CommandText = $"DROP TABLE IF EXISTS \"{oldName}\"";
await cmd.ExecuteNonQueryAsync(cancellationToken);
}
Console.WriteLine($"Successfully generated embeddings and uploaded {uploadedCount} chunks to collection '{collectionName}'.");

Console.WriteLine($"Successfully generated embeddings and uploaded {chunkList.Count} chunks to collection '{collectionName}'.");
}
}
9 changes: 8 additions & 1 deletion EssentialCSharp.Chat.Shared/Services/FileChunkingResult.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,12 @@
namespace EssentialCSharp.Chat.Common.Services;

/// <summary>
/// A single chunk from a markdown file, paired with the section heading it belongs to.
/// </summary>
/// <param name="Heading">Full breadcrumb heading for the section, built by joining the header stack with ": " (e.g. "Chapter 1: Intro: Summary").</param>
/// <param name="ChunkText">The raw chunk text, including the "Heading - " prefix prepended by TextChunker.</param>
public record MarkdownChunk(string Heading, string ChunkText);

/// <summary>
/// Data structure to hold chunking results for a single file
/// </summary>
Expand All @@ -9,6 +16,6 @@ public class FileChunkingResult
public string FilePath { get; set; } = string.Empty;
public int OriginalCharCount { get; set; }
public int ChunkCount { get; set; }
public List<string> Chunks { get; set; } = [];
public List<MarkdownChunk> Chunks { get; set; } = [];
public int TotalChunkCharacters { get; set; }
}
37 changes: 26 additions & 11 deletions EssentialCSharp.Chat.Shared/Services/MarkdownChunkingService.cs
Original file line number Diff line number Diff line change
Expand Up @@ -61,15 +61,24 @@ public async Task<List<FileChunkingResult>> ProcessMarkdownFilesAsync(
public FileChunkingResult ProcessSingleMarkdownFile(
string[] fileContent, string fileName, string filePath)
{
// Remove all multiple empty lines so there is no more than one empty line between paragraphs
string[] lines = [.. fileContent
.Select(line => line.Trim())
.Where(line => !string.IsNullOrWhiteSpace(line))];

// Collapse consecutive blank lines to at most one blank line. Single blank lines must
// be preserved because TextChunker.SplitMarkdownParagraphs uses them as paragraph
// separators — stripping all blanks defeats paragraph-aware chunking.
var normalizedLines = new List<string>(fileContent.Length);
bool lastWasBlank = false;
foreach (var raw in fileContent)
{
var line = raw.Trim();
var isBlank = string.IsNullOrWhiteSpace(line);
if (!isBlank || !lastWasBlank)
normalizedLines.Add(line);
lastWasBlank = isBlank;
}
Comment on lines +69 to +76
string[] lines = [.. normalizedLines];
string content = string.Join(Environment.NewLine, lines);

var sections = MarkdownContentToHeadersAndSection(content);
var allChunks = new List<string>();
var allChunks = new List<MarkdownChunk>();
int totalChunkCharacters = 0;
int chunkCount = 0;

Expand All @@ -83,7 +92,7 @@ public FileChunkingResult ProcessSingleMarkdownFile(
chunkHeader: Header + " - "
);
#pragma warning restore SKEXP0050
allChunks.AddRange(chunks);
allChunks.AddRange(chunks.Select(c => new MarkdownChunk(Header, c)));
chunkCount += chunks.Count;
totalChunkCharacters += chunks.Sum(c => c.Length);
}
Expand Down Expand Up @@ -155,18 +164,24 @@ public FileChunkingResult ProcessSingleMarkdownFile(
}
i++;

// Collect content until next header
// Collect content until next header, preserving blank lines as paragraph separators
// for TextChunker.SplitMarkdownParagraphs.
var contentLines = new List<string>();
while (i < lines.Length && !headerRegex.IsMatch(lines[i]))
{
if (!string.IsNullOrWhiteSpace(lines[i]))
contentLines.Add(lines[i]);
contentLines.Add(lines[i]);
i++;
}

// Strip leading and trailing blank lines; keep internal blanks for paragraph detection.
while (contentLines.Count > 0 && string.IsNullOrWhiteSpace(contentLines[0]))
contentLines.RemoveAt(0);
while (contentLines.Count > 0 && string.IsNullOrWhiteSpace(contentLines[^1]))
contentLines.RemoveAt(contentLines.Count - 1);

// Compose full header context
var fullHeader = string.Join(": ", headerStack.Select(h => h.Text));
if (contentLines.Count > 0)
if (contentLines.Any(l => !string.IsNullOrWhiteSpace(l)))
sections.Add((fullHeader, contentLines));
}
return sections;
Expand Down
6 changes: 3 additions & 3 deletions EssentialCSharp.Chat.Tests/MarkdownChunkingServiceTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -183,9 +183,9 @@ public async Task ProcessSingleMarkdownFile_ProducesExpectedChunksAndHeaders()
await Assert.That(result).IsNotNull();
await Assert.That(result.FileName).IsEqualTo(fileName);
await Assert.That(result.FilePath).IsEqualTo(filePath);
await Assert.That(string.Join("\n", result.Chunks)).Contains("This is the first section.");
await Assert.That(string.Join("\n", result.Chunks)).Contains("Console.WriteLine(\"Hello World\");");
await Assert.That(result.Chunks).Contains(c => c.Contains("This is the second section."));
await Assert.That(string.Join("\n", result.Chunks.Select(c => c.ChunkText))).Contains("This is the first section.");
await Assert.That(string.Join("\n", result.Chunks.Select(c => c.ChunkText))).Contains("Console.WriteLine(\"Hello World\");");
await Assert.That(result.Chunks).Contains(c => c.ChunkText.Contains("This is the second section."));
}
#endregion ProcessSingleMarkdownFile
}
Loading
Loading