Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
108 commits
Select commit Hold shift + click to select a range
23cf75b
Checkpoint
crickman Mar 5, 2026
fcd60da
Checkpoint
crickman Mar 5, 2026
eb84062
Stable
crickman Mar 5, 2026
0e8b9b2
Strategies
crickman Mar 5, 2026
b275d34
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 5, 2026
dda15ea
Updated
crickman Mar 5, 2026
7608005
Encoding
crickman Mar 5, 2026
1428286
Formatting
crickman Mar 5, 2026
f70423b
Cleanup
crickman Mar 5, 2026
defb9dd
Formatting
crickman Mar 5, 2026
6ce0447
Tests
crickman Mar 5, 2026
7e2c5ad
Tuning
crickman Mar 5, 2026
06f55c0
Update tests
crickman Mar 5, 2026
f42863e
Test update
crickman Mar 5, 2026
c513694
Remove working solution
crickman Mar 5, 2026
1a8a58f
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 5, 2026
43d226f
Add sample to solution
crickman Mar 5, 2026
5ef100c
Sample readyme
crickman Mar 5, 2026
4d6e1ff
Experimental
crickman Mar 5, 2026
2f443a1
Format
crickman Mar 5, 2026
209d0e3
Formatting
crickman Mar 5, 2026
84aa392
Encoding
crickman Mar 5, 2026
7c88b20
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 5, 2026
9c1165f
Support IChatReducer
crickman Mar 5, 2026
6ef397e
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 6, 2026
7afed95
Sample output formatting
crickman Mar 6, 2026
36ba6b0
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 6, 2026
1598991
Initial plan
Copilot Mar 6, 2026
dc2bb4d
Replace CompactingChatClient with MessageCompactionContextProvider
Copilot Mar 6, 2026
601eddb
Boundary condition
crickman Mar 6, 2026
cc441d2
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 6, 2026
14aae1f
Merge branch 'crickman/feature-compaction-deux' into copilot/create-m…
crickman Mar 6, 2026
094b415
Fix encoding
crickman Mar 6, 2026
93728c1
Fix cast
crickman Mar 6, 2026
aff1d06
Test coverage
crickman Mar 6, 2026
04f29e6
Merge branch 'crickman/feature-compaction-deux' into copilot/create-m…
crickman Mar 6, 2026
278912b
Namespace
crickman Mar 6, 2026
576f750
Improvements
crickman Mar 6, 2026
aa47a14
Efficiency
crickman Mar 6, 2026
b202b7c
Cleanup
crickman Mar 6, 2026
9f0cc62
Resolve merge
crickman Mar 7, 2026
da4886f
Detect service managed conversation
crickman Mar 7, 2026
dcf4b1a
Fix namespace
crickman Mar 7, 2026
fe09a1e
Fix merge
crickman Mar 7, 2026
b6070fd
Fix test expectation
crickman Mar 7, 2026
06787d0
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 9, 2026
cf4fe99
Merge branch 'crickman/feature-compaction-deux' into copilot/create-m…
crickman Mar 9, 2026
ff3d9e5
Update dotnet/src/Microsoft.Agents.AI.Abstractions/InMemoryChatHistor…
crickman Mar 9, 2026
1fffafe
Address PR comments (x1)
crickman Mar 9, 2026
e157b5f
Merge branch 'copilot/create-message-compaction-provider' of https://…
crickman Mar 9, 2026
d6d2331
Update comment
crickman Mar 9, 2026
ee936d3
Update comments
crickman Mar 9, 2026
f42ebb0
Merge branch 'copilot/create-message-compaction-provider' of https://…
crickman Mar 9, 2026
b6cbf62
Clean-up
crickman Mar 9, 2026
6aeb295
Format output
crickman Mar 9, 2026
f88fed0
Sync sample comment
crickman Mar 9, 2026
9edd440
Fix condition
crickman Mar 9, 2026
f105ae0
Adjust data-flow
crickman Mar 9, 2026
f40710f
Address comments (x2)
crickman Mar 9, 2026
39bd2a5
Merge branch 'main' into copilot/create-message-compaction-provider
crickman Mar 9, 2026
5c406b8
Direct compaction
crickman Mar 10, 2026
b0138dc
Fix summarization content
crickman Mar 10, 2026
8179e2e
Argument check / fix count calculation
crickman Mar 10, 2026
4629cc3
Merge branch 'main' into copilot/create-message-compaction-provider
crickman Mar 10, 2026
011995b
Minor follow-up
crickman Mar 10, 2026
c68a7d3
Diagnostics
crickman Mar 10, 2026
c010d70
Minor updates
crickman Mar 10, 2026
6df1e23
Fix state test
crickman Mar 10, 2026
08354f4
Merge branch 'copilot/create-message-compaction-provider' of https://…
crickman Mar 10, 2026
133a631
Merge branch 'copilot/create-message-compaction-provider' of https://…
crickman Mar 10, 2026
2c37cfa
Fix sliding window perf
crickman Mar 10, 2026
7640783
Stable state keys
crickman Mar 10, 2026
9a46d18
Increase size computation
crickman Mar 10, 2026
60c4b7a
Formatting
crickman Mar 10, 2026
1287881
Add README.md for Agent_Step18_CompactionPipeline sample (#4574)
Copilot Mar 10, 2026
19643d3
Sample comments
crickman Mar 10, 2026
cc8cfc7
Merge branch 'copilot/create-message-compaction-provider' of https://…
crickman Mar 10, 2026
e79ca62
Updated
crickman Mar 10, 2026
302b4f4
Merge branch 'main' into copilot/create-message-compaction-provider
crickman Mar 10, 2026
182cefb
Update dotnet/src/Microsoft.Agents.AI/Compaction/MessageIndex.cs
crickman Mar 10, 2026
4b64b28
Update dotnet/tests/Microsoft.Agents.AI.UnitTests/Compaction/Compacti…
crickman Mar 10, 2026
2d8f53f
Update dotnet/src/Microsoft.Agents.AI/Compaction/MessageIndex.cs
crickman Mar 10, 2026
10f3538
Address copilot comments
crickman Mar 10, 2026
9e78a1e
Merge branch 'copilot/create-message-compaction-provider' of https://…
crickman Mar 10, 2026
1ae8d25
Fix namespace
crickman Mar 10, 2026
2c7e1c8
Merge branch 'main' into copilot/create-message-compaction-provider
crickman Mar 10, 2026
dfff5fc
Comments / convensions
crickman Mar 10, 2026
48fbe27
Prefix `MessageGroup` and `MessageIndex`
crickman Mar 10, 2026
d8d0d1c
Merge branch 'main' into copilot/create-message-compaction-provider
crickman Mar 10, 2026
1c5820e
Merge branch 'main' into copilot/create-message-compaction-provider
crickman Mar 10, 2026
70ae245
Fix sliding window
crickman Mar 10, 2026
d1cadea
Update dotnet/src/Microsoft.Agents.AI/Compaction/SummarizationCompact…
crickman Mar 10, 2026
efe5f67
Update dotnet/src/Microsoft.Agents.AI.Abstractions/InMemoryChatHistor…
crickman Mar 10, 2026
3821ae7
Python alignment
crickman Mar 10, 2026
235876e
Resolve merge
crickman Mar 10, 2026
6d663f2
Fix merge
crickman Mar 10, 2026
ee25c42
Merge branch 'main' into copilot/create-message-compaction-provider
crickman Mar 10, 2026
6fbe1ec
Fix equality, readme, and sample
crickman Mar 10, 2026
3cdb4c0
Readme update and ToolResult fix
crickman Mar 10, 2026
45d689b
Update dotnet/src/Microsoft.Agents.AI/Compaction/SummarizationCompact…
crickman Mar 10, 2026
e79cfd1
Update dotnet/samples/02-agents/Agents/Agent_Step18_CompactionPipelin…
crickman Mar 10, 2026
5023ffc
Simplify readme
crickman Mar 11, 2026
932385b
Merge branch 'copilot/create-message-compaction-provider' of https://…
crickman Mar 11, 2026
e4beb84
Update dotnet/samples/02-agents/Agents/Agent_Step18_CompactionPipelin…
crickman Mar 11, 2026
07c9af7
Remove example
crickman Mar 11, 2026
ffc1603
Merge branch 'copilot/create-message-compaction-provider' of https://…
crickman Mar 11, 2026
f75f796
Remove unused
crickman Mar 11, 2026
339117e
Merge branch 'main' into copilot/create-message-compaction-provider
crickman Mar 11, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions dotnet/Directory.Packages.props
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,7 @@
<!-- Inference SDKs -->
<PackageVersion Include="AWSSDK.Extensions.Bedrock.MEAI" Version="4.0.5.1" />
<PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI" Version="0.10.0" />
<PackageVersion Include="Microsoft.ML.Tokenizers" Version="2.0.0" />
<PackageVersion Include="OllamaSharp" Version="5.4.8" />
<PackageVersion Include="OpenAI" Version="2.8.0" />
<!-- Identity -->
Expand Down
1 change: 1 addition & 0 deletions dotnet/agent-framework-dotnet.slnx
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@
<Project Path="samples/02-agents/Agents/Agent_Step15_DeepResearch/Agent_Step15_DeepResearch.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step16_Declarative/Agent_Step16_Declarative.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step18_CompactionPipeline/Agent_Step18_CompactionPipeline.csproj" />
</Folder>
<Folder Name="/Samples/02-agents/DeclarativeAgents/">
<Project Path="samples/02-agents/DeclarativeAgents/ChatClient/DeclarativeChatClientAgents.csproj" />
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
<Project Sdk="Microsoft.NET.Sdk">

  <!-- Console sample: demonstrates a compaction pipeline managing agent conversation history. -->
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFrameworks>net10.0</TargetFrameworks>

    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
  </PropertyGroup>

  <!-- Package versions are centrally managed via Directory.Packages.props. -->
  <ItemGroup>
    <PackageReference Include="Azure.AI.OpenAI" />
    <PackageReference Include="Azure.Identity" />
    <PackageReference Include="Microsoft.Extensions.AI.OpenAI" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\..\..\src\Microsoft.Agents.AI.OpenAI\Microsoft.Agents.AI.OpenAI.csproj" />
  </ItemGroup>

</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
// Copyright (c) Microsoft. All rights reserved.

// Demonstrates in-run context management with a CompactionProvider registered as an
// AIContextProvider. Four compaction strategies are chained, ordered least to most
// destructive, so gentler strategies run first and aggressive ones only fire when needed:
//   1. ToolResultCompactionStrategy    - condenses aged tool-call groups into short summaries
//   2. SummarizationCompactionStrategy - LLM-compresses older conversation spans
//   3. SlidingWindowCompactionStrategy - retains only the newest N user turns
//   4. TruncationCompactionStrategy    - hard token-budget backstop

using System.ComponentModel;
using Azure.AI.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Compaction;
using Microsoft.Extensions.AI;

string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT")
    ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";

// WARNING: DefaultAzureCredential is handy while developing but probes multiple credential
// sources. In production prefer an explicit credential (e.g. ManagedIdentityCredential)
// to avoid latency, unintended probing, and fallback-related security concerns.
AzureOpenAIClient azureClient = new(new Uri(endpoint), new DefaultAzureCredential());

// One chat client drives the agent; a second drives summarization. The same deployment is
// used here for simplicity — production code would pick a smaller/cheaper summarizer model.
IChatClient primaryChatClient = azureClient.GetChatClient(deploymentName).AsIChatClient();
IChatClient summarizerChatClient = azureClient.GetChatClient(deploymentName).AsIChatClient();

// A tool for the agent so that tool-result compaction has something to act on.
// NOTE: the parameter name and Description strings feed the tool schema sent to the model.
[Description("Look up the current price of a product by name.")]
static string LookupPrice([Description("The product name to look up.")] string productName)
{
    string normalized = productName.ToUpperInvariant();
    if (normalized == "LAPTOP")
    {
        return "The laptop costs $999.99.";
    }

    if (normalized == "KEYBOARD")
    {
        return "The keyboard costs $79.99.";
    }

    if (normalized == "MOUSE")
    {
        return "The mouse costs $29.99.";
    }

    return $"Sorry, I don't have pricing for '{productName}'.";
}

// Assemble the pipeline: one instance of each strategy, gentle first, emergency last.
PipelineCompactionStrategy compactionPipeline = new(
    // 1. Gentle: collapse old tool-call groups into short summaries.
    new ToolResultCompactionStrategy(CompactionTriggers.MessagesExceed(7)),
    // 2. Moderate: LLM-summarize older conversation spans (trigger: 1280 tokens).
    new SummarizationCompactionStrategy(summarizerChatClient, CompactionTriggers.TokensExceed(1280)),
    // 3. Aggressive: keep only the last 4 user turns and their responses.
    new SlidingWindowCompactionStrategy(CompactionTriggers.TurnsExceed(4)),
    // 4. Emergency: drop oldest groups until under a 32768-token budget.
    new TruncationCompactionStrategy(CompactionTriggers.TokensExceed(32768)));

// Build the agent. Registering the CompactionProvider at the ChatClientBuilder level means
// every agent built from this builder gets compaction, covering both agent messages and the
// tool-calling loop. The commented AIContextProviders option below shows the agent-level
// alternative, which skips compaction inside the function-calling loop.
AIAgent agent = primaryChatClient
    .AsBuilder()
    .UseAIContextProviders(new CompactionProvider(compactionPipeline))
    .BuildAIAgent(
        new ChatClientAgentOptions
        {
            Name = "ShoppingAssistant",
            ChatOptions = new()
            {
                Instructions =
                    """
                    You are a helpful, but long winded, shopping assistant.
                    Help the user look up prices and compare products.
                    When responding, Be sure to be extra descriptive and use as
                    many words as possible without sounding ridiculous.
                    """,
                Tools = [AIFunctionFactory.Create(LookupPrice)]
            },
            //AIContextProviders = [new CompactionProvider(compactionPipeline)]
        });

AgentSession session = await agent.CreateSessionAsync();

// Prints the stored message count, when in-memory history is available, so the reader can
// see that compaction does not shrink the source conversation.
void PrintChatHistory()
{
    if (!session.TryGetInMemoryChatHistory(out var history))
    {
        return;
    }

    Console.ForegroundColor = ConsoleColor.Cyan;
    Console.WriteLine($"\n[Messages: #{history.Count}]\n");
    Console.ResetColor();
}

// Writes a cyan speaker label without a trailing newline.
void WriteLabel(string label)
{
    Console.ForegroundColor = ConsoleColor.Cyan;
    Console.Write(label);
    Console.ResetColor();
}

// Seven scripted turns (with tool calls) — enough to trip every strategy's low demo threshold.
string[] prompts =
[
    "What's the price of a laptop?",
    "How about a keyboard?",
    "And a mouse?",
    "Which product is the cheapest?",
    "Can you compare the laptop and the keyboard for me?",
    "What was the first product I asked about?",
    "Thank you!",
];

foreach (string prompt in prompts)
{
    WriteLabel("\n[User] ");
    Console.WriteLine(prompt);
    WriteLabel("\n[Agent] ");
    Console.WriteLine(await agent.RunAsync(prompt, session));

    PrintChatHistory();
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,132 @@
# Compaction Pipeline

This sample demonstrates how to use a `CompactionProvider` with a `PipelineCompactionStrategy` to manage long conversation histories in a token-efficient way. The pipeline chains four compaction strategies, ordered from gentle to aggressive, so that the least disruptive strategy runs first and more aggressive strategies only activate when necessary.

## What This Sample Shows

- **`CompactionProvider`** — an `AIContextProvider` that applies a compaction strategy before each agent invocation, keeping only the most relevant messages within the model's context window
- **`PipelineCompactionStrategy`** — chains multiple compaction strategies into an ordered pipeline; each strategy evaluates its own trigger independently and operates on the output of the previous one
- **`ToolResultCompactionStrategy`** — collapses older tool-call groups into concise inline summaries, activated by a message-count trigger
- **`SummarizationCompactionStrategy`** — uses an LLM to compress older conversation spans into a single summary message, activated by a token-count trigger
- **`SlidingWindowCompactionStrategy`** — retains only the most recent N user turns and their responses, activated by a turn-count trigger
- **`TruncationCompactionStrategy`** — emergency backstop that drops the oldest groups until the conversation fits within a hard token budget
- **`CompactionTriggers`** — factory methods (`MessagesExceed`, `TokensExceed`, `TurnsExceed`, `GroupsExceed`, `HasToolCalls`, `All`, `Any`) that control when each strategy activates

## Concepts

### Message groups

The compaction engine organizes messages into atomic *groups* that are treated as indivisible units during compaction. A group is either:

| Group kind | Contents |
|---|---|
| `System` | System prompt message(s) |
| `User` | A single user message |
| `ToolCall` | One assistant message with tool calls + the matching tool result messages |
| `AssistantText` | A single assistant text-only message |
| `Summary` | One or more messages summarizing earlier conversation spans, produced by compaction strategies |

`Summary` groups (`CompactionGroupKind.Summary`) are created by compaction strategies (for example, `SummarizationCompactionStrategy`) and do not originate directly from user or assistant messages.
Strategies exclude entire groups rather than individual messages, preserving the tool-call/result pairing required by most model APIs.

### Compaction triggers

A `CompactionTrigger` is a predicate evaluated against the current `MessageIndex`. When the trigger fires, the strategy performs compaction; when it does not fire, the strategy is skipped. Available triggers are:

| Trigger | Activates when… |
|---|---|
| `CompactionTriggers.Always` | Always (unconditional) |
| `CompactionTriggers.Never` | Never (disabled) |
| `CompactionTriggers.MessagesExceed(n)` | Included message count > n |
| `CompactionTriggers.TokensExceed(n)` | Included token count > n |
| `CompactionTriggers.TurnsExceed(n)` | Included user-turn count > n |
| `CompactionTriggers.GroupsExceed(n)` | Included group count > n |
| `CompactionTriggers.HasToolCalls()` | At least one included tool-call group exists |
| `CompactionTriggers.All(...)` | All supplied triggers fire (logical AND) |
| `CompactionTriggers.Any(...)` | Any supplied trigger fires (logical OR) |

### Pipeline ordering

Order strategies from **least aggressive** to **most aggressive**. The pipeline runs every strategy whose trigger is met. Earlier strategies reduce the conversation gently so that later, more destructive strategies may not need to activate at all.

```
1. ToolResultCompactionStrategy – gentle: replaces verbose tool results with a short label
2. SummarizationCompactionStrategy – moderate: LLM-summarizes older turns
3. SlidingWindowCompactionStrategy – aggressive: drops turns beyond the window
4. TruncationCompactionStrategy – emergency: hard token-budget enforcement
```

## Prerequisites

- .NET 10 SDK or later
- Azure OpenAI service endpoint and model deployment
- Azure CLI installed and authenticated

**Note**: This sample uses `DefaultAzureCredential`. Sign in with `az login` before running. For production, prefer a specific credential such as `ManagedIdentityCredential`. For more information, see the [Azure CLI authentication documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).

## Environment Variables

```powershell
$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Required
$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
```

## Running the Sample

```powershell
cd dotnet/samples/02-agents/Agents/Agent_Step18_CompactionPipeline
dotnet run
```

## Expected Behavior

The sample runs a seven-turn shopping-assistant conversation with tool calls. After each turn it prints the full message count so you can observe that the pipeline compaction does not alter the source conversation.

Each of the four compaction strategies has a deliberately low threshold so that it activates during the short demonstration conversation. In a production scenario you would raise the thresholds to match your model's context window and cost requirements.

## Customizing the Pipeline

### Using a single strategy

If you only need one compaction strategy, pass it directly to `CompactionProvider` without wrapping it in a pipeline:

```csharp
CompactionProvider provider =
new(new SlidingWindowCompactionStrategy(CompactionTriggers.TurnsExceed(20)));
```

### Ad-hoc compaction outside the provider pipeline

`CompactionProvider.CompactAsync` applies a strategy to an arbitrary list of messages without an active agent session:

```csharp
IEnumerable<ChatMessage> compacted = await CompactionProvider.CompactAsync(
new TruncationCompactionStrategy(CompactionTriggers.TokensExceed(8000)),
existingMessages);
```

### Using a different model for summarization

The `SummarizationCompactionStrategy` accepts any `IChatClient`. Use a smaller, cheaper model to reduce summarization cost:

```csharp
IChatClient summarizerChatClient = openAIClient.GetChatClient("gpt-4o-mini").AsIChatClient();
new SummarizationCompactionStrategy(summarizerChatClient, CompactionTriggers.TokensExceed(4000))
```

### Registering through `ChatClientAgentOptions`

`CompactionProvider` can also be specified directly on `ChatClientAgentOptions` instead of calling `UseAIContextProviders` on the `ChatClientBuilder`:

```csharp
AIAgent agent = agentChatClient
.AsBuilder()
.BuildAIAgent(new ChatClientAgentOptions
{
AIContextProviders = [new CompactionProvider(compactionPipeline)]
});
```

This places the compaction provider at the agent level instead of the chat client level, which allows you to use different compaction strategies for different agents that share the same chat client.

> Note: In this mode the `CompactionProvider` is not engaged during the tool calling loop. Agent-level `AIContextProviders` run before chat history is stored, so any synthetic summary messages produced by `CompactionProvider` can become part of the persisted history when using `ChatHistoryProvider`. If you want to compact only the request context while preserving the original stored history, register `CompactionProvider` on the `ChatClientBuilder` via `UseAIContextProviders(...)` instead of on `ChatClientAgentOptions`.
1 change: 1 addition & 0 deletions dotnet/samples/02-agents/Agents/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ Before you begin, ensure you have the following prerequisites:
|[Deep research with an agent](./Agent_Step15_DeepResearch/)|This sample demonstrates how to use the Deep Research Tool to perform comprehensive research on complex topics|
|[Declarative agent](./Agent_Step16_Declarative/)|This sample demonstrates how to declaratively define an agent.|
|[Providing additional AI Context to an agent using multiple AIContextProviders](./Agent_Step17_AdditionalAIContext/)|This sample demonstrates how to inject additional AI context into a ChatClientAgent using multiple custom AIContextProvider components that are attached to the agent.|
|[Using compaction pipeline with an agent](./Agent_Step18_CompactionPipeline/)|This sample demonstrates how to use a compaction pipeline to efficiently limit the size of the conversation history for an agent.|

## Running the samples from the console

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,20 +79,21 @@ public List<ChatMessage> GetMessages(AgentSession? session)
/// <exception cref="ArgumentNullException"><paramref name="messages"/> is <see langword="null"/>.</exception>
public void SetMessages(AgentSession? session, List<ChatMessage> messages)
{
_ = Throw.IfNull(messages);
Throw.IfNull(messages);

var state = this._sessionState.GetOrInitializeState(session);
State state = this._sessionState.GetOrInitializeState(session);
state.Messages = messages;
}

/// <inheritdoc />
protected override async ValueTask<IEnumerable<ChatMessage>> ProvideChatHistoryAsync(InvokingContext context, CancellationToken cancellationToken = default)
{
var state = this._sessionState.GetOrInitializeState(context.Session);
State state = this._sessionState.GetOrInitializeState(context.Session);

if (this.ReducerTriggerEvent is InMemoryChatHistoryProviderOptions.ChatReducerTriggerEvent.BeforeMessagesRetrieval && this.ChatReducer is not null)
{
state.Messages = (await this.ChatReducer.ReduceAsync(state.Messages, cancellationToken).ConfigureAwait(false)).ToList();
// Apply pre-retrieval reduction if configured
await ReduceMessagesAsync(this.ChatReducer, state, cancellationToken).ConfigureAwait(false);
}

return state.Messages;
Expand All @@ -101,18 +102,24 @@ protected override async ValueTask<IEnumerable<ChatMessage>> ProvideChatHistoryA
/// <inheritdoc />
protected override async ValueTask StoreChatHistoryAsync(InvokedContext context, CancellationToken cancellationToken = default)
{
var state = this._sessionState.GetOrInitializeState(context.Session);
State state = this._sessionState.GetOrInitializeState(context.Session);

// Add request and response messages to the provider
var allNewMessages = context.RequestMessages.Concat(context.ResponseMessages ?? []);
state.Messages.AddRange(allNewMessages);

if (this.ReducerTriggerEvent is InMemoryChatHistoryProviderOptions.ChatReducerTriggerEvent.AfterMessageAdded && this.ChatReducer is not null)
{
state.Messages = (await this.ChatReducer.ReduceAsync(state.Messages, cancellationToken).ConfigureAwait(false)).ToList();
// Apply pre-write reduction strategy if configured
await ReduceMessagesAsync(this.ChatReducer, state, cancellationToken).ConfigureAwait(false);
}
}

/// <summary>
/// Runs the supplied reducer over the session's current messages and replaces the
/// stored list with the reduced result.
/// </summary>
/// <param name="reducer">The chat reducer that produces the condensed message sequence.</param>
/// <param name="state">The session state whose <c>Messages</c> list is read and then replaced.</param>
/// <param name="cancellationToken">Token used to cancel the reduction.</param>
private static async Task ReduceMessagesAsync(IChatReducer reducer, State state, CancellationToken cancellationToken = default)
{
    // Materialize the reduced sequence into a new list; the previous list instance is replaced, not mutated.
    state.Messages = [.. await reducer.ReduceAsync(state.Messages, cancellationToken).ConfigureAwait(false)];
}

/// <summary>
/// Represents the state of a <see cref="InMemoryChatHistoryProvider"/> stored in the <see cref="AgentSession.StateBag"/>.
/// </summary>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ internal static IChatClient WithDefaultAgentMiddleware(this IChatClient chatClie

if (chatClient.GetService<FunctionInvokingChatClient>() is null)
{
_ = chatBuilder.Use((innerClient, services) =>
chatBuilder.Use((innerClient, services) =>
{
var loggerFactory = services.GetService<ILoggerFactory>();

Expand Down
Loading
Loading