From 334ce4dfe08ef7d3f3091dd010b31a19bbaf1e07 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Fri, 24 Apr 2026 11:48:17 -0700 Subject: [PATCH 1/7] Update foundry hosting samples --- .../invocations/01_basic/.env.example | 2 +- .../invocations/01_basic/README.md | 32 ++- .../invocations/01_basic/agent.manifest.yaml | 6 +- .../invocations/01_basic/agent.yaml | 2 +- .../invocations/01_basic/main.py | 6 +- .../invocations/02_break_glass/.env.example | 2 +- .../invocations/02_break_glass/README.md | 32 ++- .../02_break_glass/agent.manifest.yaml | 6 +- .../invocations/02_break_glass/agent.yaml | 2 +- .../invocations/02_break_glass/main.py | 2 +- .../invocations/README.md | 8 - .../responses/01_basic/.env.example | 2 +- .../responses/01_basic/README.md | 30 ++- .../responses/01_basic/agent.manifest.yaml | 10 +- .../responses/01_basic/agent.yaml | 7 +- .../responses/01_basic/main.py | 6 +- .../responses/02_local_tools/.env.example | 2 - .../responses/02_local_tools/README.md | 27 --- .../.dockerignore | 0 .../responses/02_tools/.env.example | 2 + .../{02_local_tools => 02_tools}/Dockerfile | 0 .../responses/02_tools/README.md | 33 +++ .../agent.manifest.yaml | 10 +- .../{02_local_tools => 02_tools}/agent.yaml | 2 +- .../{02_local_tools => 02_tools}/main.py | 6 +- .../requirements.txt | 0 .../{03_remote_mcp => 03_mcp}/.dockerignore | 0 .../{03_remote_mcp => 03_mcp}/.env.example | 3 +- .../{03_remote_mcp => 03_mcp}/Dockerfile | 0 .../responses/03_mcp/README.md | 33 +++ .../agent.manifest.yaml | 12 +- .../responses/03_mcp/agent.yaml | 11 + .../responses/03_mcp/main.py | 56 +++++ .../requirements.txt | 0 .../responses/03_remote_mcp/README.md | 25 -- .../responses/03_remote_mcp/main.py | 76 ------ .../.dockerignore | 0 .../responses/04_foundry_toolbox/.env.example | 3 + .../Dockerfile | 0 .../responses/04_foundry_toolbox/README.md | 43 ++++ .../04_foundry_toolbox/agent.manifest.yaml | 33 +++ .../agent.yaml | 2 +- .../responses/04_foundry_toolbox/main.py | 42 ++++ 
.../04_foundry_toolbox/requirements.txt | 2 + .../responses/04_workflows/.env.example | 2 - .../responses/04_workflows/README.md | 23 -- .../responses/05_workflows/.dockerignore | 6 + .../responses/05_workflows/.env.example | 2 + .../responses/05_workflows/Dockerfile | 16 ++ .../responses/05_workflows/README.md | 43 ++++ .../agent.manifest.yaml | 12 +- .../{04_workflows => 05_workflows}/agent.yaml | 2 +- .../{04_workflows => 05_workflows}/main.py | 6 +- .../requirements.txt | 0 .../foundry-hosted-agents/responses/README.md | 222 +++++++++++++++++- 55 files changed, 653 insertions(+), 259 deletions(-) delete mode 100644 python/samples/04-hosting/foundry-hosted-agents/invocations/README.md delete mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/.env.example delete mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/README.md rename python/samples/04-hosting/foundry-hosted-agents/responses/{02_local_tools => 02_tools}/.dockerignore (100%) create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/.env.example rename python/samples/04-hosting/foundry-hosted-agents/responses/{02_local_tools => 02_tools}/Dockerfile (100%) create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/README.md rename python/samples/04-hosting/foundry-hosted-agents/responses/{02_local_tools => 02_tools}/agent.manifest.yaml (59%) rename python/samples/04-hosting/foundry-hosted-agents/responses/{02_local_tools => 02_tools}/agent.yaml (66%) rename python/samples/04-hosting/foundry-hosted-agents/responses/{02_local_tools => 02_tools}/main.py (93%) rename python/samples/04-hosting/foundry-hosted-agents/responses/{02_local_tools => 02_tools}/requirements.txt (100%) rename python/samples/04-hosting/foundry-hosted-agents/responses/{03_remote_mcp => 03_mcp}/.dockerignore (100%) rename python/samples/04-hosting/foundry-hosted-agents/responses/{03_remote_mcp => 03_mcp}/.env.example 
(50%) rename python/samples/04-hosting/foundry-hosted-agents/responses/{03_remote_mcp => 03_mcp}/Dockerfile (100%) create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/README.md rename python/samples/04-hosting/foundry-hosted-agents/responses/{03_remote_mcp => 03_mcp}/agent.manifest.yaml (61%) create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/agent.yaml create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/main.py rename python/samples/04-hosting/foundry-hosted-agents/responses/{03_remote_mcp => 03_mcp}/requirements.txt (100%) delete mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/README.md delete mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/main.py rename python/samples/04-hosting/foundry-hosted-agents/responses/{04_workflows => 04_foundry_toolbox}/.dockerignore (100%) create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/.env.example rename python/samples/04-hosting/foundry-hosted-agents/responses/{04_workflows => 04_foundry_toolbox}/Dockerfile (100%) create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/README.md create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/agent.manifest.yaml rename python/samples/04-hosting/foundry-hosted-agents/responses/{03_remote_mcp => 04_foundry_toolbox}/agent.yaml (64%) create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/main.py create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/requirements.txt delete mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/.env.example delete mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/README.md create mode 100644 
python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/.dockerignore create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/.env.example create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/Dockerfile create mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/README.md rename python/samples/04-hosting/foundry-hosted-agents/responses/{04_workflows => 05_workflows}/agent.manifest.yaml (58%) rename python/samples/04-hosting/foundry-hosted-agents/responses/{04_workflows => 05_workflows}/agent.yaml (71%) rename python/samples/04-hosting/foundry-hosted-agents/responses/{04_workflows => 05_workflows}/main.py (93%) rename python/samples/04-hosting/foundry-hosted-agents/responses/{04_workflows => 05_workflows}/requirements.txt (100%) diff --git a/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/.env.example b/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/.env.example index fe302a8adb..4d268b931b 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/.env.example +++ b/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/.env.example @@ -1,2 +1,2 @@ FOUNDRY_PROJECT_ENDPOINT="..." -MODEL_DEPLOYMENT_NAME="..." \ No newline at end of file +AZURE_AI_MODEL_DEPLOYMENT_NAME="..." 
\ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/README.md b/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/README.md index 8c14ea897e..307a10cfdd 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/README.md +++ b/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/README.md @@ -1,18 +1,26 @@ -# Basic example of hosting an agent with the `invocations` API +# What this sample demonstrates -## Running the server locally +An [Agent Framework](https://github.com/microsoft/agent-framework) agent hosted using the **Invocations protocol** with session management. Unlike the Responses protocol, the Invocations protocol does **not** provide built-in server-side conversation history — this agent maintains an in-memory session store keyed by `agent_session_id`. In production, replace it with durable storage (Redis, Cosmos DB, etc.) so history survives restarts. -### Environment setup +## How It Works -Follow the instructions in the [Environment setup](../../README.md#environment-setup) section of the README in the parent directory to set up your environment and install dependencies. +### Model Integration -Run the following command to start the server: +The agent uses `FoundryChatClient` from the Agent Framework to create a Responses client from the project endpoint and model deployment. When a request arrives, the handler looks up (or creates) a session by `session_id`, runs the agent with the user message and session context, and returns the reply. The agent supports both streaming (SSE events) and non-streaming (JSON) response modes. -```bash -python main.py -``` +See [main.py](main.py) for the full implementation. 
+ +### Agent Hosting + +The agent is hosted using the [Agent Framework](https://github.com/microsoft/agent-framework) with the `InvocationsHostServer`, which provisions a REST API endpoint compatible with the Azure AI Invocations protocol. + +## Running the Agent Host + +Follow the instructions in the [Running the Agent Host Locally](../../README.md#running-the-agent-host-locally) section of the README in the parent directory to run the agent host. -### Interacting with the agent +## Interacting with the agent + +> Depending on how you run the agent host, you can invoke the agent using `curl` (`Invoke-WebRequest` in PowerShell) or `azd`. Please refer to the [parent README](../../README.md) for more details. Use this README for sample queries you can send to the agent. Send a POST request to the server with a JSON body containing a "message" field to interact with the agent. For example: @@ -22,7 +30,7 @@ curl -X POST http://localhost:8088/invocations -i -H "Content-Type: application/ The server will respond with a JSON object containing the response text. The `-i` flag in the `curl` command includes the HTTP response headers in the output, which includes the session ID that can be used for multi-turn conversations. Here is an example of the response: -```bash +``` HTTP/1.1 200 content-length: 34 content-type: application/json @@ -42,3 +50,7 @@ To have a multi-turn conversation with the agent, take the session ID from the r ```bash curl -X POST http://localhost:8088/invocations?agent_session_id=9370b9d4-cd13-4436-a57f-03b843ac0e17 -i -H "Content-Type: application/json" -d '{"message": "How are you?"}' ``` + +## Deploying the Agent to Foundry + +To host the agent on Foundry, follow the instructions in the [Deploying the Agent to Foundry](../../README.md#deploying-the-agent-to-foundry) section of the README in the parent directory. 
diff --git a/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/agent.manifest.yaml b/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/agent.manifest.yaml index 9ef34e5469..3e8d120d48 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/agent.manifest.yaml +++ b/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/agent.manifest.yaml @@ -15,9 +15,9 @@ template: - protocol: invocations version: 1.0.0 environment_variables: - - name: MODEL_DEPLOYMENT_NAME - value: "{{MODEL_DEPLOYMENT_NAME}}" + - name: AZURE_AI_MODEL_DEPLOYMENT_NAME + value: "{{AZURE_AI_MODEL_DEPLOYMENT_NAME}}" resources: - kind: model id: gpt-4.1-mini - name: MODEL_DEPLOYMENT_NAME \ No newline at end of file + name: AZURE_AI_MODEL_DEPLOYMENT_NAME \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/agent.yaml b/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/agent.yaml index 152179a8e6..f9f9463954 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/agent.yaml +++ b/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/agent.yaml @@ -6,4 +6,4 @@ protocols: version: 1.0.0 resources: cpu: '0.25' - memory: '0.5Gi' + memory: '0.5Gi' \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/main.py b/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/main.py index e939680a59..cf89dd7930 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/main.py +++ b/python/samples/04-hosting/foundry-hosted-agents/invocations/01_basic/main.py @@ -5,7 +5,7 @@ from agent_framework import Agent from agent_framework.foundry import FoundryChatClient from agent_framework_foundry_hosting import InvocationsHostServer -from azure.identity import AzureCliCredential +from azure.identity import DefaultAzureCredential from dotenv 
import load_dotenv # Load environment variables from .env file @@ -15,8 +15,8 @@ def main(): client = FoundryChatClient( project_endpoint=os.environ["FOUNDRY_PROJECT_ENDPOINT"], - model=os.environ["MODEL_DEPLOYMENT_NAME"], - credential=AzureCliCredential(), + model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], + credential=DefaultAzureCredential(), ) agent = Agent( diff --git a/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/.env.example b/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/.env.example index fe302a8adb..4d268b931b 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/.env.example +++ b/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/.env.example @@ -1,2 +1,2 @@ FOUNDRY_PROJECT_ENDPOINT="..." -MODEL_DEPLOYMENT_NAME="..." \ No newline at end of file +AZURE_AI_MODEL_DEPLOYMENT_NAME="..." \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/README.md b/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/README.md index e04207e1d0..ef5257ef1f 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/README.md +++ b/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/README.md @@ -1,20 +1,26 @@ -# Basic example of hosting an agent with the `invocations` API +# What this sample demonstrates -This is the same as the [01_basic](../01_basic/README.md) example, but demonstrates the "break glass" scenario where you can create your own `invoke_handler` to handle specific types of invocations. This is useful when you want to override the default behavior for certain requests or add custom processing logic. +An [Agent Framework](https://github.com/microsoft/agent-framework) agent hosted using the **Invocations protocol** with session management. 
Unlike the Responses protocol, the Invocations protocol does **not** provide built-in server-side conversation history — this agent maintains an in-memory session store keyed by `agent_session_id`. In production, replace it with durable storage (Redis, Cosmos DB, etc.) so history survives restarts. -## Running the server locally +## How It Works -### Environment setup +### Model Integration -Follow the instructions in the [Environment setup](../../README.md#environment-setup) section of the README in the parent directory to set up your environment and install dependencies. +The agent uses `FoundryChatClient` from the Agent Framework to create a Responses client from the project endpoint and model deployment. When a request arrives, the handler looks up (or creates) a session by `session_id`, runs the agent with the user message and session context, and returns the reply. The agent supports both streaming (SSE events) and non-streaming (JSON) response modes. -Run the following command to start the server: +See [main.py](main.py) for the full implementation. -```bash -python main.py -``` +### Agent Hosting + +The agent is hosted using the [Azure AI AgentServer Invocations SDK](https://pypi.org/project/azure-ai-agentserver-invocations/) (`InvocationAgentServerHost`), which provisions a REST API endpoint compatible with the Azure AI Invocations protocol. + +## Running the Agent Host + +Follow the instructions in the [Running the Agent Host Locally](../../README.md#running-the-agent-host-locally) section of the README in the parent directory to run the agent host. -### Interacting with the agent +## Interacting with the agent + +> Depending on how you run the agent host, you can invoke the agent using `curl` (`Invoke-WebRequest` in PowerShell) or `azd`. Please refer to the [parent README](../../README.md) for more details. Use this README for sample queries you can send to the agent. 
Send a POST request to the server with a JSON body containing a "message" field to interact with the agent. For example: @@ -24,7 +30,7 @@ curl -X POST http://localhost:8088/invocations -i -H "Content-Type: application/ The server will respond with a JSON object containing the response text. The `-i` flag in the `curl` command includes the HTTP response headers in the output, which includes the session ID that can be used for multi-turn conversations. Here is an example of the response: -```bash +``` HTTP/1.1 200 content-length: 34 content-type: application/json @@ -44,3 +50,7 @@ To have a multi-turn conversation with the agent, take the session ID from the r ```bash curl -X POST http://localhost:8088/invocations?agent_session_id=9370b9d4-cd13-4436-a57f-03b843ac0e17 -i -H "Content-Type: application/json" -d '{"message": "How are you?"}' ``` + +## Deploying the Agent to Foundry + +To host the agent on Foundry, follow the instructions in the [Deploying the Agent to Foundry](../../README.md#deploying-the-agent-to-foundry) section of the README in the parent directory. 
\ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/agent.manifest.yaml b/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/agent.manifest.yaml index 9ef34e5469..3e8d120d48 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/agent.manifest.yaml +++ b/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/agent.manifest.yaml @@ -15,9 +15,9 @@ template: - protocol: invocations version: 1.0.0 environment_variables: - - name: MODEL_DEPLOYMENT_NAME - value: "{{MODEL_DEPLOYMENT_NAME}}" + - name: AZURE_AI_MODEL_DEPLOYMENT_NAME + value: "{{AZURE_AI_MODEL_DEPLOYMENT_NAME}}" resources: - kind: model id: gpt-4.1-mini - name: MODEL_DEPLOYMENT_NAME \ No newline at end of file + name: AZURE_AI_MODEL_DEPLOYMENT_NAME \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/agent.yaml b/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/agent.yaml index 152179a8e6..f9f9463954 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/agent.yaml +++ b/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/agent.yaml @@ -6,4 +6,4 @@ protocols: version: 1.0.0 resources: cpu: '0.25' - memory: '0.5Gi' + memory: '0.5Gi' \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/main.py b/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/main.py index 3d63ac211c..f143b13840 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/main.py +++ b/python/samples/04-hosting/foundry-hosted-agents/invocations/02_break_glass/main.py @@ -22,7 +22,7 @@ # Create the agent client = FoundryChatClient( project_endpoint=os.environ["FOUNDRY_PROJECT_ENDPOINT"], - model=os.environ["MODEL_DEPLOYMENT_NAME"], + 
model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], credential=DefaultAzureCredential(), ) diff --git a/python/samples/04-hosting/foundry-hosted-agents/invocations/README.md b/python/samples/04-hosting/foundry-hosted-agents/invocations/README.md deleted file mode 100644 index 0cba373c7b..0000000000 --- a/python/samples/04-hosting/foundry-hosted-agents/invocations/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# Hosting agents with Foundry Hosting and the `invocations` API - -This folder contains a list of samples that show how to host agents using the `invocations` API and deploy them to Foundry Hosting. - -| Sample | Description | -| --- | --- | -| [01_basic](./01_basic) | A basic example of hosting an agent with the `invocations` API and carrying on a multi-turn conversation. | -| [02_break_glass](./02_break_glass) | An example of hosting an agent with the `invocations` API and a "break glass" scenario where you can create your own `invoke_handler` to handle specific types of invocations. | diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/.env.example b/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/.env.example index fe302a8adb..4d268b931b 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/.env.example +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/.env.example @@ -1,2 +1,2 @@ FOUNDRY_PROJECT_ENDPOINT="..." -MODEL_DEPLOYMENT_NAME="..." \ No newline at end of file +AZURE_AI_MODEL_DEPLOYMENT_NAME="..." 
\ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/README.md index 9e4b36a77d..7081a581e9 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/README.md +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/README.md @@ -1,31 +1,39 @@ -# Basic example of hosting an agent with the `responses` API +# What this sample demonstrates -This agent only contains an instruction (personal). It's the most basic agent with an LLM and no tools. +An [Agent Framework](https://github.com/microsoft/agent-framework) agent hosted using the **Responses protocol**. -## Running the server locally +## How It Works -### Environment setup +### Model Integration -Follow the instructions in the [Environment setup](../../README.md#environment-setup) section of the README in the parent directory to set up your environment and install dependencies. +The agent uses `FoundryChatClient` from the Agent Framework to create a Responses client from the project endpoint and model deployment. The agent supports both streaming (SSE events) and non-streaming (JSON) response modes. -Run the following command to start the server: +See [main.py](main.py) for the full implementation. -```bash -python main.py -``` +### Agent Hosting + +The agent is hosted using the [Agent Framework](https://github.com/microsoft/agent-framework) with the `ResponsesHostServer`, which provisions a REST API endpoint compatible with the OpenAI Responses protocol. ## Interacting with the agent -Send a POST request to the server with a JSON body containing a "input" field to interact with the agent. For example: +> Depending on how you run the agent host, you can invoke the agent using `curl` (`Invoke-WebRequest` in PowerShell) or `azd`. Please refer to the [parent README](../../README.md) for more details. 
Use this README for sample queries you can send to the agent. + +Send a POST request to the server with a JSON body containing a "message" field to interact with the agent. For example: ```bash curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "Hi"}' ``` -## Multi-turn conversation +The server will respond with a JSON object containing the response text and a response ID. You can use this response ID to continue the conversation in subsequent requests. + +### Multi-turn conversation To have a multi-turn conversation with the agent, include the previous response id in the request body. For example: ```bash curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "How are you?", "previous_response_id": "REPLACE_WITH_PREVIOUS_RESPONSE_ID"}' ``` + +## Deploying the Agent to Foundry + +To host the agent on Foundry, follow the instructions in the [Deploying the Agent to Foundry](../../README.md#deploying-the-agent-to-foundry) section of the README in the parent directory. diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/agent.manifest.yaml b/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/agent.manifest.yaml index 4f4749af25..ef8db59274 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/agent.manifest.yaml +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/agent.manifest.yaml @@ -1,4 +1,4 @@ -name: agent-framework-agent-basic +name: agent-framework-agent-basic-responses description: > A basic Agent Framework agent hosted by Foundry. 
metadata: @@ -9,15 +9,15 @@ metadata: - Responses Protocol - Streaming template: - name: agent-framework-agent-basic + name: agent-framework-agent-basic-responses kind: hosted protocols: - protocol: responses version: 1.0.0 environment_variables: - - name: MODEL_DEPLOYMENT_NAME - value: "{{MODEL_DEPLOYMENT_NAME}}" + - name: AZURE_AI_MODEL_DEPLOYMENT_NAME + value: "{{AZURE_AI_MODEL_DEPLOYMENT_NAME}}" resources: - kind: model id: gpt-4.1-mini - name: MODEL_DEPLOYMENT_NAME \ No newline at end of file + name: AZURE_AI_MODEL_DEPLOYMENT_NAME \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/agent.yaml b/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/agent.yaml index 5b14606961..eee9883579 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/agent.yaml +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/agent.yaml @@ -1,8 +1,9 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/ContainerAgent.yaml kind: hosted -name: agent-framework-agent-basic +name: agent-framework-agent-basic-responses protocols: - protocol: responses version: 1.0.0 resources: - cpu: "0.25" - memory: 0.5Gi \ No newline at end of file + cpu: '0.25' + memory: '0.5Gi' \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/main.py b/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/main.py index 4b10c9a089..010b1fc408 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/main.py +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/main.py @@ -5,7 +5,7 @@ from agent_framework import Agent from agent_framework.foundry import FoundryChatClient from agent_framework_foundry_hosting import ResponsesHostServer -from azure.identity import AzureCliCredential +from azure.identity import DefaultAzureCredential 
from dotenv import load_dotenv # Load environment variables from .env file @@ -15,8 +15,8 @@ def main(): client = FoundryChatClient( project_endpoint=os.environ["FOUNDRY_PROJECT_ENDPOINT"], - model=os.environ["MODEL_DEPLOYMENT_NAME"], - credential=AzureCliCredential(), + model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], + credential=DefaultAzureCredential(), ) agent = Agent( diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/.env.example b/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/.env.example deleted file mode 100644 index fe302a8adb..0000000000 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/.env.example +++ /dev/null @@ -1,2 +0,0 @@ -FOUNDRY_PROJECT_ENDPOINT="..." -MODEL_DEPLOYMENT_NAME="..." \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/README.md deleted file mode 100644 index d8bfd7146e..0000000000 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/README.md +++ /dev/null @@ -1,27 +0,0 @@ -# Basic example of hosting an agent with the `responses` API and local tools - -This agent is equipped with a function tool and a local shell tool. - -> We recommend deploying this sample on a local container or to Foundry Hosting because the agent has access to a local shell tool, which can run arbitrary commands on the machine. - -## Running the server locally - -### Environment setup - -Follow the instructions in the [Environment setup](../../README.md#environment-setup) section of the README in the parent directory to set up your environment and install dependencies. - -Run the following command to start the server: - -```bash -python main.py -``` - -## Interacting with the agent - -Send a POST request to the server with a JSON body containing a "input" field to interact with the agent. 
For example: - -```bash -curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "What is the weather in Seattle?"}' - -curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "List the files in the current directory."}' -``` diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/.dockerignore b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/.dockerignore similarity index 100% rename from python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/.dockerignore rename to python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/.dockerignore diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/.env.example b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/.env.example new file mode 100644 index 0000000000..4d268b931b --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/.env.example @@ -0,0 +1,2 @@ +FOUNDRY_PROJECT_ENDPOINT="..." +AZURE_AI_MODEL_DEPLOYMENT_NAME="..." 
\ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/Dockerfile b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/Dockerfile similarity index 100% rename from python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/Dockerfile rename to python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/Dockerfile diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/README.md new file mode 100644 index 0000000000..e08eaf98f3 --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/README.md @@ -0,0 +1,33 @@ +# What this sample demonstrates + +An [Agent Framework](https://github.com/microsoft/agent-framework) agent with **locally-defined Python tools** hosted using the **Responses protocol**. It shows how to define custom tools with the `@tool` decorator and register them with the agent so the model can call them during a conversation. + +## How It Works + +### Model Integration + +The agent uses `FoundryChatClient` from the Agent Framework to create a Responses client from the project endpoint and model deployment. The agent supports both streaming (SSE events) and non-streaming (JSON) response modes. + +See [main.py](main.py) for the full implementation. + +### Agent Hosting + +The agent is hosted using the [Agent Framework](https://github.com/microsoft/agent-framework) with the `ResponsesHostServer`, which provisions a REST API endpoint compatible with the OpenAI Responses protocol. + +## Running the Agent Host + +Follow the instructions in the [Running the Agent Host Locally](../../README.md#running-the-agent-host-locally) section of the README in the parent directory to run the agent host. 
+ +## Interacting with the agent + +> Depending on how you run the agent host, you can invoke the agent using `curl` (`Invoke-WebRequest` in PowerShell) or `azd`. Please refer to the [parent README](../../README.md) for more details. Use this README for sample queries you can send to the agent. + +Send a POST request to the server with a JSON body containing a "message" field to interact with the agent. For example: + +```bash +curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "What is the weather in Seattle?"}' +``` + +## Deploying the Agent to Foundry + +To host the agent on Foundry, follow the instructions in the [Deploying the Agent to Foundry](../../README.md#deploying-the-agent-to-foundry) section of the README in the parent directory. diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/agent.manifest.yaml b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/agent.manifest.yaml similarity index 59% rename from python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/agent.manifest.yaml rename to python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/agent.manifest.yaml index ea8c6010ec..28d5fe1c0b 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/agent.manifest.yaml +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/agent.manifest.yaml @@ -1,4 +1,4 @@ -name: agent-framework-agent-with-local-tools +name: agent-framework-agent-with-local-tools-responses description: > An Agent Framework agent with local tools hosted by Foundry. 
metadata: @@ -9,15 +9,15 @@ metadata: - Responses Protocol - Streaming template: - name: agent-framework-agent-with-local-tools + name: agent-framework-agent-with-local-tools-responses kind: hosted protocols: - protocol: responses version: 1.0.0 environment_variables: - - name: MODEL_DEPLOYMENT_NAME - value: "{{MODEL_DEPLOYMENT_NAME}}" + - name: AZURE_AI_MODEL_DEPLOYMENT_NAME + value: "{{AZURE_AI_MODEL_DEPLOYMENT_NAME}}" resources: - kind: model id: gpt-4.1-mini - name: MODEL_DEPLOYMENT_NAME \ No newline at end of file + name: AZURE_AI_MODEL_DEPLOYMENT_NAME \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/agent.yaml b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/agent.yaml similarity index 66% rename from python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/agent.yaml rename to python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/agent.yaml index 59fc4f8f73..6cabe7b799 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/agent.yaml +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/agent.yaml @@ -1,5 +1,5 @@ kind: hosted -name: agent-framework-agent-with-local-tools +name: agent-framework-agent-with-local-tools-responses protocols: - protocol: responses version: 1.0.0 diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/main.py b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/main.py similarity index 93% rename from python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/main.py rename to python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/main.py index 02433bb3ca..43b77b9fe0 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/main.py +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/main.py @@ -8,7 +8,7 @@ from agent_framework import Agent, tool from 
agent_framework.foundry import FoundryChatClient from agent_framework_foundry_hosting import ResponsesHostServer -from azure.identity import AzureCliCredential +from azure.identity import DefaultAzureCredential from dotenv import load_dotenv from pydantic import Field @@ -52,8 +52,8 @@ def run_bash(command: str) -> str: def main(): client = FoundryChatClient( project_endpoint=os.environ["FOUNDRY_PROJECT_ENDPOINT"], - model=os.environ["MODEL_DEPLOYMENT_NAME"], - credential=AzureCliCredential(), + model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], + credential=DefaultAzureCredential(), ) agent = Agent( diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/requirements.txt b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/requirements.txt similarity index 100% rename from python/samples/04-hosting/foundry-hosted-agents/responses/02_local_tools/requirements.txt rename to python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/requirements.txt diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/.dockerignore b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/.dockerignore similarity index 100% rename from python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/.dockerignore rename to python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/.dockerignore diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/.env.example b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/.env.example similarity index 50% rename from python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/.env.example rename to python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/.env.example index e76ca18af9..bdda1d3404 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/.env.example +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/.env.example 
@@ -1,4 +1,3 @@ FOUNDRY_PROJECT_ENDPOINT="..." -MODEL_DEPLOYMENT_NAME="..." -TOOLBOX_NAME="..." +AZURE_AI_MODEL_DEPLOYMENT_NAME="..." GITHUB_PAT="..." \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/Dockerfile b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/Dockerfile similarity index 100% rename from python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/Dockerfile rename to python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/Dockerfile diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/README.md new file mode 100644 index 0000000000..ac43d4f1df --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/README.md @@ -0,0 +1,33 @@ +# What this sample demonstrates + +An [Agent Framework](https://github.com/microsoft/agent-framework) agent that connects to a **remote MCP server** (GitHub) for tool discovery and hosted using the **Responses protocol**. Instead of defining tools locally, the agent discovers and invokes tools at runtime from an MCP-compatible endpoint — in this case, the GitHub Copilot MCP server. This enables dynamic tool integration without redeployment. + +## How It Works + +### Model Integration + +The agent uses `FoundryChatClient` from the Agent Framework to create an OpenAI-compatible Responses client. It registers a remote MCP tool pointing at `https://api.githubcopilot.com/mcp/`, authenticating with a GitHub Personal Access Token (PAT). When the model decides to call a tool, the framework forwards the call to the MCP server and returns the result to the model for the final reply. + +See [main.py](main.py) for the full implementation. 
+ +### Agent Hosting + +The agent is hosted using the [Agent Framework](https://github.com/microsoft/agent-framework) with the `ResponsesHostServer`, which provisions a REST API endpoint compatible with the OpenAI Responses protocol. + +## Running the Agent Host + +Follow the instructions in the [Running the Agent Host Locally](../../README.md#running-the-agent-host-locally) section of the README in the parent directory to run the agent host. + +## Interacting with the agent + +> Depending on how you run the agent host, you can invoke the agent using `curl` (`Invoke-WebRequest` in PowerShell) or `azd`. Please refer to the [parent README](../../README.md) for more details. Use this README for sample queries you can send to the agent. + +Send a POST request to the server with a JSON body containing an "input" field to interact with the agent. For example: + +```bash +curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "List all the repositories I own on GitHub."}' +``` + +## Deploying the Agent to Foundry + +To host the agent on Foundry, follow the instructions in the [Deploying the Agent to Foundry](../../README.md#deploying-the-agent-to-foundry) section of the README in the parent directory. 
\ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/agent.manifest.yaml b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/agent.manifest.yaml similarity index 61% rename from python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/agent.manifest.yaml rename to python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/agent.manifest.yaml index 4f1bd75d3e..655a4ee43d 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/agent.manifest.yaml +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/agent.manifest.yaml @@ -1,4 +1,4 @@ -name: agent-framework-agent-with-remote-mcp-tools +name: agent-framework-agent-with-remote-mcp-tools-responses description: > An Agent Framework agent with remote MCP tools hosted by Foundry. metadata: @@ -9,19 +9,17 @@ metadata: - Responses Protocol - Streaming template: - name: agent-framework-agent-with-remote-mcp-tools + name: agent-framework-agent-with-remote-mcp-tools-responses kind: hosted protocols: - protocol: responses version: 1.0.0 environment_variables: - - name: MODEL_DEPLOYMENT_NAME - value: "{{MODEL_DEPLOYMENT_NAME}}" + - name: AZURE_AI_MODEL_DEPLOYMENT_NAME + value: "{{AZURE_AI_MODEL_DEPLOYMENT_NAME}}" - name: GITHUB_PAT value: ${GITHUB_PAT} - - name: TOOLBOX_NAME - value: ${TOOLBOX_NAME} resources: - kind: model id: gpt-4.1-mini - name: MODEL_DEPLOYMENT_NAME + name: AZURE_AI_MODEL_DEPLOYMENT_NAME \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/agent.yaml b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/agent.yaml new file mode 100644 index 0000000000..a1edfa8c71 --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/agent.yaml @@ -0,0 +1,11 @@ +kind: hosted +name: agent-framework-agent-with-remote-mcp-tools-responses +protocols: + - protocol: responses + version: 1.0.0 
+resources: + cpu: "0.25" + memory: 0.5Gi +environment_variables: + - name: GITHUB_PAT + value: ${GITHUB_PAT} \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/main.py b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/main.py new file mode 100644 index 0000000000..487e994af5 --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/main.py @@ -0,0 +1,56 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +import os + +from agent_framework import Agent, ToolTypes +from agent_framework.foundry import FoundryChatClient +from agent_framework_foundry_hosting import ResponsesHostServer +from azure.identity import DefaultAzureCredential +from dotenv import load_dotenv + +# Load environment variables from .env file +load_dotenv() + +logger = logging.getLogger(__name__) + + +def main(): + client = FoundryChatClient( + project_endpoint=os.environ["FOUNDRY_PROJECT_ENDPOINT"], + model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], + credential=DefaultAzureCredential(), + ) + + github_pat = os.environ.get("GITHUB_PAT") + tools: list[ToolTypes] = [] + if not github_pat: + logger.warning("GITHUB_PAT environment variable is not set. The GitHub MCP tool will not get registered.") + else: + tools.append( + client.get_mcp_tool( + name="GitHub", + url="https://api.githubcopilot.com/mcp/", + headers={ + "Authorization": f"Bearer {github_pat}", + }, + approval_mode="never_require", + ) + ) + + agent = Agent( + client=client, + instructions="You are a friendly assistant. Keep your answers brief.", + tools=tools, + # History will be managed by the hosting infrastructure, thus there + # is no need to store history by the service. 
Learn more at: + # https://developers.openai.com/api/reference/resources/responses/methods/create + default_options={"store": False}, + ) + + server = ResponsesHostServer(agent) + server.run() + + +if __name__ == "__main__": + main() diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/requirements.txt b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/requirements.txt similarity index 100% rename from python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/requirements.txt rename to python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/requirements.txt diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/README.md deleted file mode 100644 index 0c41817a4e..0000000000 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# Basic example of hosting an agent with the `responses` API and a remote MCP - -This agent is equipped with a GitHub MCP server and a Foundry Toolbox, which are both remote MCPs. - -> Note that there are other ways to interact with Foundry toolboxes. Using it as a MCP is just one of the options. - -## Running the server locally - -### Environment setup - -Follow the instructions in the [Environment setup](../../README.md#environment-setup) section of the README in the parent directory to set up your environment and install dependencies. - -Run the following command to start the server: - -```bash -python main.py -``` - -## Interacting with the agent - -Send a POST request to the server with a JSON body containing a "input" field to interact with the agent. 
For example: - -```bash -curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "List all the repositories I own on GitHub."}' -``` diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/main.py b/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/main.py deleted file mode 100644 index a1c2718887..0000000000 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/main.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import os - -import httpx -from agent_framework import Agent, MCPStreamableHTTPTool -from agent_framework.foundry import FoundryChatClient -from agent_framework_foundry_hosting import ResponsesHostServer -from azure.identity import AzureCliCredential -from dotenv import load_dotenv - -# Load environment variables from .env file -load_dotenv() - - -class ToolboxAuth(httpx.Auth): - """httpx Auth that injects a fresh bearer token on every request.""" - - def auth_flow(self, request: httpx.Request): - credential = AzureCliCredential() - token = credential.get_token("https://ai.azure.com/.default").token - request.headers["Authorization"] = f"Bearer {token}" - yield request - - -def main(): - client = FoundryChatClient( - project_endpoint=os.environ["FOUNDRY_PROJECT_ENDPOINT"], - model=os.environ["MODEL_DEPLOYMENT_NAME"], - credential=AzureCliCredential(), - ) - - # Foundry Toolbox as a MCP tool - project_endpoint = os.environ["FOUNDRY_PROJECT_ENDPOINT"] - toolbox_name = os.environ["TOOLBOX_NAME"] - toolbox_endpoint = f"{project_endpoint.rstrip('/')}/toolboxes/{toolbox_name}/mcp?api-version=v1" - http_client = httpx.AsyncClient(auth=ToolboxAuth(), headers={"Foundry-Features": "Toolboxes=V1Preview"}) - foundry_mcp_tool = MCPStreamableHTTPTool( - name="toolbox", - url=toolbox_endpoint, - http_client=http_client, - load_prompts=False, - ) - - # GitHub MCP server - github_pat = os.environ["GITHUB_PAT"] - if not 
github_pat: - raise ValueError( - "GITHUB_PAT environment variable must be set. Create a token at https://github.com/settings/tokens" - ) - - github_mcp_tool = client.get_mcp_tool( - name="GitHub", - url="https://api.githubcopilot.com/mcp/", - headers={ - "Authorization": f"Bearer {github_pat}", - }, - approval_mode="never_require", - ) - - agent = Agent( - client=client, - instructions="You are a friendly assistant. Keep your answers brief.", - tools=[foundry_mcp_tool, github_mcp_tool], - # History will be managed by the hosting infrastructure, thus there - # is no need to store history by the service. Learn more at: - # https://developers.openai.com/api/reference/resources/responses/methods/create - default_options={"store": False}, - ) - - server = ResponsesHostServer(agent) - server.run() - - -if __name__ == "__main__": - main() diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/.dockerignore b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/.dockerignore similarity index 100% rename from python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/.dockerignore rename to python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/.dockerignore diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/.env.example b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/.env.example new file mode 100644 index 0000000000..91ae96ac46 --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/.env.example @@ -0,0 +1,3 @@ +FOUNDRY_PROJECT_ENDPOINT="..." +AZURE_AI_MODEL_DEPLOYMENT_NAME="..." +TOOLBOX_NAME="..." 
\ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/Dockerfile b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/Dockerfile similarity index 100% rename from python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/Dockerfile rename to python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/Dockerfile diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/README.md new file mode 100644 index 0000000000..d3358cdc04 --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/README.md @@ -0,0 +1,43 @@ +# What this sample demonstrates + +An [Agent Framework](https://github.com/microsoft/agent-framework) agent that uses **Foundry Toolbox** for tool discovery and hosted using the **Responses protocol**. Foundry Toolbox is a managed tool registry in Microsoft Foundry that lets you define tools centrally and share them across agents. + +## Creating a Foundry Toolbox + +You can create a Foundry Toolbox by code. Refer to this sample for an example: [Foundry Toolbox CRUD Sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/hosted_agents/sample_toolboxes_crud.py). + +You can also create a Foundry Toolbox in the Foundry portal. Read more about it [in the Foundry toolbox documentation](https://learn.microsoft.com/en-us/azure/foundry/agents/how-to/tools/toolbox). + +> If you set up a project with this sample and provision the resources using `azd provision`, a Foundry Toolbox will be created with the specified tools in [`agent.manifest.yaml`](agent.manifest.yaml). + +## How It Works + +### Model Integration + +The agent uses `FoundryChatClient` from the Agent Framework to create an OpenAI-compatible Responses client. 
It loads a named Foundry Toolbox via `client.get_toolbox(name)` — the toolbox is a server-side bundle of tool configurations (e.g., `code_interpreter`, `web_search`) defined in the Foundry portal or by `azd provision`. Omitting `version` resolves the toolbox's current default version at runtime. + +The toolbox is then handed directly to the agent via `tools=toolbox`, so the agent can use every tool the toolbox defines (here, both `web_search` and `code_interpreter`). To reuse one toolbox across agents that each expose only the tools they need, narrow the toolbox to a subset of tool types before passing it to the agent. + +See [main.py](main.py) for the full implementation. + +### Agent Hosting + +The agent is hosted using the [Agent Framework](https://github.com/microsoft/agent-framework) with the `ResponsesHostServer`, which provisions a REST API endpoint compatible with the OpenAI Responses protocol. + +## Running the Agent Host + +Follow the instructions in the [Running the Agent Host Locally](../../README.md#running-the-agent-host-locally) section of the README in the parent directory to run the agent host. + +## Interacting with the agent + +> Depending on how you run the agent host, you can invoke the agent using `curl` (`Invoke-WebRequest` in PowerShell) or `azd`. Please refer to the [parent README](../../README.md) for more details. Use this README for sample queries you can send to the agent. + +Send a POST request to the server with a JSON body containing an "input" field to interact with the agent. For example: + +```bash +curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "What tools do you have?"}' +``` + +## Deploying the Agent to Foundry + +To host the agent on Foundry, follow the instructions in the [Deploying the Agent to Foundry](../../README.md#deploying-the-agent-to-foundry) section of the README in the parent directory. 
diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/agent.manifest.yaml b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/agent.manifest.yaml new file mode 100644 index 0000000000..c6df32950b --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/agent.manifest.yaml @@ -0,0 +1,33 @@ +name: agent-framework-agent-with-foundry-toolbox-responses +description: > + An Agent Framework agent with Foundry Toolbox integration. +metadata: + tags: + - Agent Framework + - AI Agent Hosting + - Azure AI AgentServer + - Responses Protocol + - Streaming +template: + name: agent-framework-agent-with-foundry-toolbox-responses + kind: hosted + protocols: + - protocol: responses + version: 1.0.0 + environment_variables: + - name: AZURE_AI_MODEL_DEPLOYMENT_NAME + value: "{{AZURE_AI_MODEL_DEPLOYMENT_NAME}}" + - name: TOOLBOX_NAME + value: "agent-tools" +resources: + - kind: model + id: gpt-4.1-mini + name: AZURE_AI_MODEL_DEPLOYMENT_NAME + - kind: toolbox + name: agent-tools + tools: + - type: web_search + name: web_search + - type: code_interpreter + name: code_interpreter + diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/agent.yaml b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/agent.yaml similarity index 64% rename from python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/agent.yaml rename to python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/agent.yaml index d0ce27c958..f4a3bd9a8c 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/03_remote_mcp/agent.yaml +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/agent.yaml @@ -1,5 +1,5 @@ kind: hosted -name: agent-framework-agent-with-remote-mcp-tools +name: agent-framework-agent-with-foundry-toolbox-responses protocols: - protocol: responses version: 1.0.0 diff --git 
a/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/main.py b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/main.py new file mode 100644 index 0000000000..6b82811c66 --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/main.py @@ -0,0 +1,42 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from agent_framework import Agent +from agent_framework.foundry import FoundryChatClient +from agent_framework_foundry_hosting import ResponsesHostServer +from azure.identity import DefaultAzureCredential +from dotenv import load_dotenv + +# Load environment variables from .env file +load_dotenv() + + +async def main(): + client = FoundryChatClient( + project_endpoint=os.environ["FOUNDRY_PROJECT_ENDPOINT"], + model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], + credential=DefaultAzureCredential(), + ) + + # Load the named toolbox from the Foundry project. Omitting `version` + # resolves the toolbox's current default version at runtime. + toolbox = await client.get_toolbox(os.environ["TOOLBOX_NAME"]) + + agent = Agent( + client=client, + instructions="You are a friendly assistant. Keep your answers brief.", + tools=toolbox, + # History will be managed by the hosting infrastructure, thus there + # is no need to store history by the service. 
Learn more at: + # https://developers.openai.com/api/reference/resources/responses/methods/create + default_options={"store": False}, + ) + + server = ResponsesHostServer(agent) + await server.run_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/requirements.txt b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/requirements.txt new file mode 100644 index 0000000000..1ed4f3c7d4 --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/requirements.txt @@ -0,0 +1,2 @@ +agent-framework +agent-framework-foundry-hosting diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/.env.example b/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/.env.example deleted file mode 100644 index fe302a8adb..0000000000 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/.env.example +++ /dev/null @@ -1,2 +0,0 @@ -FOUNDRY_PROJECT_ENDPOINT="..." -MODEL_DEPLOYMENT_NAME="..." \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/README.md deleted file mode 100644 index 0d93cf2e62..0000000000 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/README.md +++ /dev/null @@ -1,23 +0,0 @@ -# Basic example of hosting an agent with the `responses` API and a workflow - -This sample demonstrates how to host a workflow using the `responses` API. - -## Running the server locally - -### Environment setup - -Follow the instructions in the [Environment setup](../../README.md#environment-setup) section of the README in the parent directory to set up your environment and install dependencies. 
- -Run the following command to start the server: - -```bash -python main.py -``` - -## Interacting with the agent - -Send a POST request to the server with a JSON body containing a "input" field to interact with the agent. For example: - -```bash -curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "Create a slogan for a new electric SUV that is affordable and fun to drive."}' -``` diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/.dockerignore b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/.dockerignore new file mode 100644 index 0000000000..008e6e6616 --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/.dockerignore @@ -0,0 +1,6 @@ +.venv +__pycache__ +*.pyc +*.pyo +*.pyd +.Python \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/.env.example b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/.env.example new file mode 100644 index 0000000000..4d268b931b --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/.env.example @@ -0,0 +1,2 @@ +FOUNDRY_PROJECT_ENDPOINT="..." +AZURE_AI_MODEL_DEPLOYMENT_NAME="..." \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/Dockerfile b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/Dockerfile new file mode 100644 index 0000000000..eaffb94f19 --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/Dockerfile @@ -0,0 +1,16 @@ +FROM python:3.12-slim + +WORKDIR /app + +COPY . 
user_agent/ +WORKDIR /app/user_agent + +RUN if [ -f requirements.txt ]; then \ + pip install -r requirements.txt; \ + else \ + echo "No requirements.txt found"; \ + fi + +EXPOSE 8088 + +CMD ["python", "main.py"] \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/README.md new file mode 100644 index 0000000000..fe1228f5a1 --- /dev/null +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/README.md @@ -0,0 +1,43 @@ +# What this sample demonstrates + +An [Agent Framework](https://github.com/microsoft/agent-framework) workflow demonstrating **multi-agent chaining** and hosted using the **Responses protocol**. It shows how to use the Agent Framework's `WorkflowBuilder` to compose a pipeline of specialized agents — a slogan writer, a legal reviewer, and a formatter — that process a request sequentially. Each agent receives only the output of the previous agent, and only the final formatted result is returned to the caller. + +> The workflow will be used as an agent. Read more about Agent Framework workflows in the [Agent Framework documentation](https://learn.microsoft.com/en-us/agent-framework/workflows/) and workflow as an agent in the [Workflow as an Agent documentation](https://learn.microsoft.com/en-us/agent-framework/workflows/as-agents?pivots=programming-language-python). + +> This sample requires a more advanced model because the model needs to continue the conversation from an assistant message. Not all models perform well in this scenario. Tested with OpenAI's model `gpt-5.4`. + +## How It Works + +### Model Integration + +The agent creates three specialized `Agent` instances sharing the same `FoundryChatClient`: a **writer** that generates slogans, a **legal reviewer** that ensures compliance, and a **formatter** that styles the output. 
Each agent is wrapped in an `AgentExecutor` with `context_mode="last_agent"` so it only sees the previous agent's output. The `WorkflowBuilder` wires them into a linear pipeline and limits the output to the formatter's result. + +See [main.py](main.py) for the full implementation. + +### Agent Hosting + +The workflow is exposed as a single agent via `.as_agent()` and hosted using the [Agent Framework](https://github.com/microsoft/agent-framework) with the `ResponsesHostServer`, which provisions a REST API endpoint compatible with the OpenAI Responses protocol. + +## Running the Agent Host + +Follow the instructions in the [Running the Agent Host Locally](../../README.md#running-the-agent-host-locally) section of the README in the parent directory to run the agent host. + +## Interacting with the agent + +> Depending on how you run the agent host, you can invoke the agent using `curl` (`Invoke-WebRequest` in PowerShell) or `azd`. Please refer to the [parent README](../../README.md) for more details. Use this README for sample queries you can send to the agent. + +Send a POST request to the server with a JSON body containing an "input" field to interact with the agent. For example: + +```bash +curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "Create a slogan for a new electric SUV that is affordable and fun to drive."}' +``` + +Invoke with `azd`: + +```bash +azd ai agent invoke --local "Create a slogan for a new electric SUV that is affordable and fun to drive." +``` + +## Deploying the Agent to Foundry + +To host the agent on Foundry, follow the instructions in the [Deploying the Agent to Foundry](../../README.md#deploying-the-agent-to-foundry) section of the README in the parent directory. 
diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/agent.manifest.yaml b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/agent.manifest.yaml similarity index 58% rename from python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/agent.manifest.yaml rename to python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/agent.manifest.yaml index d561ec043a..55192cbba7 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/agent.manifest.yaml +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/agent.manifest.yaml @@ -1,4 +1,4 @@ -name: agent-framework-workflows +name: agent-framework-workflows-responses description: > An Agent Framework workflow hosted by Foundry. metadata: @@ -9,15 +9,15 @@ metadata: - Responses Protocol - Streaming template: - name: agent-framework-workflows + name: agent-framework-workflows-responses kind: hosted protocols: - protocol: responses version: 1.0.0 environment_variables: - - name: MODEL_DEPLOYMENT_NAME - value: "{{MODEL_DEPLOYMENT_NAME}}" + - name: AZURE_AI_MODEL_DEPLOYMENT_NAME + value: "{{AZURE_AI_MODEL_DEPLOYMENT_NAME}}" resources: - kind: model - id: gpt-4.1-mini - name: MODEL_DEPLOYMENT_NAME \ No newline at end of file + id: gpt-5.4 + name: AZURE_AI_MODEL_DEPLOYMENT_NAME \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/agent.yaml b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/agent.yaml similarity index 71% rename from python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/agent.yaml rename to python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/agent.yaml index 6afb8b777c..a58893ddf8 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/agent.yaml +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/agent.yaml @@ -1,5 +1,5 
@@ kind: hosted -name: agent-framework-workflows +name: agent-framework-workflows-responses protocols: - protocol: responses version: 1.0.0 diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/main.py b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/main.py similarity index 93% rename from python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/main.py rename to python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/main.py index 83e2507b22..d70edbc7bf 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/main.py +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/main.py @@ -5,7 +5,7 @@ from agent_framework import Agent, AgentExecutor, WorkflowBuilder from agent_framework.foundry import FoundryChatClient from agent_framework_foundry_hosting import ResponsesHostServer -from azure.identity import AzureCliCredential +from azure.identity import DefaultAzureCredential from dotenv import load_dotenv # Load environment variables from .env file @@ -15,8 +15,8 @@ def main(): client = FoundryChatClient( project_endpoint=os.environ["FOUNDRY_PROJECT_ENDPOINT"], - model=os.environ["MODEL_DEPLOYMENT_NAME"], - credential=AzureCliCredential(), + model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], + credential=DefaultAzureCredential(), ) writer_agent = Agent( diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/requirements.txt b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/requirements.txt similarity index 100% rename from python/samples/04-hosting/foundry-hosted-agents/responses/04_workflows/requirements.txt rename to python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/requirements.txt diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/README.md index 3181cb5ea4..c6bd6987e4 
100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/README.md +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/README.md @@ -1,11 +1,215 @@ -# Hosting agents with Foundry Hosting and the `responses` API +# Foundry Hosted Agent Samples -This folder contains a list of samples that show how to host agents using the `responses` API and deploy them to Foundry Hosting. +This directory contains samples that demonstrate how to use hosted [Agent Framework](https://github.com/microsoft/agent-framework) agents with different capabilities and configurations on Foundry using the Foundry Hosting Agent service. Each sample includes a README with instructions on how to set up, run, and interact with the agent. -| Sample | Description | -| --- | --- | -| [01_basic](./01_basic) | A basic example of hosting an agent with the `responses` API and carrying on a multi-turn conversation. | -| [02_local_tools](./02_local_tools) | An example of hosting an agent with the `responses` API and local tools including a function tool and a local shell tool. | -| [03_remote_mcp](./03_remote_mcp) | An example of hosting an agent with the `responses` API and remote MCPs, including a GitHub MCP server and a Foundry Toolbox. | -| [04_workflows](./04_workflows) | An example of hosting a workflow with the `responses` API. | -| [using_deployed_agent.py](./using_deployed_agent.py) | Connect to the deployed basic Foundry agent with `FoundryAgent`, `allow_preview=True`, and version `v2`. | +## Samples + +### Responses API + +| # | Sample | Description | +|---|--------|-------------| +| 1 | [Basic](responses/01_basic/) | A minimal agent demonstrating basic request/response interaction and multi-turn conversations using `previous_response_id`. | +| 2 | [Tools](responses/02_tools/) | An agent with local tools (e.g., weather lookup), demonstrating how to register and invoke custom tool functions alongside the LLM. 
| +| 3 | [MCP](responses/03_mcp/) | An agent connected to a remote MCP server (GitHub), demonstrating external MCP tool provider integration. | +| 4 | [Foundry Toolbox](responses/04_foundry_toolbox/) | An agent using Azure Foundry Toolbox, demonstrating toolbox provisioning and querying available tools at runtime. | +| 5 | [Workflows](responses/05_workflows/) | An agent with a multi-step orchestrated workflow, demonstrating chaining prompts through an orchestrated flow. | + +### Invocations API + +| # | Sample | Description | +|---|--------|-------------| +| 1 | [Basic](invocations/01_basic/) | A minimal agent demonstrating session state management via `agent_session_id` in URL params/response headers. | +| 2 | [Break Glass](invocations/02_break_glass/) | An agent demonstrating a "break glass" scenario where customizations of the API behaviors are needed, allowing for more direct control over how requests and responses are handled by the hosting layer. | + +## Running the Agent Host Locally + +### Using `azd` + +#### Prerequisites + +1. **Azure Developer CLI (`azd`)** + + - [Install azd](https://learn.microsoft.com/en-us/azure/developer/azure-developer-cli/install-azd) and the AI agent extension: `azd ext install azure.ai.agents` + - Authenticated: `azd auth login` + +2. **Azure Subscription** + +#### Create a new project + +**No cloning required**. Create a new folder, point azd at the manifest on GitHub. + +```bash +mkdir hosted-agent-framework-agent && cd hosted-agent-framework-agent + +# Initialize from the manifest +azd ai agent init -m https://github.com/microsoft/agent-framework/blob/main/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/agent.manifest.yaml +``` + +Follow the instructions from `azd ai agent init` to complete the agent initialization. If you don't have an existing Foundry project and a model deployment, `azd ai agent init` will guide you through creating them. 
+ +#### Provision Azure Resources + +> This step is only needed if you don't have an existing Foundry project and model deployment. + +Run the following command to provision the necessary Azure resources: + +```bash +azd provision +``` + +This will create the following Azure resources: + +- A new resource group named `rg-[project_name]-dev`. In this guide, `[project_name]` will be `hosted-agent-framework-agent`. +- Within the resource group, among other resources, the most important ones are: + - A new Foundry instance + - A new Foundry project, within which a new model deployment will be created + - An Application Insights instance + - A container registry, which will be used to store the container images for the hosted agent + +#### Set Environment Variables + +```bash +export FOUNDRY_PROJECT_ENDPOINT="https://<your-resource-name>.services.ai.azure.com/api/projects/<your-project-name>" +export AZURE_AI_MODEL_DEPLOYMENT_NAME="<your-model-deployment-name>" +# And any other environment variables required by the sample +``` + +Or in PowerShell: + +```powershell +$env:FOUNDRY_PROJECT_ENDPOINT="https://<your-resource-name>.services.ai.azure.com/api/projects/<your-project-name>" +$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="<your-model-deployment-name>" +# And any other environment variables required by the sample +``` + +> Note: The environment variables set above are only for the current session. You will need to set them again if you open a new terminal session. If you want to set the environment variables permanently in the azd environment, you can use `azd env set <name> <value>`. + +#### Running the Agent Host + +```bash +azd ai agent run +``` + +Right now, the agent host should be running on `http://localhost:8088` + +#### Invoking the Agent + +Open another terminal, **navigate to the project directory**, and run the following command to invoke the agent: + +```bash +azd ai agent invoke --local "Hello!" 
+``` + +Or you can run the following command in another terminal, without navigating to the project directory, to invoke the agent: + +```bash +curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "Hello!"}' +``` + +Or in PowerShell: + +```powershell +(Invoke-WebRequest -Uri http://localhost:8088/responses -Method POST -ContentType "application/json" -Body '{"input": "Hello!"}').Content +``` + +### Using `python` + +#### Prerequisites + +1. An existing Foundry project +2. A deployed model in your Foundry project +3. Azure CLI installed and authenticated +4. Python 3.10 or later + +#### Running the Agent Host with Python + +Clone the repository containing the sample code: + +```bash +git clone https://github.com/microsoft/agent-framework.git +cd agent-framework/python/samples/04-hosting/foundry-hosted-agents/responses +``` + +#### Environment setup + +1. Navigate to the sample directory you want to explore. Create a virtual environment: + + ```bash + python -m venv .venv + + # Windows + .venv\Scripts\Activate + + # macOS/Linux + source .venv/bin/activate + ``` + +2. Install dependencies: + + ```bash + pip install -r requirements.txt + ``` + +3. Create a `.env` file with your Foundry configuration following the `.env.example` file in the sample. + +4. 
Make sure you are logged in with the Azure CLI: + + ```bash + az login + ``` + +#### Running the Agent Host + +```bash +python main.py +``` + +Right now, the agent host should be running on `http://localhost:8088` + +#### Invoking the Agent + +On another terminal, run the following command to invoke the agent: + +```bash +curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "Hello!"}' +``` + +Or in PowerShell: + +```powershell +(Invoke-WebRequest -Uri http://localhost:8088/responses -Method POST -ContentType "application/json" -Body '{"input": "Hello!"}').Content +``` + +## Deploying the Agent to Foundry + +Once you've tested locally, deploy to Microsoft Foundry. + +### With an Existing Foundry Project + +If you already have a Foundry project and the necessary Azure resources provisioned, you can skip the setup steps and proceed directly to deploying the agent. + +After running `azd ai agent init -m <path-to-manifest>` and following the prompts to configure your agent, you will have a project ready for deployment. + +### Setting Up a New Foundry Project + +Follow the steps in [Using `azd`](#using-azd) to set up the project and provision the necessary Azure resources for your Foundry deployment. + +### Deploying the Agent + +Once the project is set up and resources are provisioned, you can deploy the agent to Foundry by running: + +```bash +azd deploy +``` + +> The Foundry hosting infrastructure will inject the following environment variables into your agent at runtime: +> +> - `FOUNDRY_PROJECT_ENDPOINT`: The endpoint URL for the Foundry project where the agent is deployed. +> - `AZURE_AI_MODEL_DEPLOYMENT_NAME`: The name of the model deployment in your Foundry project. This is configured during the agent initialization process with `azd ai agent init`. +> - `APPLICATIONINSIGHTS_CONNECTION_STRING`: The connection string for Application Insights to enable telemetry for your agent. 
+ +This will package your agent and deploy it to the Foundry environment, making it accessible through the Foundry project endpoint. Once it's deployed, you can also access the agent through the Foundry UI. + +For the full deployment guide, see the [official deployment guide](https://learn.microsoft.com/en-us/azure/foundry/agents/how-to/deploy-hosted-agent). + +Once deployed, learn more about how to manage deployed agents in the [official management guide](https://learn.microsoft.com/en-us/azure/foundry/agents/how-to/manage-hosted-agent). \ No newline at end of file From 2c6aa98b18f4bb4edbc68208d67c535d2ca4b723 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Fri, 24 Apr 2026 12:04:58 -0700 Subject: [PATCH 2/7] Add file data type support --- .../_responses.py | 2 + .../foundry_hosting/tests/test_responses.py | 41 +++++++++++++++++++ 2 files changed, 43 insertions(+) diff --git a/python/packages/foundry_hosting/agent_framework_foundry_hosting/_responses.py b/python/packages/foundry_hosting/agent_framework_foundry_hosting/_responses.py index 9078c59d22..16fd6e1d58 100644 --- a/python/packages/foundry_hosting/agent_framework_foundry_hosting/_responses.py +++ b/python/packages/foundry_hosting/agent_framework_foundry_hosting/_responses.py @@ -1117,6 +1117,8 @@ def _convert_message_content(content: MessageContent) -> Content: return Content.from_uri(file.file_url) if file.file_id: return Content.from_hosted_file(file.file_id, name=file.filename) + if file.file_data: + return Content.from_uri(file.file_data) if content.type == "computer_screenshot": screenshot = cast(ComputerScreenshotContent, content) return Content.from_uri(screenshot.image_url) diff --git a/python/packages/foundry_hosting/tests/test_responses.py b/python/packages/foundry_hosting/tests/test_responses.py index 237a3c7634..60a3532ee2 100644 --- a/python/packages/foundry_hosting/tests/test_responses.py +++ b/python/packages/foundry_hosting/tests/test_responses.py @@ -1507,6 +1507,47 @@ async def 
test_text_and_file_input_single_turn(self) -> None: assert messages[0].contents[1].type == "uri" assert messages[0].contents[1].uri == "https://example.com/doc.pdf" + async def test_text_and_file_data_input_single_turn(self) -> None: + """Agent receives a message with text and file content via inline file_data.""" + agent = _make_agent( + response=AgentResponse(messages=[Message(role="assistant", contents=[Content.from_text("File received")])]) + ) + server = _make_server(agent) + + resp = await _post_json( + server, + { + "model": "test-model", + "input": [ + { + "type": "message", + "role": "user", + "content": [ + {"type": "input_text", "text": "Summarize this document"}, + { + "type": "input_file", + "file_data": "data:application/pdf;base64,JVBERi0xLjQ=", + "filename": "doc.pdf", + }, + ], + } + ], + "stream": False, + }, + ) + + assert resp.status_code == 200 + body = resp.json() + assert body["status"] == "completed" + + messages = agent.run.call_args.kwargs["messages"] + assert len(messages) == 1 + assert len(messages[0].contents) == 2 + assert messages[0].contents[0].type == "text" + assert messages[0].contents[0].text == "Summarize this document" + assert messages[0].contents[1].type == "data" + assert messages[0].contents[1].uri == "data:application/pdf;base64,JVBERi0xLjQ=" + async def test_mixed_text_and_image_input(self) -> None: """Agent receives a single message with both text and image content.""" agent = _make_agent( From 9cf4c9169aa3b1b0a719cda5760b4691247b6942 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Fri, 24 Apr 2026 17:18:21 -0700 Subject: [PATCH 3/7] Fix file content and add more tests --- .../workflows/python-integration-tests.yml | 49 ++ .github/workflows/python-merge-tests.yml | 66 ++ .../_responses.py | 23 +- .../tests/test_assets/sample.pdf | Bin 0 -> 11901 bytes .../tests/test_assets/sample_image.jpg | Bin 0 -> 71328 bytes .../tests/test_responses_int.py | 566 ++++++++++++++++++ 6 files changed, 702 insertions(+), 2 deletions(-) 
create mode 100644 python/packages/foundry_hosting/tests/test_assets/sample.pdf create mode 100644 python/packages/foundry_hosting/tests/test_assets/sample_image.jpg create mode 100644 python/packages/foundry_hosting/tests/test_responses_int.py diff --git a/.github/workflows/python-integration-tests.yml b/.github/workflows/python-integration-tests.yml index 48d15bceda..32c25efdbf 100644 --- a/.github/workflows/python-integration-tests.yml +++ b/.github/workflows/python-integration-tests.yml @@ -336,6 +336,53 @@ jobs: path: ./python/pytest.xml if-no-files-found: ignore + # Foundry Hosting integration tests + python-tests-foundry-hosting: + name: Python Integration Tests - Foundry Hosting + runs-on: ubuntu-latest + environment: integration + timeout-minutes: 60 + env: + FOUNDRY_PROJECT_ENDPOINT: ${{ vars.FOUNDRY_PROJECT_ENDPOINT }} + FOUNDRY_MODEL: ${{ vars.FOUNDRY_MODEL }} + defaults: + run: + working-directory: python + steps: + - uses: actions/checkout@v6 + with: + ref: ${{ inputs.checkout-ref }} + persist-credentials: false + - name: Set up python and install the project + id: python-setup + uses: ./.github/actions/python-setup + with: + python-version: ${{ env.UV_PYTHON }} + os: ${{ runner.os }} + - name: Azure CLI Login + uses: azure/login@v2 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + - name: Test with pytest (Foundry Hosting integration) + timeout-minutes: 15 + run: > + uv run pytest --import-mode=importlib + packages/foundry_hosting/tests + -m integration + -n logical --dist worksteal + --timeout=120 --session-timeout=900 --timeout_method thread + --retries 2 --retry-delay 5 + --junitxml=pytest.xml + - name: Upload test results + if: always() + uses: actions/upload-artifact@v7 + with: + name: test-results-foundry-hosting + path: ./python/pytest.xml + if-no-files-found: ignore + # Azure Cosmos integration tests python-tests-cosmos: name: Python 
Integration Tests - Cosmos @@ -402,6 +449,7 @@ jobs: python-tests-misc-integration, python-tests-functions, python-tests-foundry, + python-tests-foundry-hosting, python-tests-cosmos, ] runs-on: ubuntu-latest @@ -465,6 +513,7 @@ jobs: python-tests-misc-integration, python-tests-functions, python-tests-foundry, + python-tests-foundry-hosting, python-tests-cosmos ] steps: diff --git a/.github/workflows/python-merge-tests.yml b/.github/workflows/python-merge-tests.yml index 843253e788..9529e54d97 100644 --- a/.github/workflows/python-merge-tests.yml +++ b/.github/workflows/python-merge-tests.yml @@ -38,6 +38,7 @@ jobs: miscChanged: ${{ steps.filter.outputs.misc }} functionsChanged: ${{ steps.filter.outputs.functions }} foundryChanged: ${{ steps.filter.outputs.foundry }} + foundryHostingChanged: ${{ steps.filter.outputs.foundry_hosting }} cosmosChanged: ${{ steps.filter.outputs.cosmos }} steps: - uses: actions/checkout@v6 @@ -80,6 +81,8 @@ jobs: - 'python/packages/foundry/**' - 'python/samples/**/providers/foundry/**' - 'python/samples/02-agents/embeddings/foundry_embeddings.py' + foundry_hosting: + - 'python/packages/foundry_hosting/**' cosmos: - 'python/packages/azure-cosmos/**' # run only if 'python' files were changed @@ -488,6 +491,67 @@ jobs: path: ./python/pytest.xml if-no-files-found: ignore + # Foundry Hosting integration tests + python-tests-foundry-hosting: + name: Python Tests - Foundry Hosting Integration + needs: paths-filter + if: > + github.event_name != 'pull_request' && + needs.paths-filter.outputs.pythonChanges == 'true' && + (github.event_name != 'merge_group' || + needs.paths-filter.outputs.foundryHostingChanged == 'true' || + needs.paths-filter.outputs.coreChanged == 'true') + runs-on: ubuntu-latest + environment: integration + env: + FOUNDRY_PROJECT_ENDPOINT: ${{ vars.FOUNDRY_PROJECT_ENDPOINT }} + FOUNDRY_MODEL: ${{ vars.FOUNDRY_MODEL }} + defaults: + run: + working-directory: python + steps: + - uses: actions/checkout@v6 + - name: Set up python 
and install the project + id: python-setup + uses: ./.github/actions/python-setup + with: + python-version: ${{ env.UV_PYTHON }} + os: ${{ runner.os }} + - name: Azure CLI Login + if: github.event_name != 'pull_request' + uses: azure/login@v2 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + - name: Test with pytest (Foundry Hosting integration) + timeout-minutes: 15 + run: > + uv run pytest --import-mode=importlib + packages/foundry_hosting/tests + -m integration + -n logical --dist worksteal + --timeout=120 --session-timeout=900 --timeout_method thread + --retries 2 --retry-delay 5 + --junitxml=pytest.xml + working-directory: ./python + - name: Surface failing tests + if: always() + uses: pmeier/pytest-results-action@v0.7.2 + with: + path: ./python/pytest.xml + summary: true + display-options: fEX + fail-on-empty: false + title: Foundry Hosting integration test results + - name: Upload test results + if: always() + uses: actions/upload-artifact@v7 + with: + name: test-results-foundry-hosting + path: ./python/pytest.xml + if-no-files-found: ignore + # TODO: Add python-tests-lab # Azure Cosmos integration tests @@ -569,6 +633,7 @@ jobs: python-tests-misc-integration, python-tests-functions, python-tests-foundry, + python-tests-foundry-hosting, python-tests-cosmos, ] runs-on: ubuntu-latest @@ -629,6 +694,7 @@ jobs: python-tests-misc-integration, python-tests-functions, python-tests-foundry, + python-tests-foundry-hosting, python-tests-cosmos, ] steps: diff --git a/python/packages/foundry_hosting/agent_framework_foundry_hosting/_responses.py b/python/packages/foundry_hosting/agent_framework_foundry_hosting/_responses.py index 16fd6e1d58..ceaf8fc865 100644 --- a/python/packages/foundry_hosting/agent_framework_foundry_hosting/_responses.py +++ b/python/packages/foundry_hosting/agent_framework_foundry_hosting/_responses.py @@ -3,6 +3,7 @@ from __future__ import 
annotations import asyncio +import base64 import json import logging import os @@ -1075,6 +1076,24 @@ def _convert_output_message_content(content: OutputMessageContent) -> Content: raise ValueError(f"Unsupported OutputMessageContent type: {content.type}") +def _convert_file_data(data_uri: str, filename: str | None = None) -> Content: + """Convert a file_data data URI to a Content object. + + For text/* MIME types, decodes the base64 content and returns it as text. + For other types, returns a URI-based Content with the filename preserved. + """ + # Parse data URI: data:<mediatype>;base64,<data> + if data_uri.startswith("data:") and ";base64," in data_uri: + header, encoded = data_uri.split(";base64,", 1) + media_type = header[len("data:") :] + if media_type.startswith("text/"): + decoded_text = base64.b64decode(encoded).decode("utf-8") + prefix = f"[File: {filename}]\n" if filename else "" + return Content.from_text(f"{prefix}{decoded_text}") + additional_properties = {"filename": filename} if filename else None + return Content.from_uri(data_uri, additional_properties=additional_properties) + + def _convert_message_content(content: MessageContent) -> Content: """Converts a MessageContent to a Content object. 
@@ -1108,7 +1127,7 @@ def _convert_message_content(content: MessageContent) -> Content: if content.type == "input_image": image = cast(MessageContentInputImageContent, content) if image.image_url: - return Content.from_uri(image.image_url) + return Content.from_uri(image.image_url, media_type="image/*") if image.file_id: return Content.from_hosted_file(image.file_id) if content.type == "input_file": @@ -1118,7 +1137,7 @@ def _convert_message_content(content: MessageContent) -> Content: if file.file_id: return Content.from_hosted_file(file.file_id, name=file.filename) if file.file_data: - return Content.from_uri(file.file_data) + return _convert_file_data(file.file_data, file.filename) if content.type == "computer_screenshot": screenshot = cast(ComputerScreenshotContent, content) return Content.from_uri(screenshot.image_url) diff --git a/python/packages/foundry_hosting/tests/test_assets/sample.pdf b/python/packages/foundry_hosting/tests/test_assets/sample.pdf new file mode 100644 index 0000000000000000000000000000000000000000..2dae52067f64fab7ae8b28140a5bf21dbcfe1e21 GIT binary patch literal 11901 zcmaia1yo#3vn@_C36cbtFd=BL8D_BH?he7--911cNN{&|clY3s-~@Mfx8QHc_x<;` zd)GT_22Ss;?&>~ss@ATqB9{>mq65=I0OVa8ryF~DXXztdJpc$01hmpM1#oZx8H7yC z?G0>U|IKym4FnAItn>|l3=#&GM)t-)RwgKriwj_9Z)>1q0dR)TiB+@u1;X@Qx~K7z z1A19b4sp(#?7XlNiE!3-QEf+*ix7Kk>tb=-H+OW)u%NMc?Pkc43fY>DJ5{9bK3!^* zYs`plvGdTlr%M%XL-E})#m~VX7#6lY%}3&z^D@68 zTjaKQE2F4)ehfd;n=83YVdOWW@U7UWM8&RB$qRSf9 zO&sxik&oIE8awdGw0qTo-l1J+3n|R@jK7D$mJ6X*4<3B`g_Zr6;Et)FAWpu%B>mb^ zQXJFO5u%#1nggSy)5nezrRa$o4Ow|=Yd9v4_DHQ=q*R^cSEy89_*bf9Z{VgiZ@Z4+ z#WJ#45ipOwFk&lNe0|Zf&SwTjNyn3hlysDsnS#`>kx4e2@PZ?v7XpDTY4P)&r?dSj zR_4L}^UG2_=H5Lc+lgREBS?P#WodVN`KBaId;z)+*Lt=%ce=()x|_DUl1A4V>f^X> zPBbZ7Qm=lqT_o@{rr?N1>&eo9fNHos2u@Hl|4Z%=A+wmapTs3I!Ju6LN1bic2dZoI zGllohl9R2X@TW`{*@xsm9dQUsUUKDL!}kFa57w%$%_1e5f{_y~xZbI2DpW(M`UZZ# zJN#HE5%^kenr@poCsDV~666;xWwuW7?&>r{v~TP}44pTf>Wx#>eC-YWT#CcrCY$H%6?QmE 
zrT3mXH;IcJ*`-U}l<=7r<%I8!s?PA6mc9e%v#MBV$8pq6wDrWk8YGYRXGe~Dm+ z2iQ!l#Bz&5OCl0Pq7#TWQT5aMCUr#}juB35F=M8`wH&>hRJyGi8u}G&QE?tMS?>=Qidgn3 zZic&^+!Kr*>XR*ifu;W61_rx(vP$s3Sf_%EHOw4kbd3J|wKcG`2QtFC<$w%w26k2s zwt5D3K{0Y!B1`GVoiOTiME6>*yH(pPU819T>>;XL>F!n9G1^`~xc} zz$Sl!VY5FujK3AI{)+-B*s>USpInV4%+CNB1PmNa^bF*L`Tob*Fg`8ue|1Oy>Vi^Y zofPC}aQuf`SXxG2lLE=l30$88$Jj-%kz@lzzsMl?`A|3tAPZr=qW+|>@Eu;*pHxo} z>%~`Z-KpoNm@+->vI2q|3&Cd3k4l)%78CD#RhDM9>ZWFlj@+jj;biW-kvi;^;lDsL zl(7!?+SOmb`Pp@afQJc>t_=Tu#n2E*__qMUue%n`goJ#No~p}3`Y-xTbF^(T6n2x3 zo)M%h{A4I_0Ko}n4i5_44TNaOWuvUms0J+xqKR-DS5)KUzmp~4Kv)S{16vds z<^ha?SontlrwDM*qPy6dZPA0S*^E=+oO6xV1Catmb#q@$#hYww_Zi-tRLNiKf@z{V zC0D*VOAMb^zYGQ8ekrqx6@({Ou}CnEI4B`HI8d3~G`?<|S$gXsf6v+HdaHOsu|J&3 z8=R`LWWHa_(1HVZNv4`aT-6$*2v==^!n3hbEPVuoJ4J@8UQ9lF>u0VR*|qwT@U?9%#8D!AmN?dHFI- z-{TMkD$Hu|`}3!ufm=!97q@FenlyA(#!(?%Ko zTouX88{=Ipkj$J+0h8d_*0Xz`R~=AUuY75HQa?F#vsVFQ_KQdf7 zGU|kCF9lIkQVj}Dik5d#{U7y(ZShoiG^I<)(tujz7?IpE(Nq*z(qcq{d^1vPVhubl zJTCk>a%lOMzmyB(8&g!JTLj$2+!=(UAaQ4LYyBlrMG}kmwPf|jS%X;+u`o;a?ReBq_c_US%lUHyas#yustuwIf{m&lawr`j z?cWeN{G8yZ;HKcPGw5c+@Y?VkbD4?eV>5Nk^#{ujpJPa43}fJ9!U`o6ttPN0QVJ~! zrGJ4;DNJ=tXH0J==!-&z^M;*Lhf}nmkL!2-yXB1muJt#j7oEGDyHNy*1g!-41d0T4 z&>klIM9akf#Erx)Xm7Q#>WdmA)eY6H8ck44!r_2|A$e60TSiIgK_1;GSN1{?XNg{Z zm9kNmRMpTMjYxVm7T!AbI(a8PX>kT+2CXvLqYN(Dw34*!HsLlk&mZ4MfV0{`yh-i( z8G#vXI;lEYD|+t>BZ@{pWT|Gc_ps{K+610(t*KFz2c(EbiiY${%1{4VRM2i5t{#4A zwrNZOEn;fw%`Bf+Q7IOhOlRxl==+@Il5|b;f-^`tCf6z5{rRQGNMB&R~Q9J?&K7N$98?(1)9FvKw;goApc^P;v0hF@rY ziz`!#P%5j7ywW`~J<8+y$Q8g9!u6)rv9-vZ;kV8`!oB0o!r9dE*3HzdG!iRP@$+p! 
zHj)t1CseK%jL%;qJ0Nwo!?!nilQ4{Fk*WW}LEyXNcl70W&h%0P`^FBBo)%h6vF}o^ z=ey&Yi=6YRgHSn^q-nL9!vdn(p!dadr4FZkWw6z#}BvKoFPvA$zF zS?&Eza+La#W5y-Wy>N%}KJBu0L^V(~V{(2npf;P?#7)9&Y3Iv$3h@vz9e2{>=kwsp zgv6>JM&iQ}L)ghlY-vs&r+huWCqV(Y{vVa>xf-k2$5KkmOBa87#g%EEG}so*wcOq* z#uXWrvT3lpr?|Hq_}zz0zHLg4)!O$k*{M0QXxXu@x{qBe32jAo-@4JbWmvpybl(Vl zOd&)JLb*AlZ*_aroafweU%VehEx^#^9&ly7H@?@~U(2KfNPkEzv3nr|ERL+w?} zXi=Jn(`}29ZhzOY+2~#Zx7cESE8ETFrNx&1_WHP!$x&`z;9^lL_odf;#;x9YZ^X{N z^l#~yv=DBm&(H5pohd8~HewYaA}{ME_(bn(;hTH9N}R{Mkr7})9An!wt& z0Ps^J01FKitQ0Lx{zUrVzt#z(-L`i2{Kh)AKnRSdOX&RFU}gg{D4FQn8$WG{g$)E` zgs=gb7?}VN2o%V~#0Z44G6NwXFc88B0WyLhKqgi;7zXQOWP$a=f(1s{Jp>FpNq_)Z z7{Rc&=OpeLLe2xWokWnqD}S%FX} zBWxT@H%#tdAMl^O{NsWBeS!gH0{#DE3XQ0C(>We2!K<4tkCy{|Sp$26?CIiHlA=+$M>s9LUtH z30{@O;p#{UNo=dbmf;XHl*B4-V#vQDOnR$wuMH12rotB013#;zoIrGqf?djIAUy`Hr zcH=W+!IVEg9PRC1)C`#iKgM)Cv}7G{$8OFwzFx$w7%JotZofU1C6M9yTFujb>rTIr z!BZ0#OC=*2bVVgYr|8b(W%|xtp#64UoDd67kH$ha-HG8qhR12_8-l4Ezt^I@Oc;Wq z1$O-luMgV5BJGz4M13FfYvn2dg&(QH6=e>x^|1$%Ye~&ST*McIU;-5gD3t)5Jk3hK z=(^7`F~#0rvADw((;F{&KJjK|>!VRKfD0lOWhBKW!%uM%8-PW}9(myUA*8~ufJ%TX zb{nv0SzS^wT=Bd_iS$xjW<%FS!c!|`F=p{`%k7}_XNSBbx2S8kYf7ha+T$q4;4JV2 z6&G`Z@4qel&x`(#SK%)c`FB(J%QU2PEMTGNf0&H9j?q(0{l|9r{>0C8Odt@Dj**24 z2nMq<0>L0KNCUti3Nv04Jzh&Aa|4)FG4R^yJw@kiFav=Fw|_P07-8ZJ{5sYm1|~+v z_CRJP79fMXy@AE2rxD73p#UfgGii zL8~4d$lf}yj*`pm7iAY6mx|iNR7*Tsq$G^2=G1G+bBcR>xYBBSv-nc#pepyp3%=JRhli(5rtXZP zMxrbz)m(LO!}Es^?2cyl(YPlZ6Lp-JNz$DD9{g>dPUfDcX6G9g4-fPqnhjx>sNNbC zE<^m!(y|BdqpZ;Fq$>Tk1NfXj(`Li*6SXn!fLw{Vv^%+v>k^)GABm`*;)4nCtCl#L z&QL7wN}I}^lauOS*Nw*qmcBKs<}QX1rT&J0N3q3v)vX00Ns4!6T^J2@ku_Pn%dgG= zQk5{UQq0mr3 zox8iEkirkKFtOUzouJvgm}~~qVSAP_M-+=ZNv-6?g;=@Rc~iQoY6>^HMW4#wpEU57 zKCFgbQN)ndCjOKt@ndv1%bZ%Da0_559hm;0J-NNnf^_PLutS{Wd(-tg?rup_RhAZJ;%A9H6KF-BCZ{GQ6_f@eX>;p2=*^iGsjxM2?>JsF zCyYY3wrmPp`IyRi9csk=ID5N(4NehQH)Q%G+fuUlqx9t30{m&VcwXr9?1>LLU z+HwcqEBSpk#}-m;9UI@ol?yeepOm`}#0gW~=wMA6y_qP+;44u(tJ#_DHxc|Ksz>z; zpTZl_e7+XcOVAkT)-uvnl%J#?sBvQNr8q6JdO+6-s9vR1OI8mdUI)LJ@nJ8Q!e7%Z 
zVGjFn%j{;xBkAn_qjVE2SG+$t%cY3kS6S#pl|;b#uIa}$mRj-Bx>q-A8=bz=e&rN9 zjyp||9F61gs;GP{1U;!L)aUkwAz3) zgo7e&{AC4f$x5_UtD&DS`ChlN7N$gcnT<@3?=}_QRLat)qd0_=&y=ou1jVj8daWadg88 zYeQOl^eg-G%*RlS9X;I5e@>w?f8Ct~=Y(3K>g?tJa`xMe{vjnZGPhI!_{Q2a9}ppayU8^ zNgkYyIgg6m?H*IGk*P%s0`7HfK84JfrWM*L6m`OLpTkkSy9CMj!_QPbuXyRF*gB%$ zrIWCoJS)0@vb5CkI{ZhCw%HHQpnz&ew6N*=Z83Ck*Vc{nJK&Q#eAHtIAc9r9S6bG z0&Y2Q=({S(4{!5`BK=KMZqfvTjM&R&?d(@Q@API|%hSisn>q+zh9x~GQA>q9qb#QjlP=rg@gy0HIP7hE8o{;;Tb2P23-+{7(ODJg*%;w zX;4U4td37ReMw?IP2w{{NlgoR&W*C3Dw?4T0%Ggx4}2&Vvpf?C>;1Ess%nT4z|vf} z+3CPZHdnCV=ZetnFEj07F+zoK7tM2F#aA@7728Q6-$h7Vxw`jq=GOS8N3nw)MD!#E zk%xNP%mSySW;YG}O;NBFc?m~aCDE~QqrC_0r65GL-!Y@&4&PKQc5f#kaVm$-gd~L0 zV-mgp^-5$+dC_6!mt|Rzyk73q+Zskp>^@@UPAS5JK9p`txX^Y4IH-z+W5zeog3vU8 zk}sL>S%_Gv*gM5<8OmR?-^5Tc5#5uD(B%kijHZ}U;~t0VE8vU?Cs%?7>`>ARv-pCq~iW zN}F?qh!q7<`QPX9XM7={EJh1}@~@?Q3Z~@GW6b}AGmcg}gV92`a~KW}sIm<5FSHWE zEMErebs(^}=J?j)N8wOC;K0#QTy+~qfBynD9kefJ_+7yoCRx>$KCs=WLC>J7cz)?s z7`Rv=!Pr{~eK5->(JQ-#9u`IYz=Ei5$54k><8H3n;Ne(&+~1C-15J@`ex!u*a#vFg z;3tC-1hSD$=>fh2fuH2C0FSsfaOC^eqRl*8W$5|**6vjL62EUDJ!RvR<;c+Ml?|T) z%8l3JsPew1rRiI^tTG{4tWews;zo&1G}-phH+9kc%mVT!lca1Rvv2)+UMqiQM}&y6 zn$!)_<}f*GkyxC()OllC@Dt=GxN-c8?clstcDy29J_3JEL-U}INkDH6Q@ntmUgfZz zt;oC6Jv zkG*s3^p9reGQ1#KG+tr-B7RmT0g;U_ z9|19ERPR~FjUk>_YefRitVFoU?)_f$UYcYUt70`=*)Md1x|BpdjiUW_oeVx%0~b!< ztFIF(i!#qPpxGl%g*9&vlB8^0RL=Ezq`_cSHJrs!OUJbeEP@b0t^Uk1arvf$quUw&w-0xyqBslsMXUl+L4wYQy&7V*r`GE4BlzFuveV!kPjsWLAq1(3$fyv5bU;Uf#J z?M4*XOeOUxBOhLUyU7p<_s+Sl^Y+cd>Isd?i&V!|9vb}n9kQ?bkz~5>+0rY*1)_<) z;zbA&c;!TJM3Hk6$img{C>TTf1pJqFUpl7+z88wdd7fN}Z^ZYpkIE&YxiiNG(oi%1 z_R!^!F|DKS7~|v!u*2tjoJti~)it5p#!BIl9_5f^|GrUc2kqvyd(lI??I5`-yE;4D*-O5EG>=BT6s1Y*+LIjuFW!IjuDv;jVAujNICs_^+k0 zTx@PB#?fjXm=F;xEX_?fd{@+ZdDi(+;4^+36~4B35_>xt!G-HxH9l5Cd3Z%}dLMw= zYAcXcu>1wJV|$;wO~CCLVdtBp#ruy}R*xk#Z8iaOZ}3;>?s#{3v55wkjVZ<#PCmXL zz;Luiv}dr#mh)IPtjXh*bS&b#^@&>opTK#3Qt}`j`^>TW@zvV<&*)q>WSthN>u(?{ zQw{li6J5-$2CITUGIplB-iK!~+yqz_lp@=}w~(nwk@X9`ix*v`K&GzZi~NzS-s;S2 
zYbm?(l8Zm#XXo-w^{?~u%#O%NIkyT7@{LcE{kT#e>#!jKHH*i`N=5Nny6`e}CF0Tus(m1CtW|!Y{G6+AoGQu*N z&qI)2Kh7$!%IIOKOehuLGwtn2eWeHxu?)hD19tW91-$*$L;Zpx4tu1&ol$Bfuvl!r zKy!1kY-7<4(o8G&5ig-mLW%1k19aa*e$suyfV>bVLOIBi22Sc=5#klL=OBU3cd+%~ zzNSuABwq7$@-Qb=Q+n87=t8hgeB2l|9+1H(Ly2?NC;1$u%^X!t?QZE>4@&W?(yUM` zx#Ytc_)Wd@H6v}_rdQgJRHgWfKf_=lYx18Yd7Cm^hBsU8GIzK}Q@I;@ zF0s#{xy036vDw?vvpDMzeqew828{fX95JYJA$CWo>Rmo1ZwhrhsYD=(IhDTcAoP2| zZK@upk~H7#CLxAIxb5DR?@n%~`P3yigkwBUc%IOu3q^ud4{G@K8yFxX%fpW4 zji{thcvL?|UCCm8p7k3;5Zkg2aJl+Y0`>DDKzlc*CcHT6)p@g)@zb}rmwN3-Y z_&1W!vEB;IdmAZTyd6w(&))0nYhPc!s4{vm6Lv!+@mw1lH9g|gn3ylkT@+_%bEL+> zKjOKPP(_^&sI`yuLCQjqD8vvDrPp)*p6M<2WtnN(=$Hqg@S~9t-!ZxLgS~ujnAF|g zk+Vl;s2ayS#2Rkbsg>a5te|5%wdIO}x^gA0zN z57aBBOo|pC-ItdO_~Z_1_aW(%d?p15duEe3BW5Fe;%)F*R+fMc&cs>S^Lzj@9(QEC zXL^RC-bSwwMq5m%zV>n7i~%3N?Hhp#KK?5Y+EO8n0a6$J=LPRVvIkNYWSR@}N^3JS zS*I%OO*|Z3T{YD`xTq&lqeNbJ@@~7w9GcSc!Id-M>K-%Gv51K?L5xqf6tMh4sO`Ms z$#d|*>9iBZZTQ7dCqo3maoh8Gt+d}BKlv#)+|xAg*GRPL>z0>?)@XLo$?VUEH5Mk- zn+H}jESB6I4;r_w&{IrIxmflI3C^8fF7($FTQ4+~m>TJ48#*cOY1EE=6+U$J2rF|P z8Y423?~imW9jLBnHhO3*Mixb$DUkcIf=0)wv%~*xS(1y^Ca!3(!M*0*5$>Jg|YXw*$G}Lg&2wKC|yP z`>=K`QZbBs_4%kf;Py9~;#@2$j&wOyZyTL0q5Z@@k+#bMne9!yT3OFDpbqPL+(->z z+(D;GrcTU&6&5{|ATM?lA56k8q#72riqyhm@A@IvMoz-fG~G6nEDJ$dI&GteUf8sL z>49SvVY{vS9$IO=isJO%n@`G^`W{l>9o2QTD>8zmhk%!3i5T_EUI5?8X3DF<2+(|e zafK?-$vSmNm=o{oS*%WamzYw5<#ZY>(WFt8z_ksxIUb)A{<3MMeYKWqjj*;#TJ=>1 zPgcL{4Ry=jz(ZbG>Q|;ZB8PyYNfTfO)uQ8J`6HS8cfqX7vm-Nc6FQ|VzWdL9*iz4j0-dT51MY5QXDBDybic^}Z z(r~Rd6xjUSv12-zfCI^5GXno`z1i7)tQeoVK}a{v@H>8ZjCM4E>*s<2_+YGeH}NPC z>Rh;E<9o$wJ%&Ol6xV_;i-eNBD6wg+o@TwR2Uo}>R#a&73+1NzKO^(5+~!aKsO-04 zQw)a<+-!9#+G^JP7UGgI3!fI2KC7kAr#9I-iC?K+1tyS}nj|o>bI{Z&ZOIU&v(D3d zphln(O;S=MuGJC@+-TdG2^Dmu=1)4IelP>?Y!G?8Y@ed!A0%b^_J8g zV2INFh|S}%OK)Q#Xg2N`x95d4?QYKePE*gDANiSysg8@YUDjKRBk;a|)^uK{+9 zdCizEn7>a`K}OA(MT5?$gq^$2uC-8keK3JTbT`?Dk&oY#5xJg`*+enSE#6nJaETrRiNy)$U|l`EeG{+&sw43<%>F@5eN-Ne3*$a+j_Zsml^ zP+u!3ZaAk6)5Y0xsL;zHLYQB?h*M^}F`QGAwcgSUzI|Bd8+wJL68*;vw1*%R(QY+{ zh3lE^t$Dk3hVrZ(?BY) 
zgMX?Io$aY%=?+-2usv*$0IbdrKqc^z5yZ#@27w?TRz_whiz*023H!k0B(3zHM1cRd z$Un0j&b9`I01yxi1%UqNf@M>fAWT3*;6E@nC@hKlbO9~@fk9Z=nEw+4ff!*)lz*24 zgJ8MD|G*%uuuRNTfktW2m)M7DoQy5&t=4Peg-RwHU<1)c z6@cY)IdmBfz&fmKEP5=4x(0@^U=gXZFZ*&cDTf>i|-yvZ^uw4GjQ5dwKx>RsaeB4D|o--y7oz znAn*A0T&w^3mXp?A0H1F4-cP^gb<&Am;euth>VDsgp`zw6rYfsf}E7%>6`RFA!z^Y z#K6RT+DJ-(NANWJf5g8Y02wY?HF_8Z8Z!W$3=M+}?O#8D4gf&MK>H7X{|`(o3~U^9 zTr>dQ)3Pcl;6KIaSm+qo7}!|oSXgN27?=PoGHh}tJ~EFB=fb?R?e5sQ3yWABFU6gFkkzhwX+#*;`g3^ITWz#IQH z3zNx^XxY?`ZKc>hfOuJ7UIeZ-d)`nE5^dZiRm@EMRcR&F8+&qDK^NU{$%-?BvKeF_?_ehD>>I?o-s$D1|L96xXc3@gNwY>e*nvPf_2%11JofA_J<@y2sjHMJdimUM z1V01P<97y5RClY>sPOLp0p{~pwN=@X?W5`S-Cp;h9SPjcG-#ON6D=a|me%0;%~;(v z0H;;e3DxiD zm~jjO25x&5k&kc`9jXkuU~w#N&si|CcThPJ1LQ{b{_17Y4UcQ>8!S2x5{V){q#i!%rH^JgfI9Tm;id4Q0IyxxJZPC>D?>HC@>c3#?r^g8pYBKm4!Nu0BDq6YK zT7$oLztm)=AYn?Lg&ub;`M`p{n|}+`&4Juo zEq|8c!3^An8SIRK&78Ofw-RTOAZ_F2noFTYgas%If6^|iFGfKv6Hkn$5qUOITZQ`loG3*<1!=SPg zp@=zyHi^7;A^V7;1z0VaekuW`FDd0GU#)mPa%~)|W9X7MBXL9-0rgE8jB)6|bcjm^ zg(h_ESqxM?ocXKo0>@a@_5=+JLRqO+pLMVnq&R<+B+Pwmd7b^aI7N$QWMUj#$u^~A zdXUz9%ps)yi7jw0HSt`mx@DM@*ckeCXNr502(!98Z@Bh20M*J8m4Uh*5sIh!rUJ4C92t7mH0kB({Ql;gX;)L0%!d$DDg9(aP;Au=7 z;I%I9x-gd19+d-5T@OiWyS&ZaZ2Io}OgwT`8zCr=0#2(=V3Wq`IH-<8T^}u0i43=r7ySolu^wBR z`8xSBYfOE`e-ou;HGau1WcC2QrX*`5T!)I^s#M^5^Hy;;$F97eA+a)Ux+3OHR72m zMOehaT2NpOE%;rBo#x+q+L?DLSz9Ma-fM&s?aRa4kOBSKqntC@z&(5Gz-0`2U%bNJ zCYB)@Icx&yy)FR~i_giZ_2tBvr;H7>r6}(%4$c7qs)iD~wS2opyQ79usvG*>{kNo} z?E2Ox5Y=>d+Q~2e#vkt?i+z%hRe*O-OS`y|}HLQm9 z(n!Oq!xpvmNVDei-OoW#j`u_MRnVsxl6W4wysI!Mon8>f~%r(}#= z$;7f9o>OhE3^#ng4L5kd51N~@H}wz`MCkepS?4Z}lzM7&I!0uv2y!8%D*k)`@~>1C zlValkVvRpyFNF#yro7l-oE^~&FzU%QQeB^Q42NYU>N$?v(m8QE;4scmkNeatQAH{t zSni2o8YX^W#LgX*(|lUQgSBwS+oC=Q)X%XeICdKEBv zQb>`rPM(&t@r{G2Q3?5?Bz%nTjg{-%8;KjpNXZg1wCban>kOrRD*4=uH6~H`g>B5mpMn2@$y(fiZOK|o??Qs?*~`8GEcmnXZ|Yg7;E-tBL0+*(9(0bsV%-d z>u~~oQ4i~7u_c6qk_u1_)V8OT z9qqpx570BNrC5$kOx8xv{py%&SP}7z4k=rrJ3+^N`Nj`(YQzD7MTRxr1e?y#Z~&Yc za+o}w56$}en-V6S{HfmIQi|tSApDk9+Cg}H{dbpHCtV`SC8Dds%*g!oH8nr%aM;IH 
zLnlOR_Mi8A+@@fJ0+P7melZ9s*WLQndw{9`g?)5ykcyyxtq-N*GwFs4d^yv3%%hu6qezO zyjs5S@Q2%RqS4`UX@Szh6Sp`Jq3HeGIu&ZOZIs%p>E`plgl&L!&wNXlkS=Eddl@B4&x*gbf(wz z*@=RPc(W6C&?$j8pWg}O@Je<}!2bJYgPn4&tzt+16|>Rmq^iek_^1I1gxjw^|4raz z6+C|L`{sK(a=8!(QTFhhLSIj*>eCO=4uYUI5p=nI)5cCp!R6w#J&q255w|zXO0u&d z8)&;470mA;esOAPFjkaO^ud!)sqF>vWjO!ufSf$?n1Fagr}!6cYSO#$&Z<&GM(;D? z#o}ZHZT3gUkt(Aks6k)Dot7?A_CEf}ZrUE&1+3)HyjZ*3N| zxzH1HgeHlIn=cgCvp*--5|YK+0xvhST61%xU(wIfzc-H|)7O+4&1p;<2b)_0wyw%* z&;zH22*ASy0Fs>Osg&R<4?dlCBs|9p7Tn`%`DKO z$y^fUW1*2DA)EQ~4k$iSVF5$w3$NCt%XE-zHs~wuK=2E-+3X-c@U)ANbGD#`7C{fx}IElp@>7rzw7EozH z7`Y6S32|{tw_hCb7>%*d7A%g8iAm3ZB8yY0Fr0e*duRP~Q^e~T(IO2b zPCmH4`l_QA9fe+&cnbCLx-iEGGe|tX?voX~P*$}ud!g~^2Zi%Y7;?iav|IXin+bq% zujGN&3zk=2wbM(|cgsqn*E76LYKV$Gvu0u#geizVmy-ZcZ?Rd-MNZYPl+YR;XOFFt zV)zU^(~Y(s;nwW0wH(p1w&EFe!n^p1G*9{1%*Mi6Fy2fspYmo3P+afW0Q5PsE@m}w?!?ZpErxn1^IReJFRdgwEXafn^LpSIWpd>B zYYzweoq`=h3w0laG3VDnt3$3bpTY34rj4kwz%sHSEjQkQP7wgtjLXq^L1sCK4bvzY z?iqA$|JrYpZ2wzqoLiAi*-81m{TnL`L0xv+l&U&r698N{t^}0tqR$G>6cE`hG2a*& z0ftS8Zdg=3a3Bht!%MKV&M zeK%m12Tc`oubVq1byd&k9XwK2GQT)Fip#$tP`g^01ZsB?GRrlEm{Y`DCUL$aS5ntG zA_~F#Q>?^K$$@ajAyt9$L9Qzd3*r+=1?Edtp4|Jl9L%5GK-28B_^+=^mQ02VP2{Lc zOhF_81A>l&plCarPnd`n0&jkQh79(YA(a8{77x_@!W=r5a?Om*b3@NI+(IjFe=$iN zZYlV3=BNoEb!NGMJ}fnd?RVOz$edrpCr-Yz;eW)swZ{_8tEkqhW%0_=gZtx z6mql~MusKz$3Er4bEr4P@sf>GJc*rhooH^WE`7xgHHxdtyPby;PLzLzd|etOqK?kc ztr!J?W~e48KY^5_kd?+;pjkCdB`CHg+zN)V3#3`sRP{Zh{o@m>2vaDW5oX)Hv!9^BC_ zXX*-)Ph%rub?yvTw^a*fAv__9j?kmu{9KevPhq4e9y`P$cupZ}6COXr!kpLV2pcfL zC=uw{0y>)jjE2WR#C*!w@5J>PJ9ckMFH8?p4MkSg*L6+CJe)}@R?Prh)p|N(X6`)U z`CK2Rq=NT7i75ARLDPFku>lceWSUwY1PA4i{fjSPQbW;9Y>Od}YSVvT2Y4}^b`Gq$ z`x+{^#$o}6W8}aHy~<_P-|^A>f~)5^#M3f+f2gu=_r}z_32LE~3&_y2 z28X!Wo?n(G7!`*)x7PG^*}K_1B@1<6i+Po4h2V2BH+*fJ4)m^nfb2@0<+CM+-zVQc z94|dH(j^R3s|>cgg2po1p05TUr2bWN`Oq>qyle(lG((;=>9*;wD81x)XVBQp>Iv1~ zFdg)hE^uD*B1(Qp{=E^mVA9+1jP7D>v_ZekBXe5dXpw_KXzyEDOKY{|k1NXa^Y(#X zVELfZ+}L$wSr0=vD=Q;>qMpuEjS$3K{8M8*Q@zKx 
zWh?D>{XqqrBih4jG9;{C!wbh`JU)&G!rua>SH0K?I7Qjy7;YSUw>Y@8K}c1dW8;?S z_AK10PPT6!5{KdBY%11d-K=MnS-GRF$-COmWw%aiW!qQuC`&WkjK30^)w1EK$$I}} z@`DV~Fw%!OYv@tsAb%^$H&zbd zfqw61iW^@}yT4m$cgPUuX(wan3gqY^7MC+WRU6Khx9U3K-+7d+H-C-ODu4U8WlcY@ z%-?x6R|3adu8VX?kFp1r;~i08<|S6Ue$1pU-!MK1;jElK!>0Qj)m0s?EI0&ZYBv_s z?k%2rMpxYe@=Anft%jF1aF!ueFi<|3_$tt2PB6^?pM2uE4{5f*7gKs->&wI!)RbPmT113K4efHC37+Kh92mLk4m+E6Xpg zPq1EC>AgtntE5hW&zIXD{V6NcK7M&}XmQG_O;~f5!@~enDmfoi!aoS%oaPs+Fn9f{ z-4I>ZHt`g$9q_5Ix-4pcbCFWg0TB58H5!XZ;7K*le5goy0W2=o^WRracZk%<9CzkN z!|vGEWy+;bhdAVh>oMhC*IW>hHZMTI&1CV%t&PiNL|Y(3)oU3BhyIdo_4G3fCThw> zISgP{nPVJ&YU-HmCf{@tc&o4iq~*-|tC=U*o@V_5x-H7@l>UhJ_W>j$Asw69Eot-Ni4w%k@Q{2w^k$_g;4vl(g1a~;=HR_1dM36@ z^RT@N{p*o3PelhyOaJ=tI#@hg;j>)0`K6(3NrL)VY91d0tDbRV8`%I6p2R|gxIqNa zeCtUP&B+L}A_hoL*wzD&@`dAy>}ImNC`PI}597=7n~M1!y2?sB+i-2sMM@1!E zYwQq<>n;p1TM)_;sB~0}Dmz9e2gD-$OYh%Z$5Df3{9coT%`i4UC}`rA%bI}(=)g5z z)5Ddl2ymCz`GrSwf5_|j>qlGyYiSgSFh@c7 ztAPF&5_MS`pM9Q+{-|vxEECgMx(K=I7ndk7WB%f1{teq({cgRNdhP!JY4qQ}B{Ge5 zFle89P}jZQqbzy$4y`eYk~ie#hy8@ zx&RtdC!_gNzgoGl&~{fGA$)6{PSW?4$?D9!2TM(o&}ZNDCBip5=|E{wfAnMlPnS$u z!Aw>0jVZ^7(`)*Jk=|{j-=E^qJC_5qb`htLKNV~!rvTHbov*Eh`KI<=ZmwI2VdflC z0!COyFFPjOuFhZ0`1}!4_%&-f{#V$(jt@$cOlSY@gDia%V}&gToxI0WB+iUoJHyj- z;I+~^0FH9go3a(-5@gLMu>SKZ`X!wQiQa(Ssrluy`J1)l_~3_M#(38I10P(RPcuJ2 zc)6u!>k+vI2)d?_QiZoxGSM{DYE(CN!tW+!O6e+OADmT#6YB5Z4;Ign?PP#I)lMzw zYyL^2j(T)4RJ{+Q@2je_ta}+wKf3ix7lHp6K8*^Um8)ge59TXB{s%AtlQA_!-t%wm z^l3GD6zKc|q=$GbbRNvxgVmzR*IQ%o;P6U(hW7xHFz`1Dokg7jjv|)Sw>90nnx^Ku z-HPb{IphBOyy)cjMx7&Z>;mFV^;JyC>=yy+0g#nUeUXB({waM_(m?Y$0~D zJXCU$H*Gqm(0!chRIDQYVsc>sf6u8}motNl^+KsZK)yLQR}c5t1Gj8Y340LOTXr#B z7(XAV$8NF}?`co=?p%zrZ)E^PGrVtJnTuaW-S~6^9=fdRDK&H2x1Dg2nwK@sxAO$z zi)q-j^UM`zleuiRpMUHH!d#X+cpO^HILUZk&knm#a+!|o>k0p~;g@F1E&LtsoHX&x#HPmjnNV21 zB_$V$w0Q4sPt{A>dUoqx@$5E6z4j}c3h>&^RM8Tg=d2}*r#eqGy#VGmDF5b{O71Om z7y13yO75+b@=-}k#h33bfh5!MXtMUiu9BVsAzltKchc<5^UnzSOXyyFfl9tFA%J_uuM&9g!fm9^;%eGk++l>}yv?HPft(>;KYkLl)M)=$9)X9P 
zy1?1oi!h~!|8Nem2I8;O`WzmIqe9!@)@vYH0)@uO%s#UFDyiPv-PkyvJg|Ls<{29* z_hcMM@JeLZtwzY40bq8!xkYlpPsv%n~4f-?GTiRK#=9CAy_pL?R z_GPY_tDTMa{n8n#q$Bpn*`wJ_el{yA7^rbA#oU>hhuoR)FTrvBkZZbtad#1HbVa$Dk7GozHLo z0V>*_eVvBFI@m~Y>)#xII|8kSqFnR@k1Z7buEZQadaYRfuqH9gqPo^AYWqT7HhV-2 zxXa?z%I{BIPDYVGhs%dZL>r)NLu7!w2nnYo{3>LhO?o)7UMA9m@5AR)S5>T@!tKf@ z3J1-=C2af5@Dox?$he&a>GYpVE1MsY*%hf_&x$Vl2X%_C_>PgXxMW$}rrfyVEJu5J zY6bm6zuYqDxSiv#!{cRbxF*v-K#%cb29wpU%&cA^ILSuO@u5;(Z%in8(lGlHaxEB} zH{MDjU-amu7gP>AQ7y7AFg&%Z_y@4nl|C?y*Vlq7d+WUUZk1i{{H@7NXMs(1RKYGq z;&1Sw&h&v%%p;|Ig0z6E`3~hO14#uNp7%6{oI^WQc-wnBzS!}(8SPIi#c(c>JoL#d zjxp1iLQGOK*MhT`3@L&*$0VDWdKI?)U2Iy+ioSa)=lak@FMIsqVU~bda-=kgd(dNm zW|r#@u&4L$L~*;7g;>+N4W1AExJM&7ms#wR!1V@0rLP)Q2bQ{e;PTbw;pM!F z#3)3O7%S34g80hk_0mgRQl+UCe7lzPK1Ak{JS|2t7{hZ^Mb>Y^JjZ<56ryi<#G=|W z`J0!sRmEn`qpP{L?iQ>NOT9H|q5JUI#C*8&S>of7zrr zH--^&KQ?*FFiKyps&f2!CIrspad4f@0^{c&J|nP+)wT~@=jZ#KQB%#3XJb^h&Bvti zrSgPCtU%j%_#fbVTH9qE@7?@@w^#A8TMXavYBjvdYPYaKwZhus#iQwW^drHF*QKoO zxev3dDv!_>U-C@d&KGs+2{+mO)!eI}mY=aqmJ74MoFWngssIsY)WJ@< z*SqfrY{Lf>zv4)No0a(x;G1pbxZ=iKJ3aQ2)aB1A$usELO$(G&FKDsP;mgHl#>eX4 zSU~#3A}5>?l6SwadFfUJcitD2i6O~OApVW%HPHC& zXYSLiwGw+YfAh;q@*i3|@SJ-xgc7rLNlNP4=cv^AQoK{E#a*1-Z zL1m0LrkE{omiG}9|K3aDT`gmc6-QAiMT5CEz>D&AY6KBeJF9yl?dw{MJ|4=qwwi5$ZR5PH9o);$QwsA9 zk_uBZ7Y1Kxif%1e6uR0Eq-R)jg!xO-cHJo7e|!7xddQ6%**prHsCZ-NOuD$@P`}Q-G+)_8^X!5Mb1a0m*9||#3>PB0;Y2;u%OWazU1E440!aVj z+CFMJsG#0jObQTXnv_zInw#0IGL6(xGw5B?A%M+JHEsvZS-p7vTON%D|xY>swUY zz=YeEH9neTer_dbfWR2=B~IP6I0I~gMO3KYC1KDuyuZm*XBoJz!;&xGA-VNAQ0Kf5 zwXh0qrQtHwHN*EF)U65J;KE6~P@+D^ay)*Pz+3um)g{$!_Pf2N{bg9|OawkOG}j#` z^+5G_Os`&?>P>;ah?J0+GoC1(DPru)WY@WbwMCP_d`Fi;tJ$*8^GpZrRn%>2e+Fl; z_|5px*>ApM^&{GZfYQYl`i18yd{mnmY$k5s!)89fMh#McD%Zb$X>lZ<432Or2c(>; z9ieQb2H+JwgFSMS{)Y*OK46T9$UMeD){wzKv&Phx01$TO` z7e~2`l1Yv4NKz}Kr7cgKMu1_AsLb1(lN(FdpzMX))bWYvO`pye?xa|_O{zl7Kk%P7 z;r^XG^l$*o?>b03V53~^X2Fb8FSg=~ag0Dstthc`~$8jV89)R2E}~TZ>1%Lo~q^H zuqS7hMkqVth?O=rVi{K;(AH;9(6Iw`Z;8PK!{cBQ1;aZMyK`_CQ@VJ;LUy;O1DnbQzi!NrZ?^OpNfIP?{ePE#Hj-I81~H=qZj95 
z$luX>U8*TQmg-6=h1g(v)oK#uo*suIzNC&R@tUkqGnBM%!uIi2boyklHt2AF2dr zpl$@V7!CnPIsTacX@5e?^4VsT6V-e|)AT6g5F&M#dTMer!4ab~vjxkxL3SeGlKsmZ z)dNjgtu)<|E~{DsVV^P7^T>zRjY*SY^3E58=os_2uhps?&9+3u->dCbcRnvcTG+*u zD8Ru^xE#NC0B(3!m+f4}( zD*mE4xs+sKz8)IJE#8AuCC4i|*dzF~=dn{Z{s?I2=d@@XI_*`Hw@KR`CBNu?tHkIW z(P|P+=$i9VoEW9E4&_u7Nt*?#s<{e&mNV%K+r#TymBMxr5CXox3pc_MJTkuM(+&H3 zh*nXP+&2x}!-J|ctonRxFW!M@I7N;#RA7*uB zS`4qK8k5C%zD9$y`AjcSt<=0}h%wfb^oQThS;9fAJR1G)YRYFfw_ma{q=@2dmZ_f4 z(Rysv!>#Y4|58lP+H~;E+HG(zrY1MRRrZCzZ-tp(QFO>Mv}k8}Ww1lsC5m_=(3Zo{ zY8}3+z0ED{W|EBp*>2B=GINQJwa>UNdx`&Xx^<mP*a)ru(@5jUih$feI z@^ocG`2FXbI()u~ZfY}@i;CGb(bAF36h7*G^VGF+FnTcy4rQi#kc1dZw{o4hepIS< zJ8U|fnAvm;_h7>L{gzzk=N5ZoNz|R{^)Y0x#^D59lf^gaR|JI|&MPdb_I^8e%C))(8#xq|;TZx>AST=s zVqp(USJ7GB%~8_z6o4$k^PSRt`66@Uork%!Z*5Twd^PlP$7iNr!sgqPeyK(qy|FZ9 z7yqm3v{TbC!VoDL247;isL2;4W<~ec-mR0y(YkZ<^_^~#E@~|uP<*GI>kq}5{&K6u zl6q|1wRAo>eX}%s7hDN&)@3<(7CpaPmLA2)7UCQ+cv6TNMl$Xhyzj*xdBU1-{NtgD z8#ghkIVDVtqmdyb_)J66z3r$&`~ncoiK5Z7OXQ%&WlVI)=X>;q#?M}tG<_ZAgCBs6 zzsBrQZ*V+t=OvvRdjEVgW}g{j8~O>no4sDNm-9Re5pM)aetc6?ozqCbzf0KwBsT4n z|BQHwS$CdZjWPph$-rk~(` zw)$w{!eE(At;eg@pG>`ihg+MCkTxy%-D)f_4b?aVI+Y+ z4(hL4KA9a-^lGXfD&8%f6(R%!-Jcw?Daf4sYWQ<=nI`dIxS+7c1C>!#4e(Y6c*|>2 zt14K)EU98G$~+tfzF5LJDn+dS0bWbC{;+RLX=oNrkq=+rLpfXrerpqBIQm@bU6%iI z&+Mn!2$wMnD^qB+0}__6!Xmi!M`KSzUVu^DK{mGGDY51GZf>@u19FJyxi_*Jyo*S? 
zyl_?~%;#3i6;4QV!7``i7u_Zas98DX+f`FZIe@?b;ioyxy`0_vFOJ`7k1D4YES%HK za*R1}Qk%5?dnJwt@T%N?gDAJM$AHiK6qT7O0Bh-HQDRoNOXy$Q&w~5Seal5B_p}dM!OG@iE&8%h^f+(USe+ zG#~;aw98WAA3%Rh#F%}D}L-cd4n7S$69p5;qHglQB9P>sN%b1Y4#gdkTp{*CO!msQ;VB0pW2z!yB=4jK=}RZ;me9fnoL=m{>wHiVWc9>itXm?ptBa@|W)sYbAQ*$o381qSvT&2|D*9jw0zq z?|u2L=S$AIH@1-FgQXPx28jzzx6T8+P9;5e2DSzAwar&4HBnpALofypsYAlq%)lQA z2{`IO5fa_608`WSxYNZCV>!d<#c5}p8?>>n>xG@uLUWtU=vK?(SSFmDByN7`^CiP` zIH(6J=rwX+XV_~t)#R_5C8r^hp_?d<2j~9Xnl@A<;|Nq&TFL)|MVzpk9M5dmVW`E6 zT2AdP+cl-QZ>d*7X`s zjLJe&YhC&X%Ox*&CrSPXNC4NXJr0cxwSIZg#`A}|qj#Csh+BTLhLkus`H8m=&0?QX zUy3~W=3Z#MGG5-fYW}_QcP$Sq6F!mWfxT9y*l)su}^UM4|F}DZ2oHEho(Jq^4ul*uzc90m7fc+S0N3vcr&P&iKnL<40y) zCyI%PNg`$C^j)^S=&y8GhZSRpcic@VU&RSQwUep2v9x)I+UjAvl$ztOr(0LpLIyS; zZq-$XALD}XJX!c2llONEgQI~1Jz61Q(y1)WoAn2UyPL~}4L`-c9_e#lh#!e>Tp!Mg zQ0^-G;~y@iPf%)YT~!FLSme3t;FcNBRrCG_8;T-H$);XigJ|=5_;QH+#1$q*BLf~F zXSgi1B};8Tqnh_fvh5vd`e!80eWjM1^45T-^}zY$g0kbayM1{E!%S{EYg*c(!4FTH zUu;nm+9QVNX!p`{gDLOnzmjpnoG#FM#v~R@-4|*FBb#i8At_POV8!jtj$6-~`p94V z^1NgpL7)7xm~S^<_KY$T?SjwG2E-ju662|C15di8G(19Ss?|(RY{N|{&&vqiBHs+w& z*Vw2>8D;x9NK}5u5d%%*42r*Kc#sX6iLH=vR>SHIdulom%@;e>NGL{;G%UZ`M7T+6 z!x~@{I_l5yFw)Fm-MFa~aRb@oqUqJ;>oifaYiqKPtUiD;^A z(~KKfM@t^dR8q>6#joRadSO!~t`D;hi;pL=t{_8?=ag#5(s`I<943x1Pj3f185L?MQ>6?4doC^Q8 zH0i4rj7BuEdoh_^2z_7MPb#L?5AZ*j6>ZccB4aS9j*bpT{{1z^mT0|Hu}uDKOui$3 zUnWQa3}fQ9l1p8DxFhxyP(QOjYJIEYmLr`vKkp{0M@5Gc!TH$s&Fwr>8jVR}_4b#* zsj1Wl!-G$oAJh}1>Q-n}`+%*;w6=RD#e(l)I00XIPJh@iWvi`QocJ;_sJ`A3m;F>Hm$&z}#d}TCg;EMEW44?udK5CG_m=`ZJfG;uxJgZ+GI| zXF_rKLzQ4V9!{BT(0zVzSk>-moA z-vXWEa4o+bO3#PHuEHyz4u`YQL2U8})?WcfwInvTotDV9w4@~?DS#$7Z>M9ma|Fr4 zhpm2lPS5#wRuX~fNc5OR49h6=;qtCncj3$K&u``zX)x~60joG8+tV5rMFub;)c{E# ze>zhC5RJ>PAo?7zpWl6hI^n&O^DPgMC+@>a*h?aajBr1VX_ zeD7RC*CD)CRX*ilHmZUim?5m6cFn~6X%d;ULyvGR-rUd8744NeFz8%SWle#oivBeq zKCUk`MUIwAr{A6m_hG{`z_PDH$}dZ4zw}r5WRIqAHz)IeO~lqqtDR)52gsDxuBsVV zrZPl{{b)qTm2BymZ>pNho*G;#VUDiueK9oGbmO6;)tQHVL2~oeGv}vZ`eM53BR@tgWJcfSE4*MqjsBY{TSO_FN^4w7s(t 
zgYe)1{?`2rU1GXO(Z6&;m$+}lBd-vF;`rIBWYEGBek@dFP@R znZ1(0W0rQc3ViMTXl?|NwKSUv>x#F*!uRaH{)Dc@vDRop7CYo~7LXg0g$f|jj6QNk z+-9lb2>!wS!fZV^a(}viuxhh%HJqoedjl)i@Oj*IP|ib&HoK zEJlnc>@jktTZFSdn_M<0Y;K7jY8dm!rHq;p=<&3!sClzQ=&!_Cs<2ArdU$*=^>aU| z`t-!BeIg1u?7AIXjN_6vS`be;V98(@q`G5eM2n#8B2J(A`f09jd9KQ0=kKhs_AY_` zaFw$5ibMo(ro6~Z^}+-9s;GH=N7B4aJ&!O*Y?i9lp}e`fK;Nl+VNZ0Y@yh@CO8!=$ z5j`~YZ>8Jj@|Q{`j{MnR4St7cU$NE-5dlsMwn69JXhLB7X#DCEe;9SbP$prxP%5~C z8k0cp^>AwUH47Rs;)LF%v$fo6j^dh0iCAwByGDbeP({hwLc8@KZ^@3%&Bsb|43w{d zrVj3+Z!Mna&}CvG+2Z|VMc_Yx{H^JOhUo_V!lrO?GS6aav6@m6gT|{3wyReWqvR#` zRd_DXJIQ5FO&)%93-Nw?tG9s0ZZuOp;RNMyZa|W!t^nZYS}ow;{{ZIf*l8b!I(Rf! zG{!ZGcj%f#ssxTv{hSnwJbV1oXpiPefx7!YxJka2z4aN~K)J4NJ!nI_hH?|<&BV0M z$r`yN*H5%-ZtXUB68qDBp`3{%D@4qM8mj{l1j-*;gi9%1b@jK{GRe~PdJ5^v^q%MC{G+I;!Io}apjsKe+_Oa|~9i@vw z$TfFeRojRsyjDHLLr$SCbNZ_W$C&pO2ac!qvhQ}vtAZ46qWL@I*e2V7(6aZ^-{5RP z*#sul3l`<^d4S4huPX=aK)+bUl=iB^eWtnQLz?{pnO!!5UI9yM?K+r?T*gU zswkQ!@to;&Cl`B7me;I3McGHJdLGYQe>32wPa(P%3alCWOp?uHU(NWYHZ9{5@vL*> zz`KD>Q+5lU+vRUWSi*g)H~`G;(`(k`kQPma8uQcFtFA|No15ofb!RaM!|>Lc5k+9k zi$SaaW`<}bqeJ(+)&o4AqQxU=0=_|GMEc&$r=)GV0q2e9os=LY+}+C-iYmO~1v zOl?8$YUd}_G-?u$eoiSmGbR8=Fuo=yL2WV#8#PxeR4AW z*v!Onsj;Nt?@XZ~C|2(6@5PH0ZJAuJnhS`FgPn=l8{gmC%Jh2S3N$Q<{{YTcQdR0^ zsWAMcy(pp>zst*dMwc-u-xlY;#p)uuGt-7;*Clw%8qbn-3MjBmMCdX*X!B5|8$Bg| z3iT>JT;NnoUX3}NTV()&wU%`c2UK-XH#Q~9l2Z#-o3y_{i;E(2_59ix!(?Lps%+fDl6q3D41A66@pr4I670`WOC^mJ3JUmE z%-PCwxT-WkAuTD8?qO&{#_LpV_FFLbh}ECsOC?iNz<%ls#DSFp0 z3oVWG^SK2}g%s(Ii17-ow53RZYE_%z!4^(I-!^fW6urez(?Cs_^5>d()4OEGDKMq{ zx5y~g)cF)u(~Pv)ndIas%sG%+3GFLo=X8Wn$*A&7g*eAfXhQ?3} zO#MdNRFZh1&a&6O8DU*4Dz%?Lv~K9{DAf82p*kQM%csWNfVYrb zx@&5Q^_2fk(M4~P564%%W=Vy*ea{B~A2M&Sn7gdzg39B4l2z6>G}%W&+uEwVa>X9i z-!l!H(dlNAFB-LLigHBW8e;f1i&1xetHF$VL8aIBe6FBBcqqiLUfvNrvF3WTm~yBg z_)P`PmivDIT|lD0By)k;IjrNvjXMBboy3VrCvZy%#?AQTr1V$k(!D2vm1V@TC)u7! 
z;>zI6$}|X$tLkn}DQu_IaHFxqpK;2+EPd-y)0)>$=<7lwSXdUh5>Q0ThSd3pMma-A z8Q!j(XC2LE#f3IoM240aO1Sd+r1ssOPx(@)(^CrM?5V6R=b2U;4zDOj_>VrL>@ooB zU3^n%*m*Imk)~~NuPn-nRNPhJN+hMn7SdC@yUR{pb;3W85rA=l(yX>P&Rmu)QLVB> zhM6w9+(txp;BJ@HV=zeTt2mWvb2KCzPR#O_)9WRw1sE|}0a$na2`LVkuC z_aQ-)2WCriIoOpGwZHK%oOH<`kp9)+yeV=7d%dwMlE5WV{KD&L+;f%gQ74XtM<E~SXEbaX%IB|bU z6yioZsYe6vip`QYDuVvwp_AMC7(vsQ9T7doZ5CG_wo52(Y*269$Ome95P&ziWC47CyJqW=7(z0@r{{RzB zf2kEG?HFG?J036iXsejEHpTWKFRe&eD_S>YL0-o^gZ?3k!_neKrqb4#R>#YVlc!vd zAq!RynHekc&rj(t4F3Slp4IJ74m7klR>_XpWint$LGwZ2vXi@WE}!O7a*~yd1p%A` zkUTBo1;Q1#T-{d7LUc${-)Z(a{v=i9E;a4yN|XWqq7Q1{;$)OGx-#a9$gTDtsr+NR zYSwLtW@nV-JBu&w&kFL%DgOX5`cvYqbt2Z}bgwmjat7r`B`b8cpn8JddixKG z&Y@@l2U5kiu*|EZ3D2~|MWqT)m>t@v?xkR0k`~z`W~k79p1ZbQ!_kiM#@Kk&Nm-jDJZKeU5nd^gai6BV zQEJ7&bPeMLH;UR zQdflml9J22y+9vIm5c*gX+8bK7&f=iwl}yciE@e{+A z-e*(ZFUm{PAl_`ZZ9j8mw6<5?ejU+mz%}-D?W{HWrle3eN?lU+j{m zy7L>#mnz;%V1h>ugn-+DL-2v@I9#sl@BBj})s@{TcCc#81(QcxWH`%LDXYCj^9f0b zEtxAw+r|jXq$|EaU-OzH_I!TFsx8xWhN890kS|WoOtc4An37uD87oj3!kunLQbt0Q zoN%mFGfG(@>75{NT8iw`EM4A>b%`mdD{0Ge9ZJ+r(fu!{3&#Zt_!w>ABzd-5rl-3< zb&R2YW#q$<2`i6r2x3DESR?`qjkoIu)#m#skXoIj0(?yd&egI4^u_kv<8$ryo9bSg zxm#Bp322I5WGOizXp~0M|73-@-b{xKc6K~j+Ut$^xayGOvM{geO1lwDQ+&9x>sD2KDTiRymMguE;TOvC z+eG5xa#YK^9AG+uyCfjK+Rwls_BFZvea}#7LmI;S1*_GuFeb9&A;v_PoJm60%1Ts) zhe8$Ca$Ev->I#StlaZv7_b+MOE8{3q@`gcAL@fts^Xn(Wi{`De#u}-=j ziiwGXDWl7Q5?*O3NY5mt=Md@vt(tf~69RmGIhzevlR%;8?*}76wkuKLlBFK)&A@s;)1*ZxXk+n)~ z#?%K*#Egouc)zJHR{6FYev=4_n?13Ca)jVc_T32DL!9KTE+xe6=OrX`1lDQeeEaYF z4UFn1r0uL}%Pr!3tDl(siTIata5cTwn9lT|5yxBvr>6i80X08%e2F@EhP!Fj|7)J6!8_7=X*#PnLh%!oB zWeE6pqqP*XZL(VDL$b8vk>dswU!8c84=N-Aah|83{`ED_!233qhHh{eWpi3-Ver<& z8X?X+zdc80{>mWr;TF}p;LfdFS6O@p*{G^XqYj>dt%7-BbB=S|oBfsfr zS9V@=TD0JLD@k_UYg)G@meMelubd8Yb^(*r`&OluF2G7n=<)`l(29uhlYi8ja?T{3 z?Z<89$$M$&N?J;}Kl4yF>sFAqBh51J7eVHEf5MK4s8Rc@sejm4qIFk=7G}zBdZtXK z(;>3tXsk9qqDs$0gUVCT0(kZYvE5M~MxLJ4t{t^D_$6RCl=|Jz_?CIk+XAh)%c1z& zeX+lXZ`m!$oZNje6)8?!#HXBjT=zL3e`?%3E4|!q@4IujqAYl@n+3GD9Vx)` 
zq$4)v@>7AsX?ZsQ6n_9cQGj8CkHgt>{O$)Ic9U*Cvl_LlCnZnPJ7bZV2D2bEv?s7;H>hQ`e;i zVu|Qyrv4a;{)2N&#pjtrz*$ZukX)`DXanwA(v_o zW!9U=0Nu@FL8Y)p4@x#I?bZf2@G22HHzqYQ3PMAJlgBh$T7|b1L0&n|YYt~r ztdI(hJ%w0pdVnn;sm@2uE^)1ng!e|d`^~+X-aa{~T$Gni;g4#d4M>B49UOeoku?a& z1wi}PH$A6Ds3cmPRz`FE`_L~d9Oi~i;io^Cay|N2<)KImQtA5eNHH8ry zK|J%?n65DL&H&=IJ)D{mH5VPI`0=tjROnz1)LRT^wP&A0Aht;oHdUH+5 z6!H{Bs|vsg9n_!)_pUFO(>gdKZ3Ww-jl&|Hu*V%Ram7Jyj?hrt?tkS;KiuM*vX%h= zG^G*!O2_se_pb@{i1N02UkOr0pTma=FO9V0yAW_nl#;At;8mx_oiemTYT*sG@Y*+T zLN_fW_5g9Wq3KjhWz<_8F1HbdZW~p>Cm$Tv$qv^!{T`34N*gIad4PfI2|R@y{+R1s zU;ZiNMtB|$+Ss3~;c%RDf1&1!f+vPsmg7M2>QY7$uedc^d3O2gax3hiryK+!eS*4- z{BerZH3j}l7gwUWc6!*PWF#OBj`zvP=BqNU*QXvyTkT9~M+IwTr$^rfHS1uSc4v&6 zTPwE>MIy}xK9sRApQT|z=>(*d%8`y-U-bdeDLeoNQ(8xc_Iqqy+Q(Q*RmN0OgvMoV z1V?Gr{t!=faxjoEGoH0jyTyH|?a4)^&hR`bE4L*pA0VKLX{9dE(({)YRvUDQ7Dmw~ zDpcdF#bxq+4mwt{lY^CG9a1{ZGI@ zub@bd!u$CwZAl=1QiwkSt?k<_Z)V}B_1ueVs#Y>)Ho-|ej}v!REGqY-1RLf^d#fDM?c=HdX7YSL28iB#tKMNWum2h$pZ$gPin0brJyB;?suul zDZ)zG3NCw)r2rM5jEc@RSB!CAd+4;ymDZZ&C34wPRsD`JO5wdA;F~iYW#PrPOYU5B z=BPth1!5r!89xh2Mia;3$a91a%CX!!nI#XdJ{p) zLzWApj@ra|ud)P~Vyh~rxEjUY=$w^9( zdN|sM-i~?&45dlV){FhiOx&V5r-jHdG1W3s+7L@5uvZ zQdv~AJh-Ym69FY^ag?n+QbBRX@(wVj$8Iq%+A2P;x7_6G$$=;~d9!Tgha1Aakf?@` zzUxB5TmYUyP60euqBR7&CA&~tG>!B-WSM4|?X^-`A!6XejVacHi9hjMIU80%}E0{?Uw3*aivIRLrWXEbvg5p306{;Cl0iv zou~Pe=~hCkU(u~UZqRd;cKS!;X(>(FuF})H%y!K91&L??VbnH~$sqcIQUUj^ys0BS zS-$FvQFz%4gH&C&01}nlQY7~&4d2rrs+^H%! 
z+S|+_J$>`K^zqf5tD;O z&C5uXk*i$VcH?L}5Z#rd zRc(58nc#4ymi$o~WH&SXt^$`*4luHCaa4T=stg@<`<78t_KU0Q)6yAoIo;*SVZc&T z#vEmFY{Z#ym)|ZLQp3DQ#~;f*vk_vPJ@ulO{5fKm;X~4DC>$ zbE$GcsOpMWjh5+leP)|17UMT+eg^AferDF(TEk~pC=ub9V(w zWR$qlbK5Pb;ax#*-8|}^23xG!em$yImAT?g#_29AvR(lRLsD5v5?xP1<8P&*T8P5 zk3rg`-EJ*k<<3}5t;9Clrz%rVwx2R6KjBi)8p-LA*m3Fq00?v(9X+iZg2Z(>=IU=M zYSPy4rU|$-whHmUY$++i&nfd%I)W<=@XJeCuF-AwtBbNCNRsc?AxQv<5ft-CEWDhJ z>D~i~Bb4q?AdC^~T|C6SEw3$1+Ab}<`}%Bqqv3Cb6+=`&wF05SiFh8|Z*!;e$1YP* zDw zN&>UVJa;4-YIPEkpkqI7foQle`fU2HO~U(iYDb&=H3EhnX&;{{SxXc&nd@w%Ca0NGcyoY10iUO)}GuzWR|mQaB|Z zN?Hj)AggxcgyShK_jw*Q+Dt2bx#_NI3&Y`0lZdR^;w&XH9X^nj5(|Y07)d0WuE8oxtV=!bmrEihu793Go83m&H*0pCOhX6bbE7SV2oP}JjH=FS? z;WH_`d29swrpnJKPI6Gt86@%t6`pn4V<3%E_o}S+<-e;gxRr5vljW)8g(xQ&NC5B; z`Bn$6WyZKU^)es^RVhXNi8)aRsVA_hE_V|??%k}rCTonTWexr(B`>;&93ca~Ym0~O ziECV|lcrqO&|3&fu#N^cf$S<{lg87wi|P5P&gQ^$42UwV7kY~oC^_7&)LXHi2=kpu+Ig;_S^#w}tvEeECckPM znp%z-N`~y7^c=Z5FKDUW5$3H}8T(Skh>oG=5}b3*SS{I*;0gef%_G#!sfh|8^Pa|? z3De>(alMYMmv&KrRtL8|sf4SeOK+8riq9iH_TqE>sN~6ccm$k%>EEaeXmahgwYw_(vUjR4F8t0$%N{JheES zb4r@0=9XL*w~odWl6|TYzT|S>NlJoGKNNxYR)vyOe_CQ;$Vzaq zbKau;)~A#K8Of+s?1Brbby*!LlBT&CvQkJR=81=CODW2M75Y>}FVn?YTlp3zQAkp@ z^VXfizB#lHFoQ}LYD;-iQS(FXakIy2T(QPXWOBIZxHqXBYXwUpj*YC6lZCGzaa3UX zumLAMyVP&3c*;nm%g2wx4n9Z2G}bj^PLN86>qfVBjPTr425TK|d(AJEr~&gwBT+in z2zT|Ryc8PC7HQHY#ZV+T589Djrp8D=#1%<@R7*MjCm(u8KT%3vL3EFiMzJ(gpUH2C z;R6)I%q1AdT1y$+ccv{nxZ^pYQnX@mrWAz#0N#y`f!2g*XO5I~aBu}KL*Rxn5XG=Z^ zd01Q|wphGW&>*fu{+Fp3HF_1|}$Gv)W)}!j(I?K=$ zOr*21#Sl)@>`!x=$Thx z^I0y1@RoqUPql3Ht#W|(?b9b_{{W~a+>+y$DQsJkLgXn95QGkx+B%YW1N{M|3ZFXT zs8wl$W^!C(kko%9x!0*DABgoT{{T!;PEKCs!I2m=*ilx|&@0}WGpHABhJ-P5c?YV_ z3| zI3FUu-S2R&OW!|K$hAgw0VSx&Lk(^1ouDWu;MTZgPqPM0QdZB`xbxjo=In&Uwh=;< zh;J#`VHx!;#&NlzJ=K)|0EmkE+?#6_1B+zG^8m44&Y11*g)}NbwZcO;LW?`YmRD>5;LP1I7qtTuHxXyA%6vopI4x+F( zE+T%3%R>dpNly2ag(Ui%X(WZHx9L)w$WlT^GLcu#*iA8mNzh&sVYJPp6$4#tptp1^Ab*Q zO>*HE%8_%LeWkReXVZqb1PZm=^xdB46Hwin z(yzpsX-n;0EX}P1vAx!vQ_+^LP`36G5z2B~DaPzq+1G~U>&w2L)o=y!Mx2V|E2AlG 
zCCkOGW9>NH=5fcEg0!t^>$yl@JC|KNQst%aV4A0A^Y_FHMefx#QSOjd*^-eVw*WX; zBh`c7l0RC|qwDc?&=yH+yZr^?J1Vz3B_xEr87RX~0r-O1AmiN%$5U6o5OjR_m*Zfz z#W#lR4{ngufRNSBGeD`Aw{%CF7mdj{$^krrYaF|1i16o5)B17QuR^$&r&-q}qCTmX z+fvJF1BAMi5Iz=@*;S{MXIJnB(miS5mbcSd&K0UGBBi$p^hB8*X-P|tB?|~E2LVV^ zC&^JfsQ~v##p_UL?H%GOr>6C5Td`s@4&fC+f``K5D73V=0re>;3n4&~MnTV)it^ur zmj3|Uw#&xY3U_t#^S{2!478!IZKSb%gH187T^bfMX$lX+6n76vU5zzxT&A3 zn(L5W-j%nuQ$#d_*zA0_`6*t`8ypx5;|y)pfO9T{3T%Q3m5{81rD< zBD9sIXeX}aA>D*$BPq$itF`)Ny5p#BEi*C2%WBLUZP`(Yj)gK(w__CP7ac1EKL(EExjJQ>|(}jf;EkPt?i~)j3p?2CsBGR&WP`?g zH9cc)n_#jg3-Hx!QV}J)TanJ@q0M9`&4#RUgozBc^E?bC*1+Wi7fu{dl_w^TDC@*q zwO@#?X-SgfL5`>0ZrvdY1TUr3+gPtp3(Owg=Zqm0FuP9tdvtvWL z#eP6b$cFPuems_!xMwJOL)(yX!ZJJ7Y2h}IgRZr1qrOUp9fYmOeWfxBVYkT(ajuvw zJR5W*5&STugPpnd4Zlv@q-hz@?lGNV>x=J{p$SUMseP~G<2*V1XrN%=g`q<}ywsJy zVkv8E$aNNrYRw|kH4HKyfWi|6Y`3wMF%ka&Gy;+Q=gE+Ro`nfHAyi(SmwJ`rUxoAJ zszZo=OV)>j$WBt*4^8r1Q3EAPY=rLVf_G;$_s0=&F28A!4VIuw)AJJ%Oswu{NsbV_ zV}Mqcns5`&)hL6Ib4t+q+s@VAADzl&F(==`kWw(VScqH%lhIq3_=g9Ym~ptzf6bRU zTtl?JleugCQ>blmWygUY?(A4;OU#6{`>x0V^OMf=3AfTubd;0SS2DE?k5SCj_sfjt z(`~|~tJKI~?gFHof2`Wo#b-=`8 zlz&!bxsp3aNyq@vH~#<%>~Yw$S(tK6bXzR#U9QrSk2+JnY=r?0ykRMI#l7iII4D-^ zVZtW|N+oLB6zSWu*8P8a@iW9no|hu)A$Hq6tq2Y_`bx5=6o3g+LH(^~ql;eCl!t2J z6HCK`cf9I-O*!w~zfv!kY4TJZZMT${H$K`^!rTv~4K{=P>Pf*Vtgc%ZYvF%h>jxUK z*lfzT#kVmXg}S#!@V2dNAvjZxHML1m&Jv)#>I9m<4M8c3JGwH?iFT%BI~9iK%0PW` z5;nT>!->HywxX8M(?KC>IVw=aDoa848wYQKH@O)m;v*H7Int?CeNOo19<*u#>R8D%^00Vrh-C?4C1&p4|d zp0vAl#=O?ALB36#n1a=7ab8mqp{h!j9C^;=#e`%66s)TohBKbsbj|+&QtD=G*XXUh z&CcZP%c8Au)LKYhLrpfkU=OQ;V1+s4)q^b@;VauAhh^lZJ%>8oN-y+ z2IUN^J7NI*s+*4WvMYa3(lEDFprt8Z=QW(gjr7ponB&TIerQ8zNgxlf9Zgzv zWXqI!==!kicG1EcbwiCIwmT(VRTcxEs?Zvj%e})fA)x1r!+vHA?;wAZBc_m2)6^%!_w`aJR z?X&_>>D;5w-nQeK$sg1PxU)>Z-H{QLHy80JV^Tf1B zK<+rBEYjUTC|bx!xv-~SxUwd z0nILg>U^9ebriPeDah;HzdZEK5(ViZ^323+04uc=#Ed8@EALs|zViE|ZYPeE>6_6( z8C^mudo?>X)U=QTnH0urpHSX$o-x>qC_lnFOl?p4Cg$ z(9p^OWg|6c2{n5KYHD2o0i66-*P*Ft^EUoo^sdt(w%3Jq&06CWNI(D#bg9v5rP)<= 
zryFySaZ;rU*i@uvBCPIPSv>8{2`(dOY?L5p?NAhxb!9eQ;Ic@-p*+A2LB}**(GPit z&~}b!Mv$eRL8whNpl97#$ie3{s!=NZH2O?MZQ}s@d(wBkKH6eW{dK z=Q+g|#kplbgyNaTwyUnpX1Tc3pJS3p6^071d(#N)9)gRHZzC8_N4ei3A>#*aa(t{!rgZ#yD>-GeiXkXgDt6;zps7S+duJ8qysrDFK{-YnEmfR(C6o)LNLpBNV<|MJth-!yl)(%aTV^9%a)m_74=-qaZyg))xB> z4>Y9*)G^oZ#c0iHO*`UFo@(!Aax;i|Kg4BYJ4yJg8Y~wThTL@^D}n&rdHCu2n$9)O zrPR76^5KUfOJyhcCAYl2>Fx?p27g-g-Wqeq>Af^?yzD9&)<}Gs;M386+g9z{PnGo; zSU|`~&#JaId_A;AX%54V+S(&Lr;;lR@IO*&El+ujjyb;<8Lm*lBx+aq!lxk>(k#|iiMts6?}`>b1nTe;~- zifzrS2y)nlqbakDfItdUo%uNX)`tz~%bIN>&!jZf_R^tyzeKy+ZfI;eTQp+M$&2GmH?2Qc2Y^?JY`&BR2;p{VOIylIwF=aipm$Y1EPByaw&L zI4c0|8?!-*TpckJT)GfP)$mbjl@+E2aJ+KXuZYve&4g+6*yqkhbm+ zCJoURhc>(%ZD`{g)Tc3x=mo)rCki^?xb7mKx$OHc)jG$iB()mjA!_;UHn@sig{e0?CC#~r`ei7g=F|_<$c(UZgsg60 z=Z)S*?u3^2CiY5l*JW}owTH}Wo|vB7=Vi9?kc0@5Fn0NFwzM_oB(X)YuA92XZ;t>HrjmiNR4I__;1q!ICxtV8j2Pwf$IKcBCjNSoE($a0W?GnV9QQwg4RILD& zxaXKk6p#i6&<;;?)7rhgWr!Tq_J?kcAc$^VE)d;mM3>+#oIb`A$xutESR0r zUi=K6_38Ud*IUPgkaZ1_Vk%@<*9GVgDN1n(TGW`zP$$yj3L}6>+5kS_)O6}dx@olj z1{z(tck5Pz@ix-?skTOl?0c&SYtCF1Iuz6D2eGO*(K=4^%O&k zHXoNB?-TlGQ9tu>P9HnBt zVv9<}z74Sp4#9oNFTj`+wMoE662JMCwJwgk)w~inXAkhs(QVLimj+aK8ie}PmZu>} z%TdS$0l=@s8d}ua(s|lKhx}7siH~oSrNaUocHgr_c`B0?Tigkb;3`Y3EAeh^OIR4? 
zNGFZR8B9HsX@w`sD9CjUnj|@TY7~i5p87Y7C@u|-d2U2s)1}f*KN6ckKZSTsPrsQp zjA_~@U#>^)#leFD9;Uf6vC0CZFn2bCjCu==bnKyC*Ve4tB{>0=r4ouAZMf}}WB8@b z!6{!wBRF2tGJ~{`QUR|$_~|E2%ZVjV!C{#2VwnnMq^Tv@e^NndLm5w)v>_`TU?*=C zOC?T=;+$-w_<4FZ36>2H0vmEw9mf8-hj?jkwAz+8tBt5bk{&BjI7%A#z^_`_A~hYk z7rD(c)utnC%d|7JwEfn5ttqukc}h7-6sMM!QN~Ki9PTH_AG4;JnX9$Lh0T2rx~G)n zrrlQGs9Kg*prf7}DoTogIZ#H?fK;LE4;OVszes85cIKZp_o#|uG6V`;%!){rr`G^Ons z{?O_~u*QsYRa`A_lH5f|cWf7?IO5!S&4^wQkO4v(N{UjHZc@hvwA%)s)+y8G`>3@P zG)O^94clD1B!xWKDe{oCl_am7xn&Apr6>wQiCrPZ$z5z>mlkGvp3kluyT;V2FccYd zyJOiMLnvs0^*&J;DNYg-z(Rdk&N8hjKT0$lU3WVF0DDaLUurxllVYuq1~x24VlX9V z30dB+;wXr$2$_Yo7-T_ag3dRQ|RgWrY(+MWZwxrf7v|s1y zJ$LntX}dh3Dm!zdr7cOCTmBPjPIBVfPQn`<2pjRzxQDD26HmvNadKUJxLKXIMUZkW@!JgrH=jvyM06w%m0(pnQ)&6uiEf*Hm`2*N+Sj=5+&DSU zVewkjD~j$CoKsr4Hn8J?>5?TY>K#LY9)_7|X9Yk6fOe7zJZ7o0US6U@Fyu;;E?Y-? z?S`+^OnhzW+huJ_B=GXqNIyJvs}0IzCs=jZR|UQsMmA?+N&8l5X_IijxVwQ_ zio+@X3wv{)lTt#vq9m6UaFp6qIZAL&E5@o!PA*B8x&S`amET@}WiNc5^@@Pcr8;mK-WF#Cb91PQL z+oIA*D=zS?3R?s`M&s7A{cmAJ2TGE1Rh}_dn=R^7$Ve&5l}j~6vCD(GbQ7MybfL#7 zDePpTxcf|n8XgvaF`u-taj0#VBq` zQ~Ok|KKxad{{V4BmdksNI#pTLl8{u4=NwVN@>amf?3c83)ReY>bI?;8b=PXf)noIj zBBwrA<%RG#6#ffq@`BQNJesf+o!T{BJ9!Ez1wak~&06J4ZOFk&S2*oh?W*MDRRg@^ z?^byC)jW-*XQcy+Y|$Aef~HGx5(=9qt}#w6S2;=v1gHVUSJ0$7uvPu(vkx}QV1tYv z)MyDeR$E)TVHi>7km@Nx_LaAsCjx+S;FH2tgVTzJXmQx*Zb>4qp?a3tQgVBYO9=R24;asr%edMP{$sUhc~XOKuPcN;Kv3=}x zq_pZfXB4t@4d9SFWYo!GYiSq?HPBnQC{hTjqU_n&w;NWZniam&6kgjQy zQ1kq>5XPt;ZHgMVVBin!Ml%5CflZo(4h|^BVqgw4MDd}Kn?lwgcLI-U9zT|tw+sWo zqMM7yPAe{79?f{OP_;1e&{yB2=kU{U7bB&8R9uXKSSR5obn*FABwS9!R4alB!APgd zhz~(rDksJ&L%>LX<7sv(jc)+@QZhZNmE+cxV&hV@1(USE_}+MR4Xpfj|Ak)LG@ewB+%bHZIysvK^|B3JJ@PB$@q zRwq?t(o?3Q6lFoZTBE*~pHq9Xmslgv`A$Jf781R`g=zp+cWT!6O)AIHUi9t8YzvDa z`+ILfem)eJ2~!K|T3S>JleI@_Cmz%;GMGtGSdnjA_IK@e3C8hnx;gcX+pZW)sER(d z?nfyoB=1YCsZHSXkHt(hGk3ZAfA=iL;5X;TvCv$01)`Ep?Dc+{Q*+?Vk)F{S&d# z9f=nvEiRD|xWD5vR?BT6861G9=t9WJB%~ewL; z_hkB2l>j*al%IN|_(coLu%_j5c@9Wvn{a($Wx(6;kYYOFInGqsY0P_W1o2wzI8C>v 
zpR&tslsOV2zLCI$2GrUWl@Np2Jgj5uPkn?FvFTW{;Iv7*Dl%y1^3mM5j7&TjVfM-ra zm8v5?xr=Gbj7e7IFbl0J8?T}~g%tsvr6Hd+C!^?5;NW0WeB6EwO+~`UJUgE5l<`XJ z(qKoE5J;OX@U*D6akRJ-<|(t(l(e=~MleE%#^j892Jfb>ZRw{c*sfv84WYMC=|<%( zDOXT=0Yw0$@KR1muQ=Epw?fn!8vfi<1y=1|Z8KN^7R|Qn5T%32$ZRDn+8pM@uB``2R?a8zTK+2b!5d@p*z5#ZsO5?y$em<>G#*|@3Sgw$gcvn zR8-sSu(SuZ@}5{)4*j|8a(J&gc;#Sz?#1Rkkh1gAUk)h}Tp`!?Fxr)n-wtGiB^btB zR(Xd3 zcuu}}Ktm`h3rPv-xzIt!OmV=)VU9^kc84mAX_dd(x5B2hC7Ex_f)MmoyAoWIf<(7G z=K_)!jHLv$(oQmbw$wQ;aenbLO_?pnz0b13octK#G&WygsW04QE6a|h9PTb9B}ZU5 z_3Z^9=C3fPLbWi}w%*$kl+r$(h~XPINOcQ!OWo6U)fY4TsMrT5mA6Z!s@W0AHsp)T z_B5tS-+ZW}>kB|DAI-u5!W#A90gzlOt+-_vrt&j1+my75MXu0|BKvTbsgo}*DdsgD z@&gG>HWZ|UsfJ!yOCE2eA#Nvf!m@^&&I)6V;p{eGF&b8x(>i9^3Gjv=pJWBbn{Z`8 zB*`xmk_wcel5#6M@gMez)A}y(%C5KcF*@9hh_>5dC3hB-w1S|g`EDa%ZAfXHF93xo zQJ@d zpYd__tls|s`#^Y`5?WXeCeZtCmk-jgh>PB?>Qez5{{ZUEUQ833>WYR~r%BoB?-(^s zki9n!MgZ7AC%=Tdrs*k{8$bSPl|F{kl@%>v#3dVwy+umNJST1jDt#*Zejtag7M*#2 zxZf?wXqa(is;C5j9nYx!)r%FR&X%@zKPTy2m9^2I_)A#`E)krsCyboaTV;lHbjtnW{0NW0Ky?m%N1&|m z5T&G?f!C-=JxRtZ%DR^2;y#0MnGu$4l&O)XV71? 
zh4SBQddAd94M3K#@uxzC^kipdSyEy-;%#GwuoODZYlYFEi8-n5%fh*rCxY0XV~ zw*)*wU2Oyeme2kgNaaW2AP@7ZX0DBj+l)#9SM4ONWrs4+MLYRJ~8A znx8zXEI%nKIS3sEK4#Y`waApTB~CfYh#QJi?p4@TVfgu!r`*~SNh!}61HEg+qMe!B zW1Ahq?ug_tk~eXlnW(2sZqBl~I8=q4p7edS-OMBDz*4b+jw|udQtlJcakxp!1`f`@ zsi0FzPnLq;z~e|eCQDybw$2uPITd1(fJd0%$GM?dx@_Okp|*$zla6a*)(L9r`ZlZp zI)FJeWuwFOTq1`$#Uic!J3AwOdiv8*m7U)YLXB_;RuUz8EjLA_M%U>xyH<6n}}0u-(XN?7wnE+{PrAkc1~ zS>S%O^BHFs?QL3a{;z0nXx4k!JdS6H4{s?BPV z8J0#5QC2Cg(mADPH6N;T(oXl&P`ioMmBmeh=YJfN5YqZAz81Ql{V+))A&0(R_AG{G!mkSMld z7YG4A+lpdTXOy`+C>D6FX+3`u@RtL~tE%NNBcs7?3N1BY~Njc!Bf-BeZ+p%i9NACzpX-Zf; zsUV!zC#dv&_Q@jQg*W9&XqMKb`G`;WNc&a{IVtSvM?W=Bvp4Yj#M@)~fvbhZhyJAh z01d?=IdN_AjO|GT3Hv3bwZ@R+Dl7{`Ucdja-{7CKbiaHwoY-1*_$j} zwp=f|qQhgiI^zETeNBxw_=TjP7m?E|Z7Ud7-na*~WxDd&s?EmJe$dy5(NQ7hoSPm~ z5#hf26Vga&B&&E%RCX(NLFt;bY8kiNgqNd4zf40>&}F?E&n&qikOmdGmlCuToD2ci z7~`#TKT}%QYGo?aR!J@SKTspcim@z(ejtW{3JO<0!_ES;fz(o<*J#ex>4-1dD)V@n z_33jOc2SDieYmQ0H8G3^;~ufq+AiFSG?_PRjke&xDrV~I zd!jmmJg}$HmjU2mYF_S{C-ED0$(R2C_n0>~?)rLjjro>_VxmI~EF6_Q)PR*E1Oj%c zI3RVenvcVKUGn0m?G|;pAtf6o+Z|y-6QlwhTxSu1pXfr4M+dD8W$sDrXB{@rIoz%~ z*x19b>M54Or76j2op4xaq!1E+hD(K5ByyCJ06F*B{1G-=wwY=y>(e(?qEb>8{8cuj zD?XK_=I|D?lk3WJz*i@d^;VSGV4n^Kmz!p}za1)Cp~Yq^kG2v*A4GW#Z)|T$w{9sL zbAmw?pJ-1Cd`ZxWdsazl7Z#e96!H~yOmP{2QVN|Orqjxg^ngE@1IFav5i;W0>8)R+ zZ5L|`h=+G^$50DMl%;^Qvnph)swxlG{u_ z8_JM&Ek&Kk#xsSTj<(=e^Awzrb6S+k+YI??j1-A6V>@t;mq6U%ORWc*B=ibK)nhz_ zC=OC<&HfW;ZxgheNz=0>{Ukjmcq14)4yLx<^$ZED z#)kg@?-mGKJIG-!t;WDll%bH<@lpvVj!!*nH1jCVld+B#wp2V)@jc5fosD$5HyRXG z;?nA5AU>56kOQt`fV6opBn;q{418LQIG3pwYx^lGfXfa>bd{a0GjeOqLxdi!rQlM$ z=c+rlD?IUmg=*ayI{j}%34yfzWXQ5nw6P#>Q-v+0WVV&M;zo0Yt$g}%R;>z{cMGfw zR-C>*Y?o_!vc{6KO3?Bn$llsX95|AO&JSFLCp$bnqY)|z2#OR}`2JG~CJ2yrCgSx7k@4hSJb?_ONcnmRqd zQNXw&?I^JwOSfDkN52r79b`W1It@aDTVnyi5B)S*2Ng zNQ0^VwJC5REct0j$wpKZp`JGYzB?-cBzlHX=U=9p`;5a}>4Ir@EYgElGdJoc%5r1v zB?BINA*DRp)}GF8&Pn5PG1TPiH&_UnrlUo+vW>D^FjgBFDUP?=N|K-P4>iILcw#DUpag;w_X4m(?!uGZ6z>qVtX(x}X>Cem1uA{b^`XFm-? 
zGSn0gl+t}}XO2{*5zcrS6`J@J=Gt|WTB6weU#qdSUy5H~Ex$C4sU;*3a1=<$Jb_w# z4;}e!PbXKSKbhn&77vGXQhY%0`&W2=00 z5?NA6k;+g9IR5~+_N^1grnR?Xgcn3CP3k|aoDRr{i6xxxDjN~2& zBrB)igInaCHe=mhNw%e8OH8)noLb0^#DxL;G^piDJ2$CBq$Mci9sv))9W@a)V>PHt zESA)VR)e&xH;v#0k%C526h{DYoO0V<>WgKOFWMQm&mv58A+YTk@89=A^SsUh`&%QnrVn$baja?%#jJT0Ij7<499VMRCrDo zBhrMYe;f+&pNKjx>k8GdL%YHyPHIEMr~+J9l8}rjpGZjiWOH8r)G{>VHhI@4 z<4qSQSTSy`T4R+soun+h^tnKQ*8BickQ`9sAs`GZN=LElMkL%P&Dx0XDM9%AeyQ-K z@P<^c!cNhbFjb6!fNPf@GFV%0q0bVUO`c9>753jOu#~>HQ|L+%g(+%2z+;NiqfOS9 zdvh0UR)oTauo-2y4%ZLBN=F0lUYot>bW4vJ+hvY35ZVE3M*7s3o%wNYWo1PBfGZa9 zFGyVHOLpsZy0}|NEru5U=23yrDJKMvVTy~+hm_K3FZU^9^!i~|E47mC6^Q7D{E(rw zD-P{Wd-`~%MT>h1OK{kR!>ogpwvEK7^xUJKYtL()c+wJ^aJwx`6PEA_8(K%PKj}PvW-?)hRH3&mtDn}G{7K7j+6&f=RE}<~(%pN9k_JHgp0!76(K_Uy zw-&J8c)-A-d^@x)PEInm688># zIM5KHG$qJcSv-+kYt3rSvQO^#Whz)ZaFf!GxN68YJ7Un&sYwaP9jhb?k*F^s;ILH6 z*XbnZIHJOyNjXfpwH3xDm!_Xfpj@s_s{mvJPv>~O8qpu<;Gio!dPZugI5vxOaTs5% zV07(Hbl->VNQ{}YSsRo+B~;pr>>f@!Ld9zP&ZBW{wuOz|fUnT>4Ewu^dQv?DoZ~gB z=#33&M52O(^OY#olTbThurOO~*20oG zeKvO|7_Uy;EUCJ=^!}cln$G+}@ZMu_l;l)&=OA^h7_!!zGv&s+Mk^I;2AW5f z%1~H(o<+zmXC!W;7%k%M?2n2vdK>`bCWW1r*j%hvOK?^8J$Q24! zJhxuty8cw99G`xbZc8uRV_+pp0CQAbDBDYrl{iOg+$?b*rVc_=(uV2mI^<53NUB&`*+K3RBkj(T>hO`>8}-U6~JqckVY=lZc$EjrjOa8EUF zqD9j^c$Y!!DrBq>I%1QxEGUn9F_|FqyRB*jW`aA^?U9OJ+uZe{8+d!w5MtVgS|zn} zid@^nr4ZZ09ePwC!3VLa(a7sd(D>lyr%UM^W}yR;Ds%vPngI`|--A+pQc=lPDiCJU zK{)S6vQXH-IjZt*ARZjXK59{O9YWQdRnZ6XA8EGAmB{w2{{U7_fa28KaQ7`o0OO@u zM9(~J1CE5yQmx@_sYNIU92^7r)k-$0lEk_~=XJ2j5JUy7ms92^XwQ0Ut=U!b%In1o zPnwbf5Tcc&fO#0DcJxVAv3hR_fnjVttmwOWOuJQpBD^_ z+ZozYJfn27-8ALZC}QU#F!PFvxK8xvzIe~gY2F@dIQo8Sn7Y8ZMSfF&2?>yg66gu! 
zBxDSq%CrUeLT~og<=x`8`BqEmSUq~-KF8*zvUp)=)6o{T;xy}Q6*$rs-#7EoD*N3fn9yJFetl5aNg-KKV%m(~V5KKAnmlyj!dc zUB>p{vXGU{ASVU3l;T#004F38G1Vk=qjXEQ`HiRQo{b7wKmPzynQV{-4^(?+qDn@4 zRd>W$YmSq7Uap>?hc+~bmYAE4F3Q&Fb8V2#L;>d6N+y~+ieEpA+S)b)g zB?%;!+GQ>;oCet33RB7Ts{;qAr}*4u*+tAess(84a;>UETIyny3=b=#uC#DN1L<`QZ|5blbXP;{{XfOG)8&b4W{>b zot!Z#_>kd8Mg|5FQ1jsSa_r31MxD3o$=Qc|wOk~AcM#DHRIn5ziZ1Xu5QEBKT)OGle9LeEh+RV zUxfP8gpe{a4<#Uuwd<`ndbH1JtVu+;6Wa-CZJeRa&E+a>S@k5IQge`?IOO9DaVXD> z<4n`flx{A{j=S)}1=hw`V$6v0=d`isv;abU;-oE09aNpqg{?hFNjJ*XN%l`ln6j^ark2|a%J zC+=&?W;fAYtU7}I8S`~{$74u}jD<3sG(5n7eMlHWa(Kdz#d<%%hg%jeqg$bGvXrEq z#N`=MK;#YvIPcDDSBWh&43@{2wl@{C_{;6BJ5S->Kkh5bf3&8bm3Xrm5u9m>EfgVs zX(>v9C_;ffzYam^kdk^=sr3?8!%dcwR+mcfM^He>AKI~>6)g^2?Q&W25czeqx9dnL zJ^jC=e_(r4yqJ2J+9X{x%rC+_v{z)f>z3(BklVph9ZdR9Ft*6Un{qu^ zt6zwBHX5{>tQk@RYElsDTS?1fw1Do-tIv4bbVbP9-RC?k=Uk zH#m7pi(}QbTXK2fVQCmCNj8rZS#sHAl=2qndEg&y5!L}^#O~Ow+PB3em z2kqj=xLlJ*^Gn5x%W}L?zA4Ei_Kc-A(w2QeTxCi)>UQ)005wUZZz;3gn{=&4JHNwT z)4>P7<;5uR64I76?dsWajw8J$Qe-8?WGSFY3P>IBdv!Z;NF19o!#DSP(;0j=e}wBw z-j@;CNk8a$9SwDSIV^CDQHRXH@=8anZ5~A1dPprgox8Uc4Wu65+nS>rR1H6JnEV2? zw<$pRWaTITlZ5xiO#y9w`(e`-+w|FOe(j#0s6taAm91p{ETJnaNBv0@B>h13>R@)? 
z^)IOBXpb30twiyh`bBjmk~Et*IQ&eCO)aJyanzn#7(x`M6}!-oIX?&B9-o~&@RLg1 zwJx!THOOzJ5*A0L2h4YQ8`h8gAQC|1=78|~!mUrM?`@^)EU8LbL&=WPO4PKeL=TJ- z2*z=m_P>Mv3q{jG+w}QpX^ z!_7)*`0q0zwW-9GV>+OHC@t^wZ_1RnT~J5~+;-m)yi<*$Z8s(wQ|~tlFvI}?GEAj7 z3L9-jSqLgv3BfzCufhVW%w8gRvaLNP+T=?RzI`!uFBuHFq{^6$ic*qNu#ki)U+Vi zO5{V~ABT@dgv^iKS0_BMH@>Zm$6JcVLVUf>x~v|!80V*2O*TdR+oQp{u;E%=Q=;1m zzoKE!H9LSy2ni}^1&pm{oM3$Hnl=To8eRkkQz8V(VJ|Kla(1EOP7AJ+jmhKElAwXu zD!lKTk-=Bo--&W8R>*cnSbg0sX$IF6A396S3@Lc!2up_?9_J*Qr)&8C0KILFNNvQF zslF8Yfqk@om`B+led}1ZM7SGNcvl%yK8ckp3Xc^5riVX)!+=Tm$K_QmeQ0^e-$2Ca z!V=Usrb?)3vD07 zbt?dze2VB~`lO>DPacbpil4ehNpA4AT%D1kTic3;M%JPO>RI-(*iEw%) z#^-K=;uJw`Qb7abfNONr&$nuBO5;f}(1aWyFA7olJx|)O+sR{dlU;NwL&{O-Dp~jg zwO5UnQf;;`Bv-*(xOjZpoC$wg*it{0212<10MlIB{1=%i)jbOHW|R*=+|+~r0IL+P zZK*1Ca-=q~{{S}_u4CRJ+gd1LT#sX2E^nkMCGuEW@as|8UsAt!7l7mVE+J$48j4J* z@KT{R-zlJ;g-omd)t_AN^4fh;Q*n6;&Q2U4QrG-e)r=&>w_YUSLma56eukSL9#h*V ztHmF+KB<25ayIzOBonlZ^IDdT({6vVttcRcnFTG3bpa!B{b_H6_S?RokYr!&&?T{u zww}lLj<2;*0<=F4GhW}VOBSyF&#S_EIrOW%2n^yt2S7}{5X;cw``|r?GE{t z+tLtLG6~(#R);DqNKnU`;7$fkNB~kDQLl7$wm261^oZPJ4#x){noVLl!^QIRjzf)c z`h;fzV<`&x#a0+kBjjAA80nhi{{Rzgwh-bIF(F9@kZTgxzAewc%T!x*f)SE34QKbO z^HV`mr5(yR#(1LieS}=(q?LLyGEaKxC`KD*S2<5eXAOFS;Nn_gPdMQtftu9x=9kte zVIE2rRGsq<&}iv)d(E zm@nMi^Ibfs4u2{h@p4VcB@QrBoL~Yf-E^02T3m720Ark1QGM4C8Jwu`n=1h0r6H#$ zXkwFnm2F?5)jhR1%ODO<;jCu$Zj)(}q&V7!4_sHInbeVH#`4NAPrntP>uoZ{#VU~4 z>COPA;>%Brfys*1#K|G8QWlhsD(_>pM`eKO(g~`PVXEISv{G=AaB)ok0C-6#@>^Q5 z@M~k*GT#@n%H*{^{{Yi42GP`tQ+kM|_YujXED2gwz0s1cNv0FvrgFkZ=6$F+RH$Wp zY)_{Y^Dd=C5-VD>OqjUo2~vk0s*?s3I0;A~9DCKu#WlQV(lPT_H=?2ks99R_pkQ64asBe@-Yr7?GRYT3flRg#bTV zAvZ!5f}ihIgi1)kD&W+jQ2w#U_Nn;TlH^*Eb;0~3eCDP_+yjk=^{SwmoN_tunvhIT z{$L5?trGJh%egMgYA*n1fyd2Co7V&c4nM_J6wOfeZpj_9U5UyVA%8t+mzNULWwc(_ zq~sw#nXiWIw|^3aRYdh?spsd4`}@IJ{73bv_{f&5wI=9PdZ#rzS2dHuzbdDu;G#j# zPWhr*nn?#B{N}0ou|%d_qg+%v5t^P%#Vw1KyINuGr-4u4pY2rnJ0QwRE_TMRjFGq%Ef2kvtQ)9Fi5cBo9%mhM|t? 
zZHD&6wV2a#RKQXGBRhcHP=^6R0!BTud-F+jY{~YdTYI<*mo0ePofanf;=;PHXc zBd5(O@z2EzU5ei1d(FZ-kx zFYkR${r&0E9U%-hw$SXh!P=7QW9?8%x`DW1An*q!ov-!(0E-qRN7DKWOPez)ZOIL| z#hz=O1FJQMig$<;^tFNnTXJQ+Ds9jy*5j-v(~!v<3KEng zBrhOs^r!icp!Ft;YWryBDw}nWEz_|<54xauSxLboo}D?*u%q$udz@MsIVi48+5F0r zVx6fqtZOCjR2OGGE#%5+O}XX}lAW##%)GI{6Py%&U`!mDWWe^!*?Jv}sS*L_9d2A6MI-l0sm53IH^ZnDKyxTT;{ z*Qv&bRFx@5l6nqDw}bpWys9?2nF-=j#Gg6TjO8j=S@nKk^Y*V$UiAalYZFR?>S3^3 zLR?-|>Hh#AD+jgN-?eL7jm2ysQC5@mpq;j2zWac=8^(c=3{9} zh~pqDuYI8RrJC>uZEwqMl0)n$p|%hMjwuPs5|h+mC9Zh)?_OZ=FIvsgH%5Fn)N>t{ z^6inNLU|HNeQ{kuQdxAKasqM)$q6azUaeEH?$&3nIA!ESj*y8hIHuF+2PsLwAO*HJ z5y{RytBaOC){jQC(VS`FHsKXB(Q}v=5)y5fh3)maMi!|EUqTccNLD$*F@RHBH-%2I z_1Ac!Q7#!yg&{tXpEo;(c|A`(2u^DW(OQG*wCu~nPd4gPJIQiKkfxI*=D-{_bF~Fd zEMRAHKv!If)-+Lhy+H~2K@2jd#B>vl+tk8IN|ZXC?k@mwz#MZ}JUsQ2i~d=&HXU+C z&vrtjGV4HL#+~z!ukh~VVD}mR^+(ipMjf|37Dx%o$Cj*~6f#Ht-WuDBZt#B0N>)-G!Z{D0kQQ(vl zydI@1PB5g9d7e*?C6k;wHQ}6OxX-i2nzK#D)K_^A&T+OC^$e+R7K?scD&9y5?VR-O zTL*_;2ii3dFd$i*Ewa&3KnZPc9Do5)3iS|FbDpUkGDxIqi|^6umu5KWxS=dHF(EQA zEmmjmpUR$0XvM zt}wWbi-e}r?0*OLJ7G$XbhYY5TNd`hTrpPUSB^7;tQEDmpXFIeJoGpROVJ;){h*L)i(GsOgo(0YZHzX#Hd#%Ty4Z- zAujGt^y9Ktl?)%nL+Xzkbo^g8PhDRP-E+PZ2y`Giq~LB$gzO0Fk?JW)2c9VL=Z_m~ z%QZD7k{eibR4Zdq+kt435gnHrhczc+OhTZfq0q5{MiaL?R4^)$tbBIUwpANMo3wP@ z!%ieM4Gk(nRWC6GiO?|LjG@RQrzrG_ydgQRvY{;DR#!JCnR<_cwHN-TLl7b6lpTZMG zJgU8-lr7ULm+unpSGF#0eLc~lmm6V5RL9pTrjhC|wA^JOC`eJe@0+xzBe448e+ibGKuT0R(Yas+E3#36jl=~Sb8^v4UEh-3zTIuJ!HV0DF7~D! 
zOS4oz5}XL(w4p-?2wHoc$sG=u=ZC7cTW!}XVy@v0JM`~3#EC7s6cg)eGE@?@4Y)~h zX+}vr^f+@(EwPY~*hi~1WtP(pUE=Ynq{wlF6i045=v;{o0(Rt&N>RwmXc+(!c4ukh z!&utqQ_hQjxIH|yl6T8~N|3djaF*IU06#vIA=6riSj2IgEuw9@-35fQB};~w*-1MN z#34B23jQ+Ac*0UT*PuKe@POM|W%g`RCo>@-!nuv4CB)+cm5tfX57>^i!}GJl95}Dc z&A8(GeHIY>ac@kimnljGu-HorIRJIZw8$t@Ty0;%Q0bf> zNa@Ms)syy#)H5vBwOQ42Zre3&klF)?XM%VG2~x3+;0p4Ut3Y^>aGs}M)QgW1#d(W% zJe99@VK;D@4XuAwFs) zYZ*$1<|On}Hn^n7T2Er*fhXRXYL6SB>4YjKi?^mr}1)F4X{{ZdA9(up)kbczI z__2hzH&y)TM6)?BYB34W^>(Fw4MWqnSqV`ED2)Z@0>0Z)b4*jsr`*3K}CNni8#0l9qA5z6orT266Lnwh29lZ^d3^#o^M$8+Vf zR<1kI`d{{LHu(Z>rn`M=(03*^s@FPN3I6~ajkW!U>s0%`q|;!zVuh%nSncap7Y@xG zOIydM@uaWT>HGS9fT=BTZH*;A?Df`4R6h!p=qK||De{*EKtW1hCbOunI(JIKOLrYp zBH0~)Hb!eqOl2wjo+!!rCz{SKo+fF_i!1s)Q>fo>Bn3vYve?Jn$p(Q&sqqQ4EevaW z+ijzTHfK1d&%sKGPCqJ|*R7WGf*tctY5UTZ!CaeqT6^vtW6N0kl4*GH;#+FiZRBpO zrr&i%o%V@TEnP1?3$vp~2j|ez$opjd=`3}oVo&bR57>8iM`ulv`o8!|n@|3l+!JQ- z>hz7Fnfi87$EET6!Vy37q$T#p-7Zs@Iy3h?QF4(EJiBIb`@#c(_`v%Wj$1zq2|pC| zh%gO(_p;B}`#s zL`6vj!ZV)rZ@65c#Dt)NGq$M~_~a|iD_@~+bP!9A7Nn&B*xamc9?7?a57ygs=D#-)N3KWB zOqh!4gH`y(T_P~y`Nw{1pt5iWKOEHQgmIENsgZ9d`FeN73-g(D4B1z4$F&yLN88|0 z4O`B69MNst&H&m7pni0p$XrtFfwef1lw+Fu?aj83O7eavMP^R{F!C(B6$ zXPP+qO_3yy^o`vqxus2i4<`rM*3yfcs)W>Vq^XgGE7m|Yi^R6E42MHOBzFd`Hp{~e zLBr1$)Ho_%L-|)v#>>x0el^QTy0H5*3i8R2+aq#P5~7r={c4k}wFI{32+ny=l9Jlo zDOMRBzYb1*)yzBlawiz_pw2PVosPDRsB`QxGgQA5rq8p=W>ump5RihR^o60MpbmK{ z8-e!cKWcJLQVmi?fz4@bv}t#)*UOL4qTUwLRHP|IMDhxXQc6ZT41#OL_jniY7j>ga zoh-F(TgS?N;^=J!Iox(2k?F@L0ORjhKNRk_L}`~*s5OL_W?UaGwE?yfv;Ji%OD+$N zG5XRE1oVvf8j*MVk)IlLH`+@L7;HXd6T*-~s89*ydcTx({HXl_B2!`M2Msnszk>} zmdQZnwIL2Sl6nOYLGRMDs9KfBrhN^u*seG9YA;ET9J<2}7Xy_#Z1% z+nv4GA~5q3u;ZnqlJXr$19DW8v<;vhbAmWE;?&$O)_b#XE*7Z<;9N*tbgO8XwJRQb zNH|JXzNUUs(s2Te8;3MDt*)!0}Ww2PEt||x^&MSt9!Zr z$(|!MYL#k*t|ZKchEmatE} zx1K8zQeQ(Zge@8K6}tr`K`J8&<0U0TV2aaqM~0S*E}liQFlDBpElOdwoeKJOEnA&H z7~DbHQU-lW3qFyYV$bJJn@dMW#PD#!OLor&L4|YDjzrZLDbie@HdF23iJ4YD&K$LY@bm9-V!i6LKrrL^TF 
z5C}-*bW!P9{H@}1M%ZzdMFx*&*x0yQ;J0gW=@Qd%lGNks^hhf}X-_8^N1MV2@$Rujh17RA~Z=YsfBrJKx@lh6OK4ZN>r`Hl12%}YbK8L>m95t&24Ql5lpn1 zFNHSH-V%f%NWn(XrDxNU1~NIVtHSHayFoK##ElWi3rCvXHvSX-uJ>*Og6FV)vS+SW*WGURR86Dk?n_gYD2)t4hui zRw28}Zd}7Dw2keI=W#u`Yyi89iI9kxQ&lZVJaz5#e4Blqd~p1E(5D*IG^{H zz}&6qNLbtVQZvBnQ|&}*zXvp@h*DdVe}NV)!YeJuh^?y%X(!aA`C-7470!2HoaZ?d zG^ZL8t}e_6Pf5Ew5-Yc7q`T(FQbOFD=C;yMq6QGO6{Tl)s3BgTWQ=CdFHYDmt~zcF zYMF0a;H|fkg6ziMK--qWRJ67fatEjtV0XZ)-G}yDvs$JXZL^F8C1kYpc%ipi1mL&3 zaZ1}G1Aq<&Jhdi>D z&o<1_o*LOLa3IEhb(J%arT*=YW`?KIK>)N;8{X=aLC)o^hf0-*jwcrqghmuij1}x8nn~Nl!RK zY9OV(--&7|IrhjELe(~HEn~LNgmn9pS(3Kgn)z+TN}QwTukA*kUB3r^CK2~w4~9H}D|vTul*FG`|J zjUMweE|Hm18+5Sav)l)9FTU!_zD^QB*c4W!XB>38YCAH`v8Yx%Yub4emK>5yC+Dng zZ<2UQUUY+iRm7}*e#a&-- zad$hMH!ZVN6SfF*dRsjL-g9Ekamg4JscGL2Ef%{wGeTn_n9IqZC1N~OulhkqCBg^t zpeU2=M%~?%Ag+C$*xcsQar@^|KB#u2e6=5T_x=GY&-7OZHc!M!I7Yv({3`zaex|5N zEh8edcrN|O3B;eHj_2{@y|I|Bf&T#1eZ_42ii!3WYK5lsR3_B}PugbdS*<5}qg$3@ z+u0q|?1dyPN%;Y^{{V>6i{17zt^(nEx!v`&ws>Ke5^dIa$Jv$>hZH`+ocz*lL2vP^ zUdC!OHdh<6C0P(6$4pCl20E1FH=pq>N>|NDym5PW{{Wxq{{ZM?jGDf`pTGMEd_~sL zLsD(G&2J9hY;8n6rt~8r<-L#Pks$>wAD!;Jek&@C;vAT&W*&pnakQLgRkag<5^hRK z>VBa>29N&$qK5wFwFq7Z%Dbu9snk)@hO_liEKU7#n;rbew<0rNbvsvW9pLYWkzYuG zp>Cqj(%CM#{{T?}uzgEq@Uu?^rdF`F-U!JlZt%sbIX^21 zdDVVfYW#RRx4RcPr}`O=r}2rq3lCa6F41vpFkJMiOqF@EpZRfTl^=p}@mW-jHR6|u z5>+9n7t6wwbN=rc#~UAQqi7Ymv#AgyV?qA__(N(~JUWY#CCdK*`_j^*pOK1>Q9L%{qX9+|tLmk9p*dZJ2;VeEfO$A9pU6gz=hWX&lb zP(>`-CJ>{@ZRG7(_{iYZGUcJQg`p2Zh{1Ia{{Y5e^zLcl+j37|f;kuF}clS391*zR}X^i}ml_vlU)Se|=qEu&Yq;FR^(Qfg6rdO7;_U4eYf3sRrSZ+AV zKbvh8W{zgHlOXLQv89*28i&J+j^c^Ncr}^HJuuW(p4cNVL7K$TLMn{EO#4&E%4a<>c4Q*s$~jFT*VtMDx6Moaw@5J zfTb+;_@=^}*rwY%Yfw2Q;0m+Oe=51zZc9sPQ5er6sWv+b+;}XcpKR69QQM0g9C8Qi zMK+&Ag85@hhM)B-HI^p~VAiB}8b5pTbkqrA~;c$OK^2$N=@J zKpD+LhXRafm2gG~s$Z#fV4;ntp&#jAR?32M-g%>g zW1@u;+FApYf$g64q7tENR#&7)n6{44t~znWR^>N0>Jpi9)YBzdARe`%NKR9Run8XZ zO4T+8X0}vHhB^RoirwSq;Vp$0HntY+bDO)fE%^*3t%bClr9U8^hJw~|Viz27_7M9L zKuQOm0s_Y&I4LBY@<s^?~)k!{# 
zQ+yFol@Xm2X_FrMxemaAElsB0+iHs0+;;_plo5|#B|v&f!g%L6iuJy9=Ej-5!n3mT zf$5I53Og(%BPtSVY9^)CAQl_@-hS-=Kx1}jSDg-2?p~8{3gp-UB*QKft?FXl1-5(Zv zotbeUZEi2qxZ|&R=K)3cm$kn!Yf%V8VOU7yg>>_-oYmHASJYeW@M9NbA*jyWA6rOU zsRSK?ASLxUuSg@(MK6E=Y`|wAe}k2mzDazJY3MJzIl(SO^`UJ401$E`V3xxm0N86GS=ypWN{Lg4k}kd0EoxU5 zh)@b`Hh~se-AHl7l@LFRAxQu>Ql$XXKrFu?gr8A7;VDX#e7hdwezrx7<%#)E!d#W9 zPt2a$+90O|l7?Jf&Ba57EgOLuJ%;C}wNt3jkuEH?Yh}x8nE}Gu;jt+}C6$=%6(U2! zC1JIkf>X6BR%vo_Yi~oL#x9cC=6~7)L_mizH+YJdi4S46mgdJ0k1Y8qA$d_+keq<5 zdPo?_q`n+GZ}9t0D^^XDH4VOk9ZUyHCOm`yTS`Kcg)b>NSEyk?5I7ZJ)bSJIXNeE9 zK4Vg3KRlLRTMnhRV<&M+)SbiBr7WpMKoiiM9Mc_Z;j7lR?T}?PHOpF}u6o;y_v@fU=;k-6|q zvLBH)-IB{}A+Ul2iP@Eeo;I&xBRvu^Ugq#7ID!`3=8zv=)uBimyz-QUm6dai#FCTj z54~pkZYamep1 zR`}>)DRo2CTm>a5a2#{Dq5D^P&13B6VaGhpaaPL4reRk3Us~HQhNZF_PnO2yw2v^T ztdeoHTqQelK_s4)UTWwJpE;!Y%)GF+{IwD0I!50!a{8qp^WP*KRrggyc2$w(C(CI} z!foWVrvCur=Lqu?jFgm|tB$7zs8_cWCD!6YxW_~$5DZ1Om3ivQcHj~Y2*BiibviF{&pYQb zwJqWbSW1q?ERN)J?r;J3tYXoxAJeFitIY8p9AIDWrGU-IcZe1H?wnEmHra0rD zP>>Ijm0+Gf2}vV8`Rhf!e{vdjD2CFZjjkz5P&mdh$i-GIlNm&G#y(0AzI}M*UH`@2*&hjOKQxXWlo#1Qyyn5wH`y-qnR&SREd7v> zkVyDR{Ho~1xwtHuc1016$h8;(wJ_6aL1bf2;Z z-jV9fPom?XNWN*9krD>x+}1ZK5@jfRw{LBEPyYbBv``lvPinUplcY2-h==kXlBsJ$ z?|;tYWiB7LnU9L3iFbL|Kc=&2`y>}sbLO#Uj`H*FRN_{$esh{A<;1lb{lEAB02C5Z zdys6O+7I`#n$y}QG^>ImdJxj)7?6^E(Gnp}Kk9K`HFU7M;%&wvRdbTkV=Qj^RwKHE zkB$}3=bArAX&(@pLS@Nz_q|`z&(pX10FhQ(v~58a5|=*v9u5cNmw0WWKlSE{S#nL( zZoi;9JoDn)cLYDNVmv~l%AR3(81~5P`PZCv^^;0ULfpM->flOu6=X$XNR!w^xt)vt4fr z4k?&@ZQRR8}Z8>L?vdtCMZ9J)c>D`M|w{USjdJ+-G09Bt-NHOjO>)-6T7BUh@ zSrF$kqCN7D;phIGd{CCnQKexO8NVQ)k!ys^$gsp8*(Z&X+zU7JwCq_UA3T7E-_7*YQK zm>T9AFEYYf)S5OlCAyrpwpj>6qxze11fgT)B+^U$o$F(*$c(kS3P{}9S5y(zC*)Tu zDJh%f?ukm&o)|&B?vl_^h&YNtSu5j_Ys7eUd_uw3I0) zY$`n$t9`ejYrq7f?G`O6a7 z$wIyl^sC$&ccOK4t3!>mCvp?VBc)%FiaBG;8Ca(%J~BN=3d02siHMYt5_fIK6m|CH z9&AAlJVxAFMgsbpu4#MqzTs~}hCn-qzgn^|n60v0j@aCJ;aEyMs!0DGqL0jzwzAvo$U;E+7RFR7>@V_|ge&1f-tAnU5lnEmqO$ zw8&mYPAQx>g23E)p~M9Px3T`zRA_{92TJ+-V06Z8s1O0pYIgJ3XB7)g5CHe-MKX*W 
z;+7PZY$1>_(uhtm#YB`8`iJLVR-y(+Tvat&1vPX{kO;}*r$z}J_u`i&&C0=};-P-T z@j@DE=slE$r;ecWMW#OBGh9Xh0CC8s41}wJjyvL|1mu@Q4aVmk=+>D#ax1pmK~@xc zpKi3prcsO@^=cqF*Fy5zfyNFit)~PLOq9|;4tmhbKugU}W=O6$wwL zZsGCoM>4>`$l&%gb8Inz&~f?EMY5ccnrhuUCDySVBrQCGM@r9U%Q9V6y6p18(sTSj zPEWq-^c~71N17o+oMSnxSH!(8m9qS(ZIJ3lxW=G>Dr*d34 zmYZ8lE>KEXD#B8f;QDwx_RdvTZ`tzNyB*d#@7mR=qEsTU-O*&J_YMfbUKEyr&Tte} zj{en(>P1TO7jBagRCT=K~X1F*+a?nx)@S}*MWi=)>AId{pvbbcQ|f*8pt&_?IS39dpmy!a zJ5r;9ed>unS?S>K?ps`WVqzI^xb!DdnUkGSJCayfkl<`&D#hEEicCAg8)DoZ$;-r$3o^Uy)&glEM?+Dhba-vC7UTdh4 zbGkxGBC8+p${SCiV1J8o^m~ z(-LXXc(vT5TVPGPSlEjVrq_3MElRc&0CzUm)4H<4ittKy`HIF)&~r(ymX%v8;rRL z2uaCV2nQ*~Nk!Ib=K5|gmgZHg(1Wr#pX5@w4{XR9{}ey?N153Mu%atu$P`} zFv`Mrjlg|6QNo5m{$i3n^Mlg7pRBZlx(afe8fH{BLs)J15JEz+%1S{bdtjPUuNm)9 zY!R$>#$zegnOavVYTlJ^$xupv4s(xUF^cGTFP>js^wE>eYaTU)u9@#$PvS+jQ(cE0 z)yJ5r36A3mQgM(}@^GWas0EQ#i#D~f>Lyy9ZMn4BcMlI(Uy%2f3Xrs<5IrZM9d}pr z&UcMoBTkj)9ZIHDrG=;^DN=U@ZU>M^=RNz8(Bhu>Z{q#uPHNWX-mXtdW_b-djuJ7G zj3qpOG5u?ymn>Z-57T88kw$t?lsQ40#t&KIOK8Lipq5^ zvAdzqIhgIZwu#z-z##BX+>dVbMZWIL=ln)3`1OjC~3et%{GT@@FOHCL#}`epoynWlrT+kWxWSp_=Q|uiZ z;+C|#q%d03(3xi*lBSz)IsX8eN-96C4<_+>mf2yudSkKuZ#0w>{{WhmyzG&LJ$*xI z=u+Iak@<0x{j1D*K0=)BUH!);N`tO;EBxi9%}J=D!a?L2a{|^r$WC!UCr)Tsiu0ei z&%HbB+s#F9{{SC7=&@U7N_iK1;i8H4@jB=ORZNNYCDB_FNT_^ie{{UiB z50eJ|vR5gYPAlE%2_*jj=2d38@*LaO{{WZEv^Ru_RyD5lv$J;Ik$=Bsn7wYR$9Y6jbUK?dOD0(YDP@I3v&gyS7 zLrc$@2JvE?hj1N?i`;MG{ey)EKRszJ`$*lis@R-_-0fw$2jq~S;9B!gY7 za^brzLA?|h_bpRzo#fj!4dxQktQOHV=G2dZoaBCDm|b<2x@Dl<&hFLik~xicirmB~ zdlKSGhujfYi2CDHOO6@87+Qst3odtcA@Dp=@8q&!u%*&Z++vQN{jJgmCSz0a$*xP8?MK_NeMED_qb z8z!97z9m>BHKFeowqL3>xehqq?V+Rn$g0evl04(tF{wrIFIOEoBTmSA1=c&!B`~e7 zGV%%*+P#Q8id88)SW?rqKz2U$bB(PV)Nz`#_<+OLOL9;fW?~5X_!#J>)_)oYcGr+c zLX$z>Gy~RnrhGe#cY9Nb^BR{QBh&sg9#Tg_I~vOweaFjsGN~HU>tb=nT8N1do#FVa z^)}$ow`1}tMeW;cnCQIO*&-vy1t;+`zUWu_Rig8!*qVn7bSy8mR;;>y?`@L&S&E5V% zzsj1^fz;!UxXlF?@iCMoNpzOD>S(7Y#F9c*;M}gET#CyXIK2kr$sT^zu_B?xl%ons 
z>x!<}3Hp+xk_a`Zc&Dcjrk5f*HXn5K`_@HlmaBZSmk@Uoit6K+sgBW-QK?floj8!0vtCK1vAc8AAfXZhx=eB!i0+~U%rW=8WkEHRFSspdoA9kp$YnBkF6ya|N zKGj*V&kQ}ZsAjZ%QDqlsYg}~IJyCj7<-krx`xS$dRF8Y??qazsq04n7-rrut_ zp_)C(*e;@|vqza3&J+O0y>lT@`5^+U>zbCqSYO_PdiZmIX^Jo41#Gh>;FkS481|^; zw-Q=%K+RRta8s%T9(vOll2)(+k2NTy{^zTVWGP)LcA@YvoR0KKWFbXlb3`++fPc5O zcxwBK>>a4w@(mD}r78CR06Hy{Ax9xUo+zZS9Ai7ZT+t+od)TcbIY}K2M&nA)A9^k2 zG{MT$H&l_-+<}pb4mRl*PrraBBND8hhMKa-LPkYG?1;=PBq$7Z&w7;1ZEgB_!0pn8 zk}X(57Dpq3J5epa-6x)eQjqf2Mh8kEmsFF3!4xZfmh5oUf|2S69@KMlc7v1lKlh{# zNZL1X?_W_k0m0ykk55GT2g`jY#ghfdh{@gBo8UJ6ymyJDc$~61l=_ysCn`{paIBnX>b1k zDy?e(%Um#L3j_xWZIqM2_fQq5 z>6?44Gak=qyy_>AfH$=o#@bYr_520!P@dT%r?qxhhmu==@G)nFQYm+R2)R~ertx)@ z+fKfeMuyXcl!7=4SWhZZIXM2~6-$xe4ybiAq_L};vNIuRad4o=4m{|=+7_g!6yWo> za{Kq@y$LGQrq?A-$(+%Z82S-1WL6yt$MQCi>i2zz9`woOwPi0lkiRS^c_%pmNkTH%!2p#Iqwk(Zq@6huGM$3ning@tr})No$=3nT zUJ^lH=|@Mf+wF|(w`uzuK_>}JC}bpOt~V_HpipA)^5VBAf4}_|EU}$Y<~>+00rcFN zcHX&;9jwZ?#6vPvl_#97U||XTM5w7iq;)2_vR-XgYjM=NiD+C~bK*;r)PTxR2Gj!j z!0FTzxkX7vc8un?EoI@13$4CXsM>DC`~FJ9NPUkrJIFXsPypwU2=DJ$j2W2vc8wn+SQaif|t~^xB^^S z4)+G+;DS_~fH=q^koarhh4$Iu{kuwDE|FcHqUW9Tu;Z}UYy*<4Bw%xpP83Eu4l7CU zufod*hIV%BR&~6Uyy24Kox|m7{ncQ#beAv$Rle=S~3cb?@n?K4sd!_;#mIxbl;)OyFOZt4dgq$DJt_4laYbfo`4D{_~0u*Xu)2$I?YvK6_-1s}`O zKRWi$g*r+ej)2>%j)!KvrrIVGi1San$xw8-4p5SMjmhdcz#Bl#U7b`%p|sm;LYQqJyjlZ#vDlCRBkXELY5*fO z32;{9iW`9l{{V(DkO24}(t1)SZnI>(>?kvUk&U65OWK9|1mFTW`-*m2#;G1kc_)@v zAPcp%cWG%Z#zV1Epr(@QN*rk8f|8xUk~dPPjcK zP8U5x?dE3{!*fl!D~S{b-$}?rtonz-hXjy4s1@8$kYAElxMy-w;7|wQDja^b#?R!+ zZr@?jN|4S~^_ZZkFe0RM!Vskc`l}z>lDny(hgIg(rz#L^T00P#*~ZPNJh{{Vv1 zML5VFT1m&-8T~0scQiVzyA_3=x!6#DRFPP@N?RoshHrL7W6aZx=iE$nKjl(8d$hOs zY$N{w^PQ^S*V}>)_GC1D(6p?6gaiHP88>JnDQf7zJ(DDPB>w=_%70QSE>Gk8FZ~jW zQszJ`OyWf)K3xWko3cRx2~tn#K4^ zFx`~wB{+Qn(SWa%E24+UCb7Lisa%mPCFw~Lni)%oCn_K7T^}FAc|FoZNZBQr81n+UKfHkh?^@r70oBS?4A7ly?hY;c8Agf`ThQ zi>z)?rWuy0YlyWcW8tiMZw}$c&Qh>_lnA5aKVZMF>LkS$8HhWPCJ<4MRqm#N+}GW* zNB9MGV};{gjEKTwEP9n@CCUJ^Pb8&Ivf7MgA7>f-O&^qGe}Gr(LUp%?(pPS3NnMuW 
zHh`^@Ezd9cNe6R(7$GX>wG|^%T^ebW>f37<$lrmBfOq($59pGPN&f%~fS-XxTZ>m2 zetwj`$xNJX`Zv#!C0Oj01F`S72mCb?lyv$VNufa0T2gZSyN-%XM*+f|wN7z>PyYa> z=a`Sv72DcxPTf(97Nro%hb>*%@<2Yp=Jz1|$W2G8V99!Or7>d%!-SQkfYXG3N`i0? z+>HFxd&?HJwI#9UW4ARqQ%)uEb+Wy4hvI&Ob#kHGB*}p2{v(lsX2W?t%JPbmN8bc| zQs}yssn+=qY1@C7JxT^$yq{Aj9aP?VQgg{2N7|zjo{K2I5$EE%594LUHm}qi(X8tm zvl5$rTBF@1AQSOYNP!-7 zVwhLUT;K;gBzaDM!C zbNhkm`c=bEUf-F7OHV%d>s6BlJ4Gqpi=3;BJKeoe-#{lA!4;G0Js_CUOol%WYV`2h zvZjp?C)Wau@=&2?LI)ik1l=(_lM?snz=*x1Cn}M4A<-Sr$!axJvT-VgF>M`bL1K4z` zQm19Tpg5juE(D**Qwfrt^0tL`_pgeKgd=zdKWZK4*k7dQ^r~*zT_AX2Mc`CVTCp0ZLCDs`8sMQ^J7wucp$;Pc0>0*%Y_s$L?Lx zzpW+I6St?$euB|;&7|Qq+YP)Vc@H3@EBW)q5+k8IlG4E-WRQB-k?a)pK=UU>wxvba zMo(%1txRON8IIbNpK)IZG`(07=Gprq&o1@1_f7l!bwuW2FZvIcTb`#5a3= z!g|n_3X)WFfljTqX?FN&OobiCxjpHl5?Ky{ws1klM>I{(&2~DL*x@7EiYhT~WU`+w z6~*_kjzRC5XA&FCpauT`bIn#+)l(te7RHBsl$@CTD8!M)V0ZLa7G8kH)hu3 ziQJr@%DIht)397I+E(I$k`c`}x8ER0QA;T~CpbJ*ii*>AM!rA|rF>*#*yf~Zr6GF? zfQ_Q$H`-*K11cd(pFg+*-Tey^$n;X(jG7n0Z6}X}Yr41q9<2#Y}#%QF^X`%cy z){+Z=^-#zZR@t)<7V_02XwK5Cf%Z>9PQy1Ba@kB# ze6eY0@&nghS#ON3+dQ^X?4onjZDV+(hJ#C!H8a;qR|TLc4czB90mnxK5W`#4%YXAP%vL=q$2GD~CP@2Q zujlm+62~6fHi&*KNU_CCh*SJOv_vWiTe2rU*d4k)uuW?^Cs}HJSjuHbw%VSDkQJv$vPAq3zlQ?3p=w!!Q`Nj-r)Ri4S? 
zterz~e%Qp@{nSBdJ1#}bP%1P~)^J@>ZKUmMK9IGoJCCI)B;tz_oNDCX=(-m?kx$x> zXTH_Mw(C-7!HD2V^d-dSpKy|M`BgVlF{o7Y(CqUbn36Z2E6QXx0K#`TwQ-Mx=Oex< zpJwr2L+R)$k$SpV;6B4XbI5UtD|HR=g{VG3LOK9e5uA<(O0wy_d1=*;y8V*u?377q z5{s=N4WYLA2YE6FPCPCCHREvsV9#pAoOomuWPp8Rg zme8H~{6wi-kQB0b4Xl^_MRi5OEy<^$rY;xs!7JYLi3ItM6)2Oq;UGA6rw0kgCifZV zn}o0Tk3DT|EvO);(hmS6V3GkRCzUAn0)t}dYhpaX3M0-Tr;y5zq#@Tp2We32v=F6Z zfxymlj(p~mxt$zxwMejOD@Dy#)>*sU74^ZtE#{cO;Ne?T;Po7Srz1G+XOR`CjIzQW zN)+S1YSN4?OF7{Hb{GJ0?fKO%9;I>d8898lkP@sn=^IJ(Cm{M{0s04J|g!^;I78S^(OST?8q++18(i}O~srz%Q80D+C* z^dJ+*&MP6;Hv3Mimf;Z?aK}o}w-Ph8PAk-r{aca%{W+}?P?F5$%DTO|r*`aUfRqBB zgKuy$Pq#tdsaNaND3;kzyc!;BDkd)dX$tk@XdxqU9^--2 zJXf5xgee+|dLu1rdJ5JQ*iS+a2~h8lHst;3-j}h-w7CuhNiVXe*bS*F^x-6}C|=-r z!ola-vS80B=^d=`hmnn6!RWWPGa<6AJB>xS=H~( zTzREow;JD&=nHJ6TUV5;jN@vulh>}&NU7vXPotaYwc9j1jpie`k@XGviBLid;H7!) zledt5;Ew$&y1h$fZEm362h^nl_)a;5xR4Goqrn&lGDv{>QP2_ieU_ zXl6?Mw%Sg{9Cb-pN5>#~NybNRzV(x9FBIR4J*VQhP+9&9fjku?4@v8c;1uBGWdq)^ zM-$5}3bpWBE*GaEN)0-sEw>UJLXfahv>uB>zt{3L! zoRZhmg=cHX3ruGZMW-PD010^kL-|Pu?-Df*WGWQqQ5{K9O0kd|SnrIUJ@MO-PJS!- zEi(GnV*`h>YIeK5pIpd;Pj=&<|D6biy=ukBRL=Y*V&iot7~JcM$$2pM~@c? 
zCXjkEncBARVpEO%YQ$HRcd+6T>nHwZmI@Ew8Tq7BZdzkdmR)%aSndcpZe&g78vY$* zl_{`)t+RpM803BNa^i^WD90_S3OV!%`3rOkj^HdED;#}w~ro$->O@Oxw z{{VN)N1Yu?JxWuYwxj(`l&j{iF`d%)cuw3b(jK=KeNV-GMdsQ`JSi>#yJu{zJahU`SLpvUEO1e2~68g8&Z94M&0W`?xxA* zX&&E+KO>q2P#krnxo=!~AwP(+>Q#@p>;0<3W7FSNZ!>1HpzBgn&_dEmR1|`w=kr2` z>C&;u#vbNroubPlKg01-bwhB)`E+qZj*wXau^sH+M&rG6vgDlFL1d)uuLtlOadx;;jon=ZX$KnAAq%7)&oQAy_2$6bs15cVNm^SJh>d<6%i; zsR}sfp(2f9(&Ay1DVES$AdHU6&-&H!;iYehwY1wTB@QY>gp>Ut2_NU0Q6E9-2ctgg z=OpY(dj9}cYM+@0+X1v^7bq)Cf=qx1LK12g+lVk+TGfiZz2S}CQ|FMAjP&N3S^OYV z!~N}N8RsXj+M2$hi7vow%$~HdIAye{L#G0aw_D=D2h2i-Iuq8lcKkn&Yk$f}D@pxm zhkQMP^7p#V(0!?I>T!A$+q*G~l*}t*1d;xA!*FTVOn301hX+EAf>p?H6B)zD`X%nh(TgeaYstEp_Y8j{{Y^DoTRSGU}LCA6Dd?^~lC}{{ZI{N%4(O zvYNkvq-llN?Yfwb45*cN1Oxt6+xKY7^OCuR3Fs1@c9GcDSp!t+ct~d1Bq(ouq31%d zvYe8gp*(_5IXq)MD(2gdL$xi(?P5gP%#=n>66>uJc~-zcSUKaKoRTObmzvRaLDCfk zNoH7KF0Edo0zoNHxuz(RnNh?>m>6_{jC1)?T{`ZiD^nuR^pC!R%SwHuZc_2n2OxSu z;1ko{p0x%gwT5NBB);KVUU;eE5)cpJNF0I*0AQ2XJuyWat0nJXQiOXm&3Ov?k3GWL z5QPGuK+oaoN4-yW)Ud72M2M6v`o#N>WL;MWziB z-qf{%&8=;`;z=vP1Oe3jg)@O}gKUW8o2}F-4h%G=!Q6A$d(<}zitKW2^O>AoYDrOj zOifA%X9EKO9#pPLJ%?KD_v6lpv^d;}5k9RUK5L0tWou4S4;&JDDLuH)6%-?XsVx0u zUD+#8x3s5YD3y}-6M>(eO*R*WviprKw$j*Mz=R#y_x}FWq64<}-b1dT#Vxgjp@QO$ z)r|5~H+ldBo~NMCv@>k1YB!@?=Sq4zt%bU!7aUR)2_PjP5L1uv_Q!H+;};mzO6`_O zk|CEGAz`)kQdTzPd{--5EFeion6{sD%WNkq^=GH|6p|H!%EC~TH8OmbHptu>Q)H3& zND4V5@Gwp|=xMK)El{Jjhzp7t00xOj*`8E406KK`t6aWGqFEB3Uz=`eNohevvIbPW z<2e`u^ZcvxurJDv+xL~)Rw zaquzHtV^E+w1%6wEpl1hqDl(Td;AQ8o+wjtzFH$x3j#aogyU(LrMmJkTR=I$_U}Nt z^!cr`Ng(wc@=pezLv||dPfNHmL1TmzI+1~n zIOjEST$Li4Y3!i4pdT;GyU4LP+=o?%Wgs2pCy+M-oPTPn+`JC4&62t6YEuQ}Wltz? 
zb;TZ`8@Swmsay`c^sTxS+0x@5DYQ1)P<+s!Hn;~E=k7QZrMr6E!;xeZ6*=f^o)h`g zW0NE)!c3-l@)K*ABc~$7OOE`&*(&#O3TU0jzW(op1JH#$)(dY%I%Q+=HV~u zaz(|@A4bau#Ty;PwU}==<~b?G7TSnxtIG)( z0CpgyBl4+EzlhpJB=8-n_K#nb(l6Ev`IOO!{cjrlk%79C=Nl zA+!~A+q|Ubj)J>3?-F99F**~El!QFDT2MhNQgWe=nCA!JaaO$d8sjjYX8T6NseE5k zJAZ^!rxHk0gVKTKfJe9CC0~%L%W-d7y>O&&ad8s28c+#2D+^Ex{J=>A;MQ<&*SL-d zQen8cN6_02fKslR+)h9k{{Sk#>0cA0#zG-YhGKfk$lHyl8OZ~tct4-LHXcM|?<9u9 z$xu|5%?3Y(aUw1}5g0m%=P3>nKu?sko}GO~JbVgr{*aAo4!D@bD8%J`-BZyt1P=&;^h-G^LsskH1qqnzW9e@XoZy+?cTuSz(XD`w!!jHCeR2OAVI z&T-u*pTEs@eNPt@dB#P@&C4xFdmX6lN_EAv<|*x|K>RJ2h&ai|q^J)0=RHqY66{&4 zLz1GYIUn%d0PS!B6jDY=P~;KaJ&4GvJpTX{EYX>erw9$9;E5YjN&=5KP#pnD1S_!~ z*{WWt@wQx7WGdZlmpnH4Vdq*wB`yvCT5<{N$?6FF#$2CJ#`J>qTQxhS$rjm;;!;a$ z0YAlW0ivH}j#K)S`)#TYy74L|n?w6hm)>XvDk|_6pid<#=yE@(b;fu(th(#szOL2E zTNWr!?`wN*hLOq?ka8SKfX{p=V&K+wTK=cT~yLmt9o5ai3&pZXCU;)aZj4jGk%&1+no()S)8)PRj?8d{{{W32hdsbattuXfl&0S@Bq1ds3J5|sah!5{@k3>ekk&j5yXVJtl?-m^sPT2<39w}O#c8_l&gd&H59hlnYp7c^P#%ia!OukWujBT zPxO=R?0bKeQCc%z%28rSWdm_e;O1-6ww(v$zEyCV=ay0G+`ssW9lqZb!L1u=eGaiJ zEufU_N|Hv>JB1GX^!T8M)O>`rm+VI+Z3KH#Qr593dGUaoF|EO#c9fkjqW(dEX{O zfU>l;xh}93__OS;2e_@c{OR%DMO|`M=I~z2^{mqXL&PEStQBpUr3>8T>)#hF(BMs zdO=$yB|L6cIY~}>5J&eu^vc_&VuzhdnLun0NLMHS0D7@F>kSnbBRr&T4Xo`6#zih> z-68i?9fw>6!UAw|N$hD&$|)s+=+ib{|*h30ZlaaL}->neLMo@sCAt*ss)RLU4 zn5RunhJ>kdu=pp?wDK@;LBQbCDK^w0r1@zsw9-c9wURl`+^ZM_;N#$PQASeir;?VK zi}zCym+DDv$3}2PeOzc!kQCZMbFm5QRs_RpXiGv*)EsSXX(aM{bKkW9>u8MXn=5T9 zXpo?;FmsOmPJha-P_BXXe&UA_=j>9O*p|_^k<^k&p^!>erqGb2=bk!KONGKbNr;4^ z7r&BQ0YP5?=NYaPy~intP+Z0aG4~|%@x@mM_an)x0$r(FgTYDK25?78k+?Zl@BvBr z=|#6G)wSg;A!P|#mXu{hXE+%42j||Rh=mmfJwS&t0+b2<+|v|N*e-SrA`Eqsq$xzF zt$vRS>XV;IPYD^LB`!E+*MyRlk;o04EAv04Dvvhe>ceqZbTXcItvLL-tEk27u0bx^ z(zF)R_DW6%6qZe{DRiaBeOtM~rwL|j&g?D(r|~R>008&Ra3IHX_^c@p4cl|qa;V%? 
zz6hj23eL=Zt0ZhEl@r#8lWmK?_)GNx_(yswN){p`%LxHTX-+=~_xo{QU_?sX8CWYB z3s!g^trDo{9!y5Kq8dk>2q%y#S5k=#1f*=^gYQJ#6%j0jI_O`h@;T%F)H0;DkW#js zz#t3^j(xFL$+xmL*&KK(DPG)?J-dJ2fRu=+cgu0T#8XBddBc{1lGhng80XvM)DGH^ z{FJ)t$_t4q+Msy*)jqUKB8JONr!8STRFXkh7{TKddLhAWw1lYo#?Ud&d8K!`!Y|cHVTM zTR}o{qIlQ`P^{yT?kkqQl?YibD4pFvCy#DMb5~_s0eakAJJ9M$><3zrxU#o^^JOU< z0!1)xMYb;2P}y@w`-I2+$o0o|(92@2# zTom!@U)WO{IFoURp%ODr^je)?8-CQ14Xj0Mu0TfYp1k#^u`UML5yZ!B8&o+Rs}vKv zWw-D}b8RY$(~={la+I=5m;=%LqdwIItv2M)e>PK>6xuC*2aI0a2t6I z;GkoX{i-Iuowm^yQ6!*X=8AFnXownUq!2jAwNpvxoewRys4Vi8KI8Ze?@$Dwd!Tel z`Rj^=yu+6VTW$98M${6Fe4NsIT%E5WX}|#TNaWE{<1P7SV^~O0&&jK{;!ly&f-)dC z8R;&lA+D6gMz}#Zsg>+v0RZQd%^-w;6C7V9VmM=tL%r%(!+&KsNpJC9dpfm(+@ym-W~}`pCFy& zV;>dv1p9(zxV5BtYgYjEp>cAi*=hcRkulaMzjKP~`lVb%sxvB6iz$fl)TvVgCTOqRC1sETE@4nhYDKl|6z%K$BJ7RjQ7nksNFJ! z@%$~av2s<(EET@I4a*y`&3wA}QKl|4-I;J@!k~uALRYmRLpJXJFP3_z2$5W0lb4R%U+fu-q;CX0J+)0)6 z!a&LAr+k{VuKe;HD^UkMaw%KVn3r@zsu)VL2m+Thmqd(2@iaw+tFt#Zp)kxrA-QT$ zM%64OStH<@B`1!! z{{Tv0;}Zi*bq}d-BZUPXL9fy6vlf!-*SxvH^VI%S*>P^Dt7T~r-BaaAWlk$+4y=>A z9XRb$+wPb6Y>O*1Hbil1bw~*uWTc<&g#Q5Wny?#sTase9f*072Z?$!E(t(t-Dl^-b zc~U{=9`#B!`AV{{=Q`AfTRuuhI|@MnDE|QMQOKVX95)(prPtiTLV;IPj=j5kRoc~~ z;l9e>xT|SbT%LDO^7NIWE9KVKo=+Qq=8ua?%CfMVvI+xkx}+hzD5RugJoAsw`_Ru_ zVm!4u77~WtfQJYnN4;1iGiGLKQZTfUw5J)N*K11MNoBBT+s^6a&xz}ik+S;5C+$K27^D_KcxET{vXaoUc5O}}%uE6f0tCm_{o z`X$j;7Y0z=bLO~!JpiDbZ9HUtU*4s<8+>4ew;{!sTggCK0VMSbJkxbQq;;_3UiIBDJnrA0&;PlK|d6!n^o%XJig0* zVSMF$;1qiw?N^7bD{VzdNMAgp4%8HOE-)R~!oqhFK?#RdDh$D9D1 zVDXB(chLmM56FGTS#_s^PnWgqk`g`kDHSVi*XrsCje)q3a8;YuTqwKIn?haJn&(YOtul=L4I6zOj^)2Q3(UPm1< zM4C#-E^&ztEz~8%EH>|_*MYPQV}b4{B(*j;^Q1b3r7BX>h2RBt9Q%A%VlJBbIl9k}gLKNPgq^(_U}5Pc2cCmnuy z{{X!+Fp|BxZCb!7TGS2)8OOIdsbt2G z^p@RJkf6{lf{}rre14y$5T?o&5T^2@&IeDmNaIek8BZ3D?BtA9)%HMA(%50m#__?WI2yvCd<#|R1;9RTDWqa1!hjYBbZ zBq_nTBqyJW;!J5xpi5;VIqF7oJ*sVNv2v{vt`c5DMTp_Fg0il2$Q^yp`BaVCnnH+i z0l?WHj-grW{r>>Un8A6+T~U%-NLck9yqtSiGM$vt)FP(|UqkU#sG87nD2q&dGw+|%)%vMP2??D^s7S-(L<>kqmr%G_D`kQmk 
zHWq}aC)n{_zku(Wo`Yz*LItzQK1krN5Gvco7*T9ci)!Kj>?rUwA2C%rCA zrI$JHOpIhYfsiWcO;kx%_IyRfZnv<40Q~u`+C+4zE-dr<)FqahMh6P}^`foO5cXE$ zxuN4E>6d>98cp&%CkjeNYA0uY@E&Rh$)I%v7R-~eK|mUo6D18LIVT?7DPHFIvehl4 zrKx+iB#e#@X~bv@JqvS-DsvdYr7J>;fgl`pG{z&#Wdw7B??nhZB&DiH%X}djYRU*1 z%{ED4IB+M7_325Tx&*5mPSMw%YnfLK6#~*Zn7>5 zOkmy-o=K$eQ6^lC?=13o9@S@!Zfyw)Dg=&xX>GX$YAoTKV`aZJ#U$W#BBzOQVks>! zl$Gb8;-q~gO>9j9%rv28P5?4_tFN(1{l_0%^rZbegVMgnp^0l|8>QtW1m`4yn)n|> zSlf(>`eX-k6rwRv!8fwGd<BfsoB+Z`QL{aBni$5_vPsj> zvUQDwNJPQ{-A)Eb#wgTEaSacsC0OV)&{x;qD&TWbhW@2HN$M$d>Yyl;Z$=)T@fbbGbxuPN!Bw;6lugFykG;6%_d^NGMOY zYU}rtMMVf${cDFB`h;&u1k}+1n3m{eK}V8$4)q9?BssGy2|m@^sV$6d0Fp?+q1$y} z{vYqn19U;FQyUC=Qh4_?Y!;bQ3VG3jJwT4Mg)c0)k%Nx;riNid8v!HUjxNYbp!3iF z00q*9$6SuJ%=D@vIdJbIBPN!9`&4Z0O?^zulL;g^ z;K=HF3K0`lmqMEZ2i}WRvZRg=KEkiIOQY;5PPrsH`-M)E#(B*(mvC_5O`X{5_NwEp zy5oh^D`*`6;0ly3N>X<#w-j)+sHX2>*BwbLBg$!8R7a;C{`FoL8Rx!|=RIW0Sg((LheX5*X*?0CiM0B>`JITT6?^DQ}whyD6^c1<*)P>;WaqrTl zKvSq2v(L3%E|FGBo13b64m-#m^x+zyEx#eKG6zs|bkwM6Grz!-GZq!e7&^BEm-WVz6 zO{GKng(bSlM46zqgaC7pIL#!BAdpJbb6qM}NjMz)QGR4vE$DfaLj!V3LVfu)S($S& z+#$7RC;L<%yd6*}1mOEp`5?Hmf3|8$>qO{v8RIP**Mg!l2{q4@l`H_DG1`|!xP2lv zly>h!F5vpzDjEBG)ZHSR9EX$?QN38`Yva)&GcK?RK5a_*j4t4 zpr?XcAHx{tmq|;jJNn{`X~douuhzSCu&m=8ijFpm6|AlF>>zME3W_8Z#!*s^06FVS zkd!5S4CbbihE#xh991ZlwhoC6uh4_In);~jvI5F|D)_GL>@*(=Rsha<_oq{SpXo|c zMsc6cioWD;?j(rfR_avZI0wBud2!^&45a=p_~}(w9>lZd@;)h}5<_hjl@132q$Jq` zRT>;de8myJ`II;&zeG)?{l(N}KyXhLQ@Y*cIPa9%@9)x$vo`aX3MZ~=DpXouEG4C+;19JpgBh>_+(`I6Xvs2Tw45ZTIPFbIJ+c+%Y^)$D#gWs! 
zES}mF@F?s*y)?MpFcg#?O(HJ`Rgio0Ln%dcPP->z$JybtfO-4Z4|(l7m1L7ruza!> zMsRvpQXTq*=b;rH^jIUYLE$3sNX;;84Wo|Lk~Jc(v?g*s#N;G} z4tcN9bW?N(G4%yuJ*XEhbCNd4$j`ME zBdPn;uv=|Q^&lK{;+45DlL3g_$x7V%Rg=yQI%+X(8B%gF??$l0Jbw&ysonr1fCV-< z_?Gr6Q0I<0>0g|8&{BU|k$kA1&ViBYbDFunD55tbt@UIc4HX^=l!3e-(-p*Sw8k7jl?;knqDqEot-FE8b5@nIH&ljxVQB-( zI5qjHZWUolBbxm91ObX2J^lq1P$M)sxR6zhQA|7&fN)JKM*xs{74SH*&f`Xvmqmy7 zf*MIb6q4(BE@cQgCyuoX?k{FH8d#!A`R127wW37rg*oPKT8}5KJ&QfGr90FQ9nAv5 zQ{@jx_ox;4%@5^PUt^WL9T@}Tt$c1OGq?^){VZpflGBn zq#TTQ6*KNE@wn7Ub2%TNsM8&74T;C*DbpYnawu7VZah_RM4?m7 zI-~9>m9@_y8Bia36r3kNH7ZoEBy--YhZkh9xl)3-Bw~X~3&#opL)#P_rySg$QRa() zrFu?#;-Y++30Dv^f~1jAspis1Dx+rd-bp<@=_|Zr6>TJ}jAGQJ?%-$NG|h(?c|B5b zNFtMj^Gu>~gWj$#$R1gYkX5u~&~jTUN%GU0okZYsUkLQ5(X`4;Nc>f-?i_qq#?+>O zNI3)MkAH*30_swuk~&hHX_m>-@F+bFdghbdRh8~bXO5$M~kUx!ons?kV^!la+QbGb*H zREkLUu9ODel@`c>mn65}8L0lXMOYa>+PRPh>WwKL)lx-2uuE^Kt2sZd5$9xQCy#m& zDFHkR>0FE&xVj?Ni4{1ud+}VZ=;Od6D#uJ_fRWJtmHH_E04fyRnM&9~Qg@Y+QYt|M zZE$$$Uk@~DHPBQ>hEn6(i0@Be5XwQvT2PJ_F~u^C+*DDbP{GN&Q-DYxdL8LXQVCM% zX=*tM&j%+TokO`rI43wXRb*R44}woq&r@7B03xPv4Y{ZRBRuq>lBZ=gPRZ0xFb*s8 SJ$D{!;(6fLbaignNB`O9`(%9p literal 0 HcmV?d00001 diff --git a/python/packages/foundry_hosting/tests/test_responses_int.py b/python/packages/foundry_hosting/tests/test_responses_int.py new file mode 100644 index 0000000000..587478a234 --- /dev/null +++ b/python/packages/foundry_hosting/tests/test_responses_int.py @@ -0,0 +1,566 @@ +# Copyright (c) Microsoft. All rights reserved. + +"""Integration tests for ResponsesHostServer with a real Foundry endpoint. + +These tests exercise the full HTTP pipeline using httpx.AsyncClient with +ASGITransport — no real server process is started. The agent talks to a real +Foundry project endpoint so every test requires valid credentials. + +Required environment variables: + FOUNDRY_PROJECT_ENDPOINT - The Azure AI Foundry project endpoint URL. + FOUNDRY_MODEL - The model deployment name (e.g. gpt-4o). 
+""" + +from __future__ import annotations + +import base64 +import json +import os +from pathlib import Path +from typing import Annotated, Any + +import httpx +import pytest +from agent_framework import Agent, tool +from agent_framework.foundry import FoundryChatClient +from azure.ai.agentserver.responses import InMemoryResponseProvider +from azure.identity import AzureCliCredential + +from agent_framework_foundry_hosting import ResponsesHostServer + +# --------------------------------------------------------------------------- +# Skip / marker helpers +# --------------------------------------------------------------------------- + +skip_if_foundry_hosting_integration_tests_disabled = pytest.mark.skipif( + os.getenv("FOUNDRY_PROJECT_ENDPOINT", "") in ("", "https://test-project.services.ai.azure.com/") + or os.getenv("FOUNDRY_MODEL", "") == "", + reason="No real FOUNDRY_PROJECT_ENDPOINT or FOUNDRY_MODEL provided; skipping integration tests.", +) + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture +def server() -> ResponsesHostServer: + """Create a ResponsesHostServer backed by a real Foundry agent.""" + client = FoundryChatClient(credential=AzureCliCredential()) + + agent = Agent( + client=client, + instructions="You are a concise assistant. Keep answers very short (one or two sentences).", + default_options={"store": False}, + ) + + return ResponsesHostServer(agent, store=InMemoryResponseProvider()) + + +@tool +async def get_weather(location: Annotated[str, "The city name"]) -> str: + """Get the current weather in a given location.""" + return f"The weather in {location} is 72°F and sunny." 
+ + +@pytest.fixture +def server_with_tools() -> ResponsesHostServer: + """Create a ResponsesHostServer whose agent has a tool.""" + client = FoundryChatClient(credential=AzureCliCredential()) + + agent = Agent( + client=client, + instructions="You are a concise assistant. Use the provided tools when appropriate. Keep answers very short.", + tools=[get_weather], + default_options={"store": False}, + ) + + return ResponsesHostServer(agent, store=InMemoryResponseProvider()) + + +# --------------------------------------------------------------------------- +# HTTP helpers +# --------------------------------------------------------------------------- + + +async def _post_json( + server: ResponsesHostServer, + payload: dict[str, Any], +) -> httpx.Response: + """Send a POST /responses request with a raw JSON payload.""" + transport = httpx.ASGITransport(app=server) + async with httpx.AsyncClient(transport=transport, base_url="http://test") as client: + return await client.post("/responses", json=payload, timeout=120) + + +def _parse_sse_events(body: str) -> list[dict[str, Any]]: + """Parse SSE text into a list of event dicts with 'event' and 'data' keys.""" + events: list[dict[str, Any]] = [] + current_event: str | None = None + current_data_lines: list[str] = [] + + for line in body.split("\n"): + if line.startswith("event: "): + current_event = line[len("event: ") :] + elif line.startswith("data: "): + current_data_lines.append(line[len("data: ") :]) + elif line.strip() == "" and current_event is not None: + data_str = "\n".join(current_data_lines) + try: + data = json.loads(data_str) + except json.JSONDecodeError: + data = data_str + events.append({"event": current_event, "data": data}) + current_event = None + current_data_lines = [] + + return events + + +def _sse_event_types(events: list[dict[str, Any]]) -> list[str]: + """Extract event type strings from parsed SSE events.""" + return [e["event"] for e in events] + + +# 
--------------------------------------------------------------------------- +# Tests — basic text input +# --------------------------------------------------------------------------- + + +class TestBasicText: + """Simple text-in / text-out round trips.""" + + @pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def test_simple_text_non_streaming(self, server: ResponsesHostServer) -> None: + """Non-streaming: send a text prompt and get a completed response.""" + resp = await _post_json( + server, + { + "input": "Say hello in exactly three words.", + "stream": False, + }, + ) + + assert resp.status_code == 200 + body = resp.json() + assert body["status"] == "completed" + # There should be at least one output item with text + output_messages = [o for o in body["output"] if o["type"] == "message"] + assert len(output_messages) >= 1 + text_parts = [c for c in output_messages[0]["content"] if c["type"] == "output_text"] + assert len(text_parts) >= 1 + assert len(text_parts[0]["text"]) > 0 + + @pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def test_simple_text_streaming(self, server: ResponsesHostServer) -> None: + """Streaming: send a text prompt and verify SSE lifecycle events.""" + resp = await _post_json( + server, + { + "input": "Say hello in exactly three words.", + "stream": True, + }, + ) + + assert resp.status_code == 200 + assert "text/event-stream" in resp.headers["content-type"] + + events = _parse_sse_events(resp.text) + types = _sse_event_types(events) + + assert types[0] == "response.created" + assert types[1] == "response.in_progress" + assert types[-1] == "response.completed" + assert "response.output_text.delta" in types + assert "response.output_text.done" in types + + # The done event should have accumulated text + done_events = [e for e in events if e["event"] == "response.output_text.done"] + assert len(done_events) >= 1 + assert 
len(done_events[0]["data"]["text"]) > 0 + + +# --------------------------------------------------------------------------- +# Tests — structured content input +# --------------------------------------------------------------------------- + + +class TestStructuredContentInput: + """Structured content arrays: text + images, text + files.""" + + @pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def test_text_array_input(self, server: ResponsesHostServer) -> None: + """Multiple input_text parts in one message.""" + resp = await _post_json( + server, + { + "input": [ + { + "type": "message", + "role": "user", + "content": [ + {"type": "input_text", "text": "My name is Alice."}, + {"type": "input_text", "text": "What is my name?"}, + ], + } + ], + "stream": False, + }, + ) + + assert resp.status_code == 200 + body = resp.json() + assert body["status"] == "completed" + # The response should mention Alice + output_text = body["output"][0]["content"][0]["text"] + assert "alice" in output_text.lower() + + @pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def test_input_image_url(self, server: ResponsesHostServer) -> None: + """Send an image via URL and ask the model about it.""" + resp = await _post_json( + server, + { + "input": [ + { + "type": "message", + "role": "user", + "content": [ + {"type": "input_text", "text": "What animal is in this image? 
Reply in one word."}, + { + "type": "input_image", + "image_url": "https://cdn.pixabay.com/photo/2024/02/28/07/42/european-shorthair-8601492_640.jpg", + }, + ], + } + ], + "stream": False, + }, + ) + + assert resp.status_code == 200 + body = resp.json() + assert body["status"] == "completed" + output_text = body["output"][0]["content"][0]["text"].lower() + assert "cat" in output_text + + @pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def test_input_image_file_data(self, server: ResponsesHostServer) -> None: + """Send a local image file as inline base64 data URI.""" + image_path = Path(__file__).resolve().parent / "test_assets" / "sample_image.jpg" # noqa: ASYNC240 + image_bytes = image_path.read_bytes() + b64 = base64.b64encode(image_bytes).decode() + data_uri = f"data:image/jpeg;base64,{b64}" + + resp = await _post_json( + server, + { + "input": [ + { + "type": "message", + "role": "user", + "content": [ + {"type": "input_text", "text": "What animal is in this image? Reply in one word."}, + {"type": "input_image", "image_url": data_uri}, + ], + } + ], + "stream": False, + }, + ) + + assert resp.status_code == 200 + body = resp.json() + assert body["status"] == "completed" + output_text = body["output"][0]["content"][0]["text"].lower() + assert "cat" in output_text + + @pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def test_input_file_data(self, server: ResponsesHostServer) -> None: + """Send a small text file as inline file_data (base64 data URI).""" + text_content = "The capital of France is Paris." 
+ b64 = base64.b64encode(text_content.encode()).decode() + data_uri = f"data:text/plain;base64,{b64}" + + resp = await _post_json( + server, + { + "input": [ + { + "type": "message", + "role": "user", + "content": [ + {"type": "input_text", "text": "What is the capital mentioned in the attached file?"}, + {"type": "input_file", "file_data": data_uri, "filename": "info.txt"}, + ], + } + ], + "stream": False, + }, + ) + + assert resp.status_code == 200 + body = resp.json() + assert body["status"] == "completed" + output_text = body["output"][0]["content"][0]["text"].lower() + assert "paris" in output_text + + @pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def test_input_pdf_file_data(self, server: ResponsesHostServer) -> None: + """Send a real PDF file as inline file_data (base64 data URI).""" + pdf_path = Path(__file__).resolve().parent / "test_assets" / "sample.pdf" # noqa: ASYNC240 + pdf_bytes = pdf_path.read_bytes() + b64 = base64.b64encode(pdf_bytes).decode() + data_uri = f"data:application/pdf;base64,{b64}" + + resp = await _post_json( + server, + { + "input": [ + { + "type": "message", + "role": "user", + "content": [ + {"type": "input_text", "text": "Summarize this PDF in one sentence."}, + {"type": "input_file", "file_data": data_uri, "filename": "sample.pdf"}, + ], + } + ], + "stream": False, + }, + ) + + assert resp.status_code == 200 + body = resp.json() + assert body["status"] == "completed" + output_text = body["output"][0]["content"][0]["text"] + assert "microsoft" in output_text.lower() + + +# --------------------------------------------------------------------------- +# Tests — multi-turn conversations +# --------------------------------------------------------------------------- + + +class TestMultiTurn: + """Multi-round conversations using previous_response_id.""" + + @pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def 
test_two_turn_conversation(self, server: ResponsesHostServer) -> None: + """Turn 1: introduce context. Turn 2: ask about it using previous_response_id.""" + # Turn 1 + resp1 = await _post_json( + server, + { + "input": "My favorite color is blue. Remember that.", + "stream": False, + }, + ) + + assert resp1.status_code == 200 + body1 = resp1.json() + assert body1["status"] == "completed" + response_id_1 = body1["id"] + + # Turn 2 — references turn 1 + resp2 = await _post_json( + server, + { + "input": "What is my favorite color?", + "stream": False, + "previous_response_id": response_id_1, + }, + ) + + assert resp2.status_code == 200 + body2 = resp2.json() + assert body2["status"] == "completed" + output_text = body2["output"][0]["content"][0]["text"].lower() + assert "blue" in output_text + + @pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def test_three_turn_conversation(self, server: ResponsesHostServer) -> None: + """Three sequential turns to verify history accumulates correctly.""" + # Turn 1 + resp1 = await _post_json( + server, + { + "input": "I have a pet dog named Max.", + "stream": False, + }, + ) + assert resp1.status_code == 200 + id1 = resp1.json()["id"] + + # Turn 2 + resp2 = await _post_json( + server, + { + "input": "I also have a cat named Luna.", + "stream": False, + "previous_response_id": id1, + }, + ) + assert resp2.status_code == 200 + id2 = resp2.json()["id"] + + # Turn 3 — should remember both pets + resp3 = await _post_json( + server, + { + "input": "What are my pets' names?", + "stream": False, + "previous_response_id": id2, + }, + ) + assert resp3.status_code == 200 + output_text = resp3.json()["output"][0]["content"][0]["text"].lower() + assert "max" in output_text + assert "luna" in output_text + + @pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def test_multi_turn_streaming(self, server: ResponsesHostServer) -> None: 
+ """Multi-turn conversation with streaming on the second turn.""" + # Turn 1 — non-streaming + resp1 = await _post_json( + server, + { + "input": "My favorite number is 42.", + "stream": False, + }, + ) + assert resp1.status_code == 200 + id1 = resp1.json()["id"] + + # Turn 2 — streaming + resp2 = await _post_json( + server, + { + "input": "What is my favorite number?", + "stream": True, + "previous_response_id": id1, + }, + ) + assert resp2.status_code == 200 + assert "text/event-stream" in resp2.headers["content-type"] + + events = _parse_sse_events(resp2.text) + types = _sse_event_types(events) + + assert types[0] == "response.created" + assert types[-1] == "response.completed" + assert "response.output_text.done" in types + + done_events = [e for e in events if e["event"] == "response.output_text.done"] + assert "42" in done_events[0]["data"]["text"] + + +# --------------------------------------------------------------------------- +# Tests — tool calling +# --------------------------------------------------------------------------- + + +class TestToolCalling: + """Tests that verify function-tool round trips through the hosting layer.""" + + @pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def test_tool_call_non_streaming(self, server_with_tools: ResponsesHostServer) -> None: + """Agent invokes a tool and returns a final answer (non-streaming).""" + resp = await _post_json( + server_with_tools, + { + "input": "What is the weather in Seattle?", + "stream": False, + }, + ) + + assert resp.status_code == 200 + body = resp.json() + assert body["status"] == "completed" + + # The output should contain the final text referencing the weather + output_messages = [o for o in body["output"] if o["type"] == "message"] + assert len(output_messages) >= 1 + final_text = output_messages[0]["content"][0]["text"].lower() + assert "72" in final_text or "sunny" in final_text or "seattle" in final_text + + 
@pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def test_tool_call_streaming(self, server_with_tools: ResponsesHostServer) -> None: + """Agent invokes a tool and returns a final answer (streaming).""" + resp = await _post_json( + server_with_tools, + { + "input": "What is the weather in Seattle?", + "stream": True, + }, + ) + + assert resp.status_code == 200 + assert "text/event-stream" in resp.headers["content-type"] + + events = _parse_sse_events(resp.text) + types = _sse_event_types(events) + + assert types[0] == "response.created" + assert types[-1] == "response.completed" + + # Should have text output with the weather info + done_events = [e for e in events if e["event"] == "response.output_text.done"] + assert len(done_events) >= 1 + final_text = done_events[-1]["data"]["text"].lower() + assert "72" in final_text or "sunny" in final_text or "seattle" in final_text + + +# --------------------------------------------------------------------------- +# Tests — options passthrough +# --------------------------------------------------------------------------- + + +class TestOptions: + """Verify chat options are passed through to the model.""" + + @pytest.mark.flaky + @pytest.mark.integration + @skip_if_foundry_hosting_integration_tests_disabled + async def test_temperature_and_max_tokens(self, server: ResponsesHostServer) -> None: + """Set temperature and max_output_tokens and verify the response succeeds.""" + resp = await _post_json( + server, + { + "input": "Say hello briefly.", + "stream": False, + "temperature": 0.0, + "max_output_tokens": 50, + }, + ) + + assert resp.status_code == 200 + body = resp.json() + assert body["status"] == "completed" + output_text = body["output"][0]["content"][0]["text"] + assert len(output_text) > 0 From 78c7d5fc84e74b33cc8777c8c4ab28e0aa314345 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Fri, 24 Apr 2026 17:27:02 -0700 Subject: [PATCH 4/7] Fix README --- 
.../foundry-hosted-agents/README.md | 192 ++++++++++++++-- .../foundry-hosted-agents/responses/README.md | 215 ------------------ 2 files changed, 176 insertions(+), 231 deletions(-) delete mode 100644 python/samples/04-hosting/foundry-hosted-agents/responses/README.md diff --git a/python/samples/04-hosting/foundry-hosted-agents/README.md b/python/samples/04-hosting/foundry-hosted-agents/README.md index dcb7dcd24d..72b21128ba 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/README.md +++ b/python/samples/04-hosting/foundry-hosted-agents/README.md @@ -1,12 +1,139 @@ -# Foundry Hosted Agents Samples +# Foundry Hosted Agent Samples -This directory contains samples that demonstrate how to use the Agent Framework to host agents on Foundry with different capabilities and configurations. Each sample includes a README with instructions on how to set up, run, and interact with the agent. +This directory contains samples that demonstrate how to use hosted [Agent Framework](https://github.com/microsoft/agent-framework) agents with different capabilities and configurations on Foundry using the Foundry Hosting Agent service. Each sample includes a README with instructions on how to set up, run, and interact with the agent. -Read more about Foundry Hosted Agents [here](https://learn.microsoft.com/en-us/azure/foundry/agents/concepts/hosted-agents). +## Samples -## Environment setup +### Responses API -1. Navigate to the sample directory you want to run. For example: +| # | Sample | Description | +|---|--------|-------------| +| 1 | [Basic](responses/01_basic/) | A minimal agent demonstrating basic request/response interaction and multi-turn conversations using `previous_response_id`. | +| 2 | [Tools](responses/02_tools/) | An agent with local tools (e.g., weather lookup), demonstrating how to register and invoke custom tool functions alongside the LLM. 
| +| 3 | [MCP](responses/03_mcp/) | An agent connected to a remote MCP server (GitHub), demonstrating external MCP tool provider integration. | +| 4 | [Foundry Toolbox](responses/04_foundry_toolbox/) | An agent using Azure Foundry Toolbox, demonstrating toolbox provisioning and querying available tools at runtime. | +| 5 | [Workflows](responses/05_workflows/) | An agent with a multi-step orchestrated workflow, demonstrating chaining prompts through an orchestrated flow. | +| 6 | [Using deployed agent](responses/using_deployed_agent.py) | A sample demonstrating how to invoke an agent that has already been deployed to Foundry, showing how to interact with a hosted agent in code. | + +### Invocations API + +| # | Sample | Description | +|---|--------|-------------| +| 1 | [Basic](invocations/01_basic/) | A minimal agent demonstrating session state management via `agent_session_id` in URL params/response headers. | +| 2 | [Break Glass](invocations/02_break_glass/) | An agent demonstrating a "break glass" scenario where customizations of the API behaviors are needed, allowing for more direct control over how requests and responses are handled by the hosting layer. | + +## Running the Agent Host Locally + +### Using `azd` + +#### Prerequisites + +1. **Azure Developer CLI (`azd`)** + + - [Install azd](https://learn.microsoft.com/en-us/azure/developer/azure-developer-cli/install-azd) and the AI agent extension: `azd ext install azure.ai.agents` + - Authenticated: `azd auth login` + +2. **Azure Subscription** + +#### Create a new project + +**No cloning required**. Create a new folder, point azd at the manifest on GitHub. 
+ +```bash +mkdir hosted-agent-framework-agent && cd hosted-agent-framework-agent + +# Initialize from the manifest +azd ai agent init -m https://github.com/microsoft/agent-framework/blob/main/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/agent.manifest.yaml +``` + +Follow the instructions from `azd ai agent init` to complete the agent initialization. If you don't have an existing Foundry project and a model deployment, `azd ai agent init` will guide you through creating them. + +#### Provision Azure Resources + +> This step is only needed if you don't have an existing Foundry project and model deployment. + +Run the following command to provision the necessary Azure resources: + +```bash +azd provision +``` + +This will create the following Azure resources: + +- A new resource group named `rg-[project_name]-dev`. In this guide, `[project_name]` will be `hosted-agent-framework-agent`. +- Within the resource group, among other resources, the most important ones are: + - A new Foundry instance + - A new Foundry project, within which a new model deployment will be created + - An Application Insights instance + - A container registry, which will be used to store the container images for the hosted agent + +#### Set Environment Variables + +```bash +export FOUNDRY_PROJECT_ENDPOINT="https://.services.ai.azure.com/api/projects/" +export AZURE_AI_MODEL_DEPLOYMENT_NAME="" +# And any other environment variables required by the sample +``` + +Or in PowerShell: + +```powershell +$env:FOUNDRY_PROJECT_ENDPOINT="https://.services.ai.azure.com/api/projects/" +$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="" +# And any other environment variables required by the sample +``` + +> Note: The environment variables set above are only for the current session. You will need to set them again if you open a new terminal session. If you want to set the environment variables permanently in the azd environment, you can use `azd env set `.
+ +#### Running the Agent Host + +```bash +azd ai agent run +``` + +Right now, the agent host should be running on `http://localhost:8088` + +#### Invoking the Agent + +Open another terminal, **navigate to the project directory**, and run the following command to invoke the agent: + +```bash +azd ai agent invoke --local "Hello!" +``` + +Or you can in another terminal, without navigating to the project directory, run the following command to invoke the agent: + +```bash +curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "Hello!"}' +``` + +Or in PowerShell: + +```powershell +(Invoke-WebRequest -Uri http://localhost:8088/responses -Method POST -ContentType "application/json" -Body '{"input": "Hello!"}').Content +``` + +### Using `python` + +#### Prerequisites + +1. An existing Foundry project +2. A deployed model in your Foundry project +3. Azure CLI installed and authenticated +4. Python 3.10 or later + +#### Running the Agent Host with Python + +Clone the repository containing the sample code: + +```bash +git clone https://github.com/microsoft/agent-framework.git +cd agent-framework/python/samples/04-hosting/foundry-hosted-agents/responses +``` + +#### Environment setup + +1. Navigate to the sample directory you want to explore. Create a virtual environment: ```bash python -m venv .venv @@ -32,25 +159,58 @@ Read more about Foundry Hosted Agents [here](https://learn.microsoft.com/en-us/a az login ``` -## Deploying to a Docker container +#### Running the Agent Host + +```bash +python main.py +``` + +Right now, the agent host should be running on `http://localhost:8088` -Navigate to the sample directory and build the Docker image: +#### Invoking the Agent + +On another terminal, run the following command to invoke the agent: ```bash -docker build -t hosted-agent-sample . 
+curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "Hello!"}' +``` + +Or in PowerShell: + +```powershell +(Invoke-WebRequest -Uri http://localhost:8088/responses -Method POST -ContentType "application/json" -Body '{"input": "Hello!"}').Content ``` -Run the container, passing in the required environment variables: +## Deploying the Agent to Foundry + +Once you've tested locally, deploy to Microsoft Foundry. + +### With an Existing Foundry Project + +If you already have a Foundry project and the necessary Azure resources provisioned, you can skip the setup steps and proceed directly to deploying the agent. + +After running `azd ai agent init -m ` and following the prompts to configure your agent, you will have a project ready for deployment. + +### Setting Up a New Foundry Project + +Follow the steps in [Using `azd`](#using-azd) to set up the project and provision the necessary Azure resources for your Foundry deployment. + +### Deploying the Agent + +Once the project is set up and resources are provisioned, you can deploy the agent to Foundry by running: ```bash -docker run -p 8088:8088 \ - -e FOUNDRY_PROJECT_ENDPOINT= \ - -e MODEL_DEPLOYMENT_NAME= \ - hosted-agent-sample +azd deploy ``` -The server will be available at `http://localhost:8088`. You can send requests using the same `curl` command shown above. +> The Foundry hosting infrastructure will inject the following environment variables into your agent at runtime: +> +> - `FOUNDRY_PROJECT_ENDPOINT`: The endpoint URL for the Foundry project where the agent is deployed. +> - `AZURE_AI_MODEL_DEPLOYMENT_NAME`: The name of the model deployment in your Foundry project. This is configured during the agent initialization process with `azd ai agent init`. +> - `APPLICATIONINSIGHTS_CONNECTION_STRING`: The connection string for Application Insights to enable telemetry for your agent.
+ +This will package your agent and deploy it to the Foundry environment, making it accessible through the Foundry project endpoint. Once it's deployed, you can also access the agent through the Foundry UI. -## Deploying to Foundry +For the full deployment guide, see the [official deployment guide](https://learn.microsoft.com/en-us/azure/foundry/agents/how-to/deploy-hosted-agent). -Follow this [guide](https://learn.microsoft.com/en-us/azure/foundry/agents/how-to/deploy-hosted-agent?tabs=bash#configure-your-agent) to deploy your agent to Foundry. +Once deployed, learn more about how to manage deployed agents in the [official management guide](https://learn.microsoft.com/en-us/azure/foundry/agents/how-to/manage-hosted-agent). \ No newline at end of file diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/README.md deleted file mode 100644 index c6bd6987e4..0000000000 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/README.md +++ /dev/null @@ -1,215 +0,0 @@ -# Foundry Hosted Agent Samples - -This directory contains samples that demonstrate how to use hosted [Agent Framework](https://github.com/microsoft/agent-framework) agents with different capabilities and configurations on Foundry using the Foundry Hosting Agent service. Each sample includes a README with instructions on how to set up, run, and interact with the agent. - -## Samples - -### Responses API - -| # | Sample | Description | -|---|--------|-------------| -| 1 | [Basic](responses/01_basic/) | A minimal agent demonstrating basic request/response interaction and multi-turn conversations using `previous_response_id`. | -| 2 | [Tools](responses/02_tools/) | An agent with local tools (e.g., weather lookup), demonstrating how to register and invoke custom tool functions alongside the LLM. 
| -| 3 | [MCP](responses/03_mcp/) | An agent connected to a remote MCP server (GitHub), demonstrating external MCP tool provider integration. | -| 4 | [Foundry Toolbox](responses/04_foundry_toolbox/) | An agent using Azure Foundry Toolbox, demonstrating toolbox provisioning and querying available tools at runtime. | -| 5 | [Workflows](responses/05_workflows/) | An agent with a multi-step orchestrated workflow, demonstrating chaining prompts through an orchestrated flow. | - -### Invocations API - -| # | Sample | Description | -|---|--------|-------------| -| 1 | [Basic](invocations/01_basic/) | A minimal agent demonstrating session state management via `agent_session_id` in URL params/response headers. | -| 2 | [Break Glass](invocations/02_break_glass/) | An agent demonstrating a "break glass" scenario where customizations of the API behaviors are needed, allowing for more direct control over how requests and responses are handled by the hosting layer. | - -## Running the Agent Host Locally - -### Using `azd` - -#### Prerequisites - -1. **Azure Developer CLI (`azd`)** - - - [Install azd](https://learn.microsoft.com/en-us/azure/developer/azure-developer-cli/install-azd) and the AI agent extension: `azd ext install azure.ai.agents` - - Authenticated: `azd auth login` - -2. **Azure Subscription** - -#### Create a new project - -**No cloning required**. Create a new folder, point azd at the manifest on GitHub. - -```bash -mkdir hosted-agent-framework-agent && cd hosted-agent-framework-agent - -# Initialize from the manifest -azd ai agent init -m https://github.com/microsoft/agent-framework/blob/main/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/agent.manifest.yaml -``` - -Follow the instructions from `azd ai agent init` to complete the agent initialization. If you don't have an existing Foundry project and a model deployment, `azd ai agent init` will guide you through creating them. 
- -#### Provision Azure Resources - -> This step is only needed if you don't have an existing Foundry project and model deployment. - -Run the following command to provision the necessary Azure resources: - -```bash -azd provision -``` - -This will create the following Azure resources: - -- A new resource group named `rg-[project_name]-dev`. In this guide, `[project_name]` will be `hosted-agent-framework-agent`. -- Within the resource group, among other resources, the most important ones are: - - A new Foundry instance - - A new Foundry project, within which a new model deployment will be created - - An Application Insights instance - - A container registry, which will be used to store the container images for the hosted agent - -#### Set Environment Variables - -```bash -export FOUNDRY_PROJECT_ENDPOINT="https://.services.ai.azure.com/api/projects/" -export AZURE_AI_MODEL_DEPLOYMENT_NAME="" -# And any other environment variables required by the sample -``` - -Or in PowerShell: - -```powershell -$env:FOUNDRY_PROJECT_ENDPOINT="https://.services.ai.azure.com/api/projects/" -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="" -# And any other environment variables required by the sample -``` - -> Note: The environment variables set above are only for the current session. You will need to set them again if you open a new terminal session. if you want to set the environment variables permanently in the azd environment, you can use `azd env set `. - -#### Running the Agent Host - -```bash -azd ai agent run -``` - -Right now, the agent host should be running on `http://localhost:8088` - -#### Invoking the Agent - -Open another terminal, **navigate to the project directory**, and run the following command to invoke the agent: - -```bash -azd ai agent invoke --local "Hello!" 
-``` - -Or you can in another terminal, without navigating to the project directory, run the following command to invoke the agent: - -```bash -curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "Hello!"}' -``` - -Or in PowerShell: - -```powershell -(Invoke-WebRequest -Uri http://localhost:8088/responses -Method POST -ContentType "application/json" -Body '{"input": "Hello!"}').Content -``` - -### Using `python` - -#### Prerequisites - -1. An existing Foundry project -2. A deployed model in your Foundry project -3. Azure CLI installed and authenticated -4. Python 3.10 or later - -#### Running the Agent Host with Python - -Clone the repository containing the sample code: - -```bash -git clone https://github.com/microsoft/agent-framework.git -cd agent-framework/python/samples/04-hosting/foundry-hosted-agents/responses -``` - -#### Environment setup - -1. Navigate to the sample directory you want to explore. Create a virtual environment: - - ```bash - python -m venv .venv - - # Windows - .venv\Scripts\Activate - - # macOS/Linux - source .venv/bin/activate - ``` - -2. Install dependencies: - - ```bash - pip install -r requirements.txt - ``` - -3. Create a `.env` file with your Foundry configuration following the `env.example` file in the sample. - -4. 
Make sure you are logged in with the Azure CLI: - - ```bash - az login - ``` - -#### Running the Agent Host - -```bash -python main.py -``` - -Right now, the agent host should be running on `http://localhost:8088` - -#### Invoking the Agent - -On another terminal, run the following command to invoke the agent: - -```bash -curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "Hello!"}' -``` - -Or in PowerShell: - -```powershell -(Invoke-WebRequest -Uri http://localhost:8088/responses -Method POST -ContentType "application/json" -Body '{"input": "Hello!"}').Content -``` - -## Deploying the Agent to Foundry - -Once you've tested locally, deploy to Microsoft Foundry. - -### With an Existing Foundry Project - -If you already have a Foundry project and the necessary Azure resources provisioned, you can skip the setup steps and proceed directly to deploying the agent. - -After running `azd ai agent init -m ` and following the prompts to configure your agent, you will have a project ready for deployment. - -### Setting Up a New Foundry Project - -Follow the steps in [Using `azd`](#using-azd) to set up the project and provision the necessary Azure resources for your Foundry deployment. - -### Deploying the Agent - -Once the project is setup and resources are provisioned, you can deploy the agent to Foundry by running: - -```bash -azd deploy -``` - -> The Foundry hosting infrastructure will inject the following environment variables into your agent at runtime: -> -> - `FOUNDRY_PROJECT_ENDPOINT`: The endpoint URL for the Foundry project where the agent is deployed. -> - `AZURE_AI_MODEL_DEPLOYMENT_NAME`: The name of the model deployment in your Foundry project. This is configured during the agent initialization process with `azd ai agent init`. -> - `APPLICATIONINSIGHTS_CONNECTION_STRING`: The connection string for Application Insights to enable telemetry for your agent. 
- -This will package your agent and deploy it to the Foundry environment, making it accessible through the Foundry project endpoint. Once it's deployed, you can also access the agent through the Foundry UI. - -For the full deployment guide, see the [official deployment guide](https://learn.microsoft.com/en-us/azure/foundry/agents/how-to/deploy-hosted-agent). - -Once deployed, learn more about how to manage deployed agents in the [official management guide](https://learn.microsoft.com/en-us/azure/foundry/agents/how-to/manage-hosted-agent). \ No newline at end of file From 8940b45d5b7d8152c2c719263259a8f4342258a8 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Mon, 27 Apr 2026 09:48:05 -0700 Subject: [PATCH 5/7] Address comments --- .../_responses.py | 15 +++- .../foundry_hosting/tests/test_responses.py | 74 +++++++++++++++++++ .../responses/01_basic/README.md | 2 +- .../responses/02_tools/README.md | 2 +- .../responses/03_mcp/README.md | 2 +- .../responses/04_foundry_toolbox/README.md | 2 +- .../responses/05_workflows/README.md | 2 +- 7 files changed, 91 insertions(+), 8 deletions(-) diff --git a/python/packages/foundry_hosting/agent_framework_foundry_hosting/_responses.py b/python/packages/foundry_hosting/agent_framework_foundry_hosting/_responses.py index ceaf8fc865..be04a1f397 100644 --- a/python/packages/foundry_hosting/agent_framework_foundry_hosting/_responses.py +++ b/python/packages/foundry_hosting/agent_framework_foundry_hosting/_responses.py @@ -1087,9 +1087,16 @@ def _convert_file_data(data_uri: str, filename: str | None = None) -> Content: header, encoded = data_uri.split(";base64,", 1) media_type = header[len("data:") :] if media_type.startswith("text/"): - decoded_text = base64.b64decode(encoded).decode("utf-8") - prefix = f"[File: {filename}]\n" if filename else "" - return Content.from_text(f"{prefix}{decoded_text}") + try: + decoded_text = base64.b64decode(encoded).decode("utf-8") + except (ValueError, UnicodeDecodeError): + logger.warning( + "Failed
to decode text/* file_data as UTF-8, falling through to URI passthrough.", + exc_info=True, + ) + else: + prefix = f"[File: {filename}]\n" if filename else "" + return Content.from_text(f"{prefix}{decoded_text}") additional_properties = {"filename": filename} if filename else None return Content.from_uri(data_uri, additional_properties=additional_properties) @@ -1127,6 +1134,8 @@ def _convert_message_content(content: MessageContent) -> Content: if content.type == "input_image": image = cast(MessageContentInputImageContent, content) if image.image_url: + if image.image_url.startswith("data:"): + return Content.from_uri(image.image_url) return Content.from_uri(image.image_url, media_type="image/*") if image.file_id: return Content.from_hosted_file(image.file_id) diff --git a/python/packages/foundry_hosting/tests/test_responses.py b/python/packages/foundry_hosting/tests/test_responses.py index 60a3532ee2..e7c0599ad3 100644 --- a/python/packages/foundry_hosting/tests/test_responses.py +++ b/python/packages/foundry_hosting/tests/test_responses.py @@ -1548,6 +1548,80 @@ async def test_text_and_file_data_input_single_turn(self) -> None: assert messages[0].contents[1].type == "data" assert messages[0].contents[1].uri == "data:application/pdf;base64,JVBERi0xLjQ=" + async def test_text_mime_file_data_decoded(self) -> None: + """Agent receives a text/* file_data that is base64-decoded to plain text.""" + agent = _make_agent( + response=AgentResponse(messages=[Message(role="assistant", contents=[Content.from_text("Got it")])]) + ) + server = _make_server(agent) + + import base64 + + encoded = base64.b64encode(b"Hello, world!").decode() + + resp = await _post_json( + server, + { + "model": "test-model", + "input": [ + { + "type": "message", + "role": "user", + "content": [ + { + "type": "input_file", + "file_data": f"data:text/plain;base64,{encoded}", + "filename": "greeting.txt", + }, + ], + } + ], + "stream": False, + }, + ) + + assert resp.status_code == 200 + + messages =
agent.run.call_args.kwargs["messages"] + assert len(messages) == 1 + assert messages[0].contents[0].type == "text" + assert messages[0].contents[0].text == "[File: greeting.txt]\nHello, world!" + + async def test_text_mime_file_data_invalid_base64_falls_through(self) -> None: + """Invalid base64 in a text/* file_data falls through to URI passthrough.""" + agent = _make_agent( + response=AgentResponse(messages=[Message(role="assistant", contents=[Content.from_text("Got it")])]) + ) + server = _make_server(agent) + + resp = await _post_json( + server, + { + "model": "test-model", + "input": [ + { + "type": "message", + "role": "user", + "content": [ + { + "type": "input_file", + "file_data": "data:text/plain;base64,!!!invalid!!!", + "filename": "bad.txt", + }, + ], + } + ], + "stream": False, + }, + ) + + assert resp.status_code == 200 + + messages = agent.run.call_args.kwargs["messages"] + assert len(messages) == 1 + assert messages[0].contents[0].type == "data" + assert messages[0].contents[0].uri == "data:text/plain;base64,!!!invalid!!!" + async def test_mixed_text_and_image_input(self) -> None: """Agent receives a single message with both text and image content.""" agent = _make_agent( diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/README.md index 7081a581e9..dc1883778f 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/README.md +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/01_basic/README.md @@ -18,7 +18,7 @@ The agent is hosted using the [Agent Framework](https://github.com/microsoft/age > Depending on how you run the agent host, you can invoke the agent using `curl` (`Invoke-WebRequest` in PowerShell) or `azd`. Please refer to the [parent README](../../README.md) for more details. Use this README for sample queries you can send to the agent. 
-Send a POST request to the server with a JSON body containing a "message" field to interact with the agent. For example: +Send a POST request to the server with a JSON body containing an `"input"` field to interact with the agent. For example: ```bash curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "Hi"}' diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/README.md index e08eaf98f3..e82296c966 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/README.md +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/02_tools/README.md @@ -22,7 +22,7 @@ Follow the instructions in the [Running the Agent Host Locally](../../README.md# > Depending on how you run the agent host, you can invoke the agent using `curl` (`Invoke-WebRequest` in PowerShell) or `azd`. Please refer to the [parent README](../../README.md) for more details. Use this README for sample queries you can send to the agent. -Send a POST request to the server with a JSON body containing a "message" field to interact with the agent. For example: +Send a POST request to the server with a JSON body containing an `"input"` field to interact with the agent. 
For example: ```bash curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "What is the weather in Seattle?"}' diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/README.md index ac43d4f1df..b8b2bc137d 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/README.md +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/03_mcp/README.md @@ -22,7 +22,7 @@ Follow the instructions in the [Running the Agent Host Locally](../../README.md# > Depending on how you run the agent host, you can invoke the agent using `curl` (`Invoke-WebRequest` in PowerShell) or `azd`. Please refer to the [parent README](../../README.md) for more details. Use this README for sample queries you can send to the agent. -Send a POST request to the server with a JSON body containing a "message" field to interact with the agent. For example: +Send a POST request to the server with a JSON body containing an `"input"` field to interact with the agent. For example: ```bash curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "List all the repositories I own on GitHub."}' diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/README.md index d3358cdc04..c7f8721d5c 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/README.md +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/04_foundry_toolbox/README.md @@ -32,7 +32,7 @@ Follow the instructions in the [Running the Agent Host Locally](../../README.md# > Depending on how you run the agent host, you can invoke the agent using `curl` (`Invoke-WebRequest` in PowerShell) or `azd`. Please refer to the [parent README](../../README.md) for more details. 
Use this README for sample queries you can send to the agent. -Send a POST request to the server with a JSON body containing a "message" field to interact with the agent. For example: +Send a POST request to the server with a JSON body containing an `"input"` field to interact with the agent. For example: ```bash curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "What tools do you have?"}' diff --git a/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/README.md b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/README.md index fe1228f5a1..608d1a564e 100644 --- a/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/README.md +++ b/python/samples/04-hosting/foundry-hosted-agents/responses/05_workflows/README.md @@ -26,7 +26,7 @@ Follow the instructions in the [Running the Agent Host Locally](../../README.md# > Depending on how you run the agent host, you can invoke the agent using `curl` (`Invoke-WebRequest` in PowerShell) or `azd`. Please refer to the [parent README](../../README.md) for more details. Use this README for sample queries you can send to the agent. -Send a POST request to the server with a JSON body containing a "message" field to interact with the agent. For example: +Send a POST request to the server with a JSON body containing an `"input"` field to interact with the agent. 
For example: ```bash curl -X POST http://localhost:8088/responses -H "Content-Type: application/json" -d '{"input": "Create a slogan for a new electric SUV that is affordable and fun to drive."}' From 48edeb3f67d7199b8ab646c43b305f872562fcb5 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Mon, 27 Apr 2026 20:51:15 -0700 Subject: [PATCH 6/7] Fix int tests --- .../tests/test_responses_int.py | 39 +++++++++++++------ 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/python/packages/foundry_hosting/tests/test_responses_int.py b/python/packages/foundry_hosting/tests/test_responses_int.py index 587478a234..4d6d16659d 100644 --- a/python/packages/foundry_hosting/tests/test_responses_int.py +++ b/python/packages/foundry_hosting/tests/test_responses_int.py @@ -147,9 +147,9 @@ async def test_simple_text_non_streaming(self, server: ResponsesHostServer) -> N assert resp.status_code == 200 body = resp.json() assert body["status"] == "completed" - # There should be at least one output item with text + # There should be exactly one output item with text output_messages = [o for o in body["output"] if o["type"] == "message"] - assert len(output_messages) >= 1 + assert len(output_messages) == 1 text_parts = [c for c in output_messages[0]["content"] if c["type"] == "output_text"] assert len(text_parts) >= 1 assert len(text_parts[0]["text"]) > 0 @@ -219,7 +219,9 @@ async def test_text_array_input(self, server: ResponsesHostServer) -> None: body = resp.json() assert body["status"] == "completed" # The response should mention Alice - output_text = body["output"][0]["content"][0]["text"] + output_messages = [o for o in body["output"] if o["type"] == "message"] + assert len(output_messages) == 1 + output_text = output_messages[0]["content"][0]["text"] assert "alice" in output_text.lower() @pytest.mark.flaky @@ -250,7 +252,9 @@ async def test_input_image_url(self, server: ResponsesHostServer) -> None: assert resp.status_code == 200 body = resp.json() assert body["status"] == 
"completed" - output_text = body["output"][0]["content"][0]["text"].lower() + output_messages = [o for o in body["output"] if o["type"] == "message"] + assert len(output_messages) == 1 + output_text = output_messages[0]["content"][0]["text"].lower() assert "cat" in output_text @pytest.mark.flaky @@ -283,7 +287,9 @@ async def test_input_image_file_data(self, server: ResponsesHostServer) -> None: assert resp.status_code == 200 body = resp.json() assert body["status"] == "completed" - output_text = body["output"][0]["content"][0]["text"].lower() + output_messages = [o for o in body["output"] if o["type"] == "message"] + assert len(output_messages) == 1 + output_text = output_messages[0]["content"][0]["text"].lower() assert "cat" in output_text @pytest.mark.flaky @@ -315,7 +321,9 @@ async def test_input_file_data(self, server: ResponsesHostServer) -> None: assert resp.status_code == 200 body = resp.json() assert body["status"] == "completed" - output_text = body["output"][0]["content"][0]["text"].lower() + output_messages = [o for o in body["output"] if o["type"] == "message"] + assert len(output_messages) == 1 + output_text = output_messages[0]["content"][0]["text"].lower() assert "paris" in output_text @pytest.mark.flaky @@ -348,7 +356,9 @@ async def test_input_pdf_file_data(self, server: ResponsesHostServer) -> None: assert resp.status_code == 200 body = resp.json() assert body["status"] == "completed" - output_text = body["output"][0]["content"][0]["text"] + output_messages = [o for o in body["output"] if o["type"] == "message"] + assert len(output_messages) == 1 + output_text = output_messages[0]["content"][0]["text"] assert "microsoft" in output_text.lower() @@ -392,7 +402,9 @@ async def test_two_turn_conversation(self, server: ResponsesHostServer) -> None: assert resp2.status_code == 200 body2 = resp2.json() assert body2["status"] == "completed" - output_text = body2["output"][0]["content"][0]["text"].lower() + output_messages = [o for o in body2["output"] if 
o["type"] == "message"] + assert len(output_messages) == 1 + output_text = output_messages[0]["content"][0]["text"].lower() assert "blue" in output_text @pytest.mark.flaky @@ -433,7 +445,10 @@ async def test_three_turn_conversation(self, server: ResponsesHostServer) -> Non }, ) assert resp3.status_code == 200 - output_text = resp3.json()["output"][0]["content"][0]["text"].lower() + body3 = resp3.json() + output_messages = [o for o in body3["output"] if o["type"] == "message"] + assert len(output_messages) == 1 + output_text = output_messages[0]["content"][0]["text"].lower() assert "max" in output_text assert "luna" in output_text @@ -503,7 +518,7 @@ async def test_tool_call_non_streaming(self, server_with_tools: ResponsesHostSer # The output should contain the final text referencing the weather output_messages = [o for o in body["output"] if o["type"] == "message"] - assert len(output_messages) >= 1 + assert len(output_messages) == 1 final_text = output_messages[0]["content"][0]["text"].lower() assert "72" in final_text or "sunny" in final_text or "seattle" in final_text @@ -562,5 +577,7 @@ async def test_temperature_and_max_tokens(self, server: ResponsesHostServer) -> assert resp.status_code == 200 body = resp.json() assert body["status"] == "completed" - output_text = body["output"][0]["content"][0]["text"] + output_messages = [o for o in body["output"] if o["type"] == "message"] + assert len(output_messages) == 1 + output_text = output_messages[0]["content"][0]["text"] assert len(output_text) > 0 From 20ae71251dd45f1f5818f704282ad1da06495260 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Mon, 27 Apr 2026 20:54:01 -0700 Subject: [PATCH 7/7] remove temp --- python/packages/foundry_hosting/tests/test_responses_int.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/packages/foundry_hosting/tests/test_responses_int.py b/python/packages/foundry_hosting/tests/test_responses_int.py index 4d6d16659d..e64976989b 100644 --- 
a/python/packages/foundry_hosting/tests/test_responses_int.py +++ b/python/packages/foundry_hosting/tests/test_responses_int.py @@ -569,7 +569,6 @@ async def test_temperature_and_max_tokens(self, server: ResponsesHostServer) -> { "input": "Say hello briefly.", "stream": False, - "temperature": 0.0, "max_output_tokens": 50, }, )