From ab7779191cab79116e53ec0154bae90dc3d3b28b Mon Sep 17 00:00:00 2001
From: Darren Cohen <39422044+dargilco@users.noreply.github.com>
Date: Thu, 13 Nov 2025 05:35:13 -0800
Subject: [PATCH 01/24] Rename `items_property` to `items` in emitted code

---
 .../azure/ai/projects/aio/operations/_operations.py | 4 ++--
 .../azure/ai/projects/operations/_operations.py     | 4 ++--
 sdk/ai/azure-ai-projects/post-emitter-fixes.cmd     | 4 ++++
 3 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py
index d906e7dfc4cd..c0990aa1e261 100644
--- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py
+++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py
@@ -2482,7 +2482,7 @@ async def search_memories(
         if scope is _Unset:
             raise TypeError("missing required argument: scope")
         body = {
-            "items_property": items,
+            "items": items,
             "options": options,
             "previous_search_id": previous_search_id,
             "scope": scope,
@@ -2572,7 +2572,7 @@ async def _update_memories_initial(
         if scope is _Unset:
             raise TypeError("missing required argument: scope")
         body = {
-            "items_property": items,
+            "items": items,
             "previous_update_id": previous_update_id,
             "scope": scope,
             "update_delay": update_delay,
diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py
index eb3e57f67656..c33de6e23003 100644
--- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py
+++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py
@@ -4095,7 +4095,7 @@ def search_memories(
         if scope is _Unset:
             raise TypeError("missing required argument: scope")
         body = {
-            "items_property": items,
+            "items": items,
             "options": options,
             "previous_search_id": previous_search_id,
             "scope": scope,
@@ -4185,7 +4185,7 @@ def _update_memories_initial(
         if scope is _Unset:
             raise TypeError("missing required argument: scope")
         body = {
-            "items_property": items,
+            "items": items,
             "previous_update_id": previous_update_id,
             "scope": scope,
             "update_delay": update_delay,
diff --git a/sdk/ai/azure-ai-projects/post-emitter-fixes.cmd b/sdk/ai/azure-ai-projects/post-emitter-fixes.cmd
index 7bd1b2e00b09..5baa4bc23f33 100644
--- a/sdk/ai/azure-ai-projects/post-emitter-fixes.cmd
+++ b/sdk/ai/azure-ai-projects/post-emitter-fixes.cmd
@@ -16,6 +16,10 @@ REM Rename "A2_A_PREVIEW" to "A2A_PREVIEW". Since this value is an extension to
 powershell -Command "(Get-Content azure\ai\projects\models\_models.py) -replace 'A2_A_PREVIEW', 'A2A_PREVIEW' | Set-Content azure\ai\projects\models\_models.py"
 powershell -Command "(Get-Content azure\ai\projects\models\_enums.py) -replace 'A2_A_PREVIEW', 'A2A_PREVIEW' | Set-Content azure\ai\projects\models\_enums.py"

+REM Rename `"items_property": items`, to `"items": items` in search_memories and begin_update_memories methods. "items" is specified in TypeSpec, but Python emitter does not allow it. 
+powershell -Command "(Get-Content azure\ai\projects\aio\operations\_operations.py) -replace '\"items_property\": items', '\"items\": items' | Set-Content azure\ai\projects\aio\operations\_operations.py" +powershell -Command "(Get-Content azure\ai\projects\operations\_operations.py) -replace '\"items_property\": items', '\"items\": items' | Set-Content azure\ai\projects\operations\_operations.py" + REM Add quotation marks around "str" in the expression: content: Union[str, list["_models.ItemContent"]] = rest_field( REM This fixes the serialization of this expression: item_param: ItemParam = ResponsesUserMessageItemParam(content="my text") powershell -Command "(Get-Content azure\ai\projects\models\_models.py) -replace 'Union\[str, list\[\"_models\.ItemContent\"\]\] = rest_field\(', 'Union[\"str\", list[\"_models.ItemContent\"]] = rest_field(' | Set-Content azure\ai\projects\models\_models.py" From 0422663c6219fe00b1e73347983d284bea91c713 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 06:46:05 -0800 Subject: [PATCH 02/24] Patch model and sync operations --- .../azure/ai/projects/models/_patch.py | 131 +++++++++- .../azure/ai/projects/operations/_patch.py | 3 + .../ai/projects/operations/_patch_memories.py | 234 ++++++++++++++++++ 3 files changed, 366 insertions(+), 2 deletions(-) create mode 100644 sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index cbb449e5571e..54a536bdcc2a 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -6,9 +6,15 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List, Dict +from typing import List, Dict, Optional, Any from ._models import CustomCredential as CustomCredentialGenerated - +from ..models import MemoryStoreUpdateCompletedResult, MemoryStoreUpdateResult +from azure.core.polling import LROPoller, PollingMethod +from azure.core.polling.base_polling import ( + LROBasePolling, + OperationFailed, + _raise_if_bad_http_status_and_method, +) class CustomCredential(CustomCredentialGenerated): """Custom credential definition. @@ -23,8 +29,129 @@ class CustomCredential(CustomCredentialGenerated): """The secret custom credential keys. Required.""" +_FINISHED = frozenset(["completed", "superseded"]) +_FAILED = frozenset(["failed"]) + +class UpdateMemoriesLROPoller(LROPoller[MemoryStoreUpdateCompletedResult]): + """Custom LROPoller for Memory Store update operations.""" + + _polling_method: "UpdateMemoriesLROPollingMethod" + + @property + def update_id(self) -> str: + """Returns the update ID associated with the long-running update memories operation. + + :return: Returns the update ID. + :rtype: str + """ + return self._polling_method._current_body.update_id + + @property + def superseded_by(self) -> Optional[str]: + """Returns the ID of the operation that superseded this update. + + :return: Returns the ID of the superseding operation, if it exists. 
+ :rtype: Optional[str] + """ + # pylint: disable=protected-access + return self._polling_method._current_body.superseded_by if self._polling_method._current_body else None + + @classmethod + def from_continuation_token(cls, polling_method: PollingMethod[MemoryStoreUpdateCompletedResult], continuation_token: str, **kwargs: Any) -> "UpdateMemoriesLROPoller": + """Create a poller from a continuation token. + + :param polling_method: The polling strategy to adopt + :type polling_method: ~azure.core.polling.PollingMethod + :param continuation_token: An opaque continuation token + :type continuation_token: str + :return: An instance of UpdateMemoriesLROPoller + :rtype: UpdateMemoriesLROPoller + :raises ~azure.core.exceptions.HttpResponseError: If the continuation token is invalid. + """ + ( + client, + initial_response, + deserialization_callback, + ) = polling_method.from_continuation_token(continuation_token, **kwargs) + + return cls(client, initial_response, deserialization_callback, polling_method) + + +class UpdateMemoriesLROPollingMethod(LROBasePolling): + """A custom polling method implementation for Memory Store updates.""" + + @property + def _current_body(self) -> MemoryStoreUpdateResult: + try: + return MemoryStoreUpdateResult(self._pipeline_response.http_response.json()) + except Exception: # pylint: disable=broad-exception-caught + return MemoryStoreUpdateResult() # type: ignore[call-overload] + + def finished(self) -> bool: + """Is this polling finished? + + :return: True/False for whether polling is complete. + :rtype: bool + """ + return self._finished(self.status()) + + @staticmethod + def _finished(status) -> bool: + if hasattr(status, "value"): + status = status.value + return str(status).lower() in _FINISHED + + @staticmethod + def _failed(status) -> bool: + if hasattr(status, "value"): + status = status.value + return str(status).lower() in _FAILED + + def get_continuation_token(self) -> str: + return self._current_body.update_id + + # pylint: disable=arguments-differ + def from_continuation_token(self, continuation_token: str, **kwargs: Any) -> Tuple: # type: ignore[override] + try: + client = kwargs["client"] + except KeyError as exc: + raise ValueError("Need kwarg 'client' to be recreated from continuation_token") from exc + + try: + deserialization_callback = kwargs["deserialization_callback"] + except KeyError as exc: + raise ValueError("Need kwarg 'deserialization_callback' to be recreated from continuation_token") from exc + + return client, continuation_token, deserialization_callback + + def _poll(self) -> None: + """Poll status of operation so long as operation is incomplete and + we have an endpoint to query. + + :raises: OperationFailed if operation status 'Failed' or 'Canceled'. + :raises: BadStatus if response status invalid. + :raises: BadResponse if response invalid. 
+ """ + + if not self.finished(): + self.update_status() + while not self.finished(): + self._delay() + self.update_status() + + if self._failed(self.status()): + raise OperationFailed("Operation failed or canceled") + + final_get_url = self._operation.get_final_get_url(self._pipeline_response) + if final_get_url: + self._pipeline_response = self.request_status(final_get_url) + _raise_if_bad_http_status_and_method(self._pipeline_response.http_response) + + __all__: List[str] = [ "CustomCredential", + "UpdateMemoriesLROPollingMethod", + "UpdateMemoriesLROPoller", ] # Add all objects you want publicly available to users at this package level diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py index 413798a3ecc1..e301000fe7b5 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py @@ -11,11 +11,14 @@ from ._patch_datasets import DatasetsOperations from ._patch_telemetry import TelemetryOperations from ._patch_connections import ConnectionsOperations +from ._patch_memories import UpdateMemoriesLROPoller, MemoryStoresOperations __all__: List[str] = [ "TelemetryOperations", "DatasetsOperations", "ConnectionsOperations", + "UpdateMemoriesLROPoller", + "MemoryStoresOperations", ] # Add all objects you want publicly available to users at this package level diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py new file mode 100644 index 000000000000..c77dc9552cdd --- /dev/null +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py @@ -0,0 +1,234 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import Union, Optional, Any, List, overload, IO, cast +from azure.core.tracing.decorator import distributed_trace +from azure.core.polling import NoPolling +from azure.core.utils import case_insensitive_dict +from .. import models as _models +from ..models import ( + ItemParam, + MemoryStoreOperationUsage, + MemoryStoreOperationUsageInputTokensDetails, + MemoryStoreOperationUsageOutputTokensDetails, + MemoryStoreUpdateCompletedResult, + MemoryStoreUpdateResult, + UpdateMemoriesLROPoller, + UpdateMemoriesLROPollingMethod, +) +from ._operations import (JSON, _Unset, ClsType, MemoryStoresOperations as GenerateMemoryStoresOperations) +from .._validation import api_version_validation +from .._utils.model_base import _deserialize + +class MemoryStoresOperations(GenerateMemoryStoresOperations): + + @overload + def begin_update_memories( + self, + name: str, + *, + scope: str, + content_type: str = "application/json", + items: Optional[List[_models.ItemParam]] = None, + previous_update_id: Optional[str] = None, + update_delay: Optional[int] = None, + **kwargs: Any + ) -> UpdateMemoriesLROPoller: + """Update memory store with conversation memories. + + :param name: The name of the memory store to update. Required. + :type name: str + :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. 
+ :paramtype scope: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword items: Conversation items from which to extract memories. Default value is None. + :paramtype items: list[~azure.ai.projects.models.ItemParam] + :keyword previous_update_id: The unique ID of the previous update request, enabling incremental + memory updates from where the last operation left off. Default value is None. + :paramtype previous_update_id: str + :keyword update_delay: Timeout period before processing the memory update in seconds. + If a new update request is received during this period, it will cancel the current request and + reset the timeout. + Set to 0 to immediately trigger the update without delay. + Defaults to 300 (5 minutes). Default value is None. + :paramtype update_delay: int + :return: An instance of UpdateMemoriesLROPoller that returns MemoryStoreUpdateCompletedResult. The + MemoryStoreUpdateCompletedResult is compatible with MutableMapping + :rtype: + ~azure.ai.projects.models.UpdateMemoriesLROPoller + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update_memories( + self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> UpdateMemoriesLROPoller: + """Update memory store with conversation memories. + + :param name: The name of the memory store to update. Required. + :type name: str + :param body: Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of UpdateMemoriesLROPoller that returns MemoryStoreUpdateCompletedResult. The + MemoryStoreUpdateCompletedResult is compatible with MutableMapping + :rtype: + ~azure.ai.projects.models.UpdateMemoriesLROPoller + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update_memories( + self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> UpdateMemoriesLROPoller: + """Update memory store with conversation memories. + + :param name: The name of the memory store to update. Required. + :type name: str + :param body: Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of UpdateMemoriesLROPoller that returns MemoryStoreUpdateCompletedResult. The + MemoryStoreUpdateCompletedResult is compatible with MutableMapping + :rtype: + ~azure.ai.projects.models.UpdateMemoriesLROPoller + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + @api_version_validation( + method_added_on="2025-11-15-preview", + params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, + api_versions_list=["2025-11-15-preview"], + ) + def begin_update_memories( + self, + name: str, + body: Union[JSON, IO[bytes]] = _Unset, + *, + scope: str = _Unset, + conversation_id: Optional[str] = None, + items: Optional[List[ItemParam]] = None, + previous_update_id: Optional[str] = None, + update_delay: Optional[int] = None, + **kwargs: Any, + ) -> UpdateMemoriesLROPoller: + """Update memory store with conversation memories. + + :param name: The name of the memory store to update. Required. 
+ :type name: str + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :paramtype scope: str + :keyword conversation_id: The conversation ID from which to extract memories. Only one of + conversation_id or items should be provided. Default value is None. + :paramtype conversation_id: str + :keyword items: Conversation items from which to extract memories. Only one of conversation_id + or items should be provided. Default value is None. + :paramtype items: list[~azure.ai.projects.models.ItemParam] + :keyword previous_update_id: The unique ID of the previous update request, enabling incremental + memory updates from where the last operation left off. Cannot be used together with + conversation_id. Default value is None. + :paramtype previous_update_id: str + :keyword update_delay: Timeout period before processing the memory update in seconds. + If a new update request is received during this period, it will cancel the current request and + reset the timeout. + Set to 0 to immediately trigger the update without delay. + Defaults to 300 (5 minutes). Default value is None. + :paramtype update_delay: int + :return: An instance of UpdateMemoriesLROPoller that returns MemoryStoreUpdateCompletedResult. The + MemoryStoreUpdateCompletedResult is compatible with MutableMapping + :rtype: + ~azure.ai.projects.models.UpdateMemoriesLROPoller + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[MemoryStoreUpdateResult] = kwargs.pop("cls", None) + polling: Union[bool, UpdateMemoriesLROPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_memories_initial( + name=name, + body=body, + scope=scope, + conversation_id=conversation_id, + items=items, + previous_update_id=previous_update_id, + update_delay=update_delay, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs, + ) + raw_result.http_response.read() # type: ignore + + raw_result.http_response.status_code = 202 + raw_result.http_response.headers["Operation-Location"] = ( + f"{self._config.endpoint}/memory_stores/{name}/updates/{raw_result.http_response.json().get('update_id')}?api-version=2025-11-15-preview" + ) + + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = _deserialize(MemoryStoreUpdateCompletedResult, response.json().get("result", None)) + if deserialized is None: + usage = MemoryStoreOperationUsage( + embedding_tokens=0, + input_tokens=0, + input_tokens_details=MemoryStoreOperationUsageInputTokensDetails(cached_tokens=0), + output_tokens=0, + output_tokens_details=MemoryStoreOperationUsageOutputTokensDetails(reasoning_tokens=0), + total_tokens=0, + ) + deserialized = MemoryStoreUpdateCompletedResult(memory_operations=[], usage=usage) + if cls: + return cls(pipeline_response, deserialized, 
response_headers) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: UpdateMemoriesLROPollingMethod = UpdateMemoriesLROPollingMethod(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: + polling_method = cast(UpdateMemoriesLROPollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return UpdateMemoriesLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return UpdateMemoriesLROPoller( + self._client, + raw_result, + get_long_running_output, + polling_method, # pylint: disable=possibly-used-before-assignment + ) From f51db465b7ab9e98d42bccd3d5bd377839c65232 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 07:00:48 -0800 Subject: [PATCH 03/24] Fix sync operations patch --- .../azure-ai-projects/azure/ai/projects/operations/_patch.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py index e301000fe7b5..105063e2d4ee 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py @@ -11,13 +11,12 @@ from ._patch_datasets import DatasetsOperations from ._patch_telemetry import TelemetryOperations from ._patch_connections import ConnectionsOperations -from ._patch_memories import UpdateMemoriesLROPoller, MemoryStoresOperations +from ._patch_memories import MemoryStoresOperations __all__: List[str] = [ "TelemetryOperations", "DatasetsOperations", "ConnectionsOperations", - "UpdateMemoriesLROPoller", "MemoryStoresOperations", ] # Add all objects you want publicly available to users at this package level From 9d5efa64dda95fdb7425a27cbfe5b8ae963a3066 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 10:12:37 -0800 Subject: [PATCH 04/24] Add async operations and classes --- .../ai/projects/aio/operations/_patch.py | 2 + .../aio/operations/_patch_memories.py | 230 ++++++++++++++++++ .../azure/ai/projects/models/_patch.py | 206 +++++++++++++--- .../ai/projects/operations/_patch_memories.py | 19 +- 4 files changed, 410 insertions(+), 47 deletions(-) create mode 100644 sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories.py diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py index 1aef0cc952c0..e226dd089486 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py @@ -11,11 +11,13 @@ from ._patch_datasets_async import DatasetsOperations from ._patch_telemetry_async import TelemetryOperations from ._patch_connections_async import ConnectionsOperations +from ._patch_memories import MemoryStoresOperations __all__: List[str] = [ "TelemetryOperations", "DatasetsOperations", "ConnectionsOperations", + "MemoryStoresOperations", ] # Add all objects you want publicly available to users at this package level diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories.py 
b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories.py new file mode 100644 index 000000000000..694e49cf1f44 --- /dev/null +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories.py @@ -0,0 +1,230 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import Union, Optional, Any, List, overload, IO, cast +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.polling import AsyncPollingMethod, AsyncNoPolling +from azure.core.utils import case_insensitive_dict +from ... import models as _models +from ...models import ( + MemoryStoreOperationUsage, + MemoryStoreOperationUsageInputTokensDetails, + MemoryStoreOperationUsageOutputTokensDetails, + MemoryStoreUpdateCompletedResult, + AsyncUpdateMemoriesLROPoller, + AsyncUpdateMemoriesLROPollingMethod, +) +from ._operations import JSON, _Unset, ClsType, MemoryStoresOperations as GenerateMemoryStoresOperations +from .._validation import api_version_validation +from .._utils.model_base import _deserialize + + +class MemoryStoresOperations(GenerateMemoryStoresOperations): + + @overload + async def begin_update_memories( + self, + name: str, + *, + scope: str, + content_type: str = "application/json", + items: Optional[List[_models.ItemParam]] = None, + previous_update_id: Optional[str] = None, + update_delay: Optional[int] = None, + **kwargs: Any, + ) -> AsyncUpdateMemoriesLROPoller: + """Update memory store with conversation memories. + + :param name: The name of the memory store to update. Required. + :type name: str + :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :paramtype scope: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword items: Conversation items from which to extract memories. Default value is None. + :paramtype items: list[~azure.ai.projects.models.ItemParam] + :keyword previous_update_id: The unique ID of the previous update request, enabling incremental + memory updates from where the last operation left off. Default value is None. + :paramtype previous_update_id: str + :keyword update_delay: Timeout period before processing the memory update in seconds. + If a new update request is received during this period, it will cancel the current request and + reset the timeout. + Set to 0 to immediately trigger the update without delay. + Defaults to 300 (5 minutes). Default value is None. + :paramtype update_delay: int + :return: An instance of AsyncUpdateMemoriesLROPoller that returns MemoryStoreUpdateCompletedResult. The + MemoryStoreUpdateCompletedResult is compatible with MutableMapping + :rtype: + ~azure.ai.projects.models.AsyncUpdateMemoriesLROPoller + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update_memories( + self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> AsyncUpdateMemoriesLROPoller: + """Update memory store with conversation memories. + + :param name: The name of the memory store to update. Required. + :type name: str + :param body: Required. 
+ :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncUpdateMemoriesLROPoller that returns MemoryStoreUpdateCompletedResult. The + MemoryStoreUpdateCompletedResult is compatible with MutableMapping + :rtype: + ~azure.ai.projects.models.AsyncUpdateMemoriesLROPoller + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update_memories( + self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> AsyncUpdateMemoriesLROPoller: + """Update memory store with conversation memories. + + :param name: The name of the memory store to update. Required. + :type name: str + :param body: Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncUpdateMemoriesLROPoller that returns MemoryStoreUpdateCompletedResult. The + MemoryStoreUpdateCompletedResult is compatible with MutableMapping + :rtype: + ~azure.ai.projects.models.AsyncUpdateMemoriesLROPoller + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-11-15-preview", + params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, + api_versions_list=["2025-11-15-preview"], + ) + async def begin_update_memories( + self, + name: str, + body: Union[JSON, IO[bytes]] = _Unset, + *, + scope: str = _Unset, + items: Optional[List[_models.ItemParam]] = None, + previous_update_id: Optional[str] = None, + update_delay: Optional[int] = None, + **kwargs: Any, + ) -> AsyncUpdateMemoriesLROPoller: + """Update memory store with conversation memories. + + :param name: The name of the memory store to update. Required. + :type name: str + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :paramtype scope: str + :keyword items: Conversation items from which to extract memories. Default value is None. + :paramtype items: list[~azure.ai.projects.models.ItemParam] + :keyword previous_update_id: The unique ID of the previous update request, enabling incremental + memory updates from where the last operation left off. Default value is None. + :paramtype previous_update_id: str + :keyword update_delay: Timeout period before processing the memory update in seconds. + If a new update request is received during this period, it will cancel the current request and + reset the timeout. + Set to 0 to immediately trigger the update without delay. + Defaults to 300 (5 minutes). Default value is None. + :paramtype update_delay: int + :return: An instance of AsyncLROPoller that returns MemoryStoreUpdateCompletedResult. 
The + MemoryStoreUpdateCompletedResult is compatible with MutableMapping + :rtype: + ~azure.ai.projects.models.AsyncUpdateMemoriesLROPoller + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.MemoryStoreUpdateCompletedResult] = kwargs.pop("cls", None) + polling: Union[bool, AsyncUpdateMemoriesLROPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_memories_initial( + name=name, + body=body, + scope=scope, + items=items, + previous_update_id=previous_update_id, + update_delay=update_delay, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs, + ) + await raw_result.http_response.read() # type: ignore + + raw_result.http_response.status_code = 202 + raw_result.http_response.headers["Operation-Location"] = ( + f"{self._config.endpoint}/memory_stores/{name}/updates/{raw_result.http_response.json().get('update_id')}?api-version=2025-11-15-preview" + ) + + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers["Operation-Location"] = self._deserialize( + "str", response.headers.get("Operation-Location") + ) + + deserialized = _deserialize(MemoryStoreUpdateCompletedResult, response.json().get("result", None)) + if deserialized is None: + usage = MemoryStoreOperationUsage( + embedding_tokens=0, + input_tokens=0, + input_tokens_details=MemoryStoreOperationUsageInputTokensDetails(cached_tokens=0), + output_tokens=0, + output_tokens_details=MemoryStoreOperationUsageOutputTokensDetails(reasoning_tokens=0), + total_tokens=0, + ) + deserialized = MemoryStoreUpdateCompletedResult(memory_operations=[], usage=usage) + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncUpdateMemoriesLROPollingMethod = AsyncUpdateMemoriesLROPollingMethod( + lro_delay, path_format_arguments=path_format_arguments, **kwargs + ) + elif polling is False: + polling_method = cast(AsyncUpdateMemoriesLROPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncUpdateMemoriesLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncUpdateMemoriesLROPoller( + self._client, + raw_result, + get_long_running_output, + polling_method, # pylint: disable=possibly-used-before-assignment + ) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index 54a536bdcc2a..58589c6e0e2b 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -9,12 +9,14 @@ from typing import List, Dict, Optional, Any from ._models import CustomCredential as CustomCredentialGenerated from ..models import 
MemoryStoreUpdateCompletedResult, MemoryStoreUpdateResult -from azure.core.polling import LROPoller, PollingMethod +from azure.core.polling import LROPoller, AsyncLROPoller, PollingMethod, AsyncPollingMethod from azure.core.polling.base_polling import ( LROBasePolling, OperationFailed, _raise_if_bad_http_status_and_method, ) +from azure.core.polling.async_base_polling import AsyncLROBasePolling + class CustomCredential(CustomCredentialGenerated): """Custom credential definition. @@ -32,52 +34,79 @@ class CustomCredential(CustomCredentialGenerated): _FINISHED = frozenset(["completed", "superseded"]) _FAILED = frozenset(["failed"]) -class UpdateMemoriesLROPoller(LROPoller[MemoryStoreUpdateCompletedResult]): - """Custom LROPoller for Memory Store update operations.""" - _polling_method: "UpdateMemoriesLROPollingMethod" +class UpdateMemoriesLROPollingMethod(LROBasePolling): + """A custom polling method implementation for Memory Store updates.""" @property - def update_id(self) -> str: - """Returns the update ID associated with the long-running update memories operation. + def _current_body(self) -> MemoryStoreUpdateResult: + try: + return MemoryStoreUpdateResult(self._pipeline_response.http_response.json()) + except Exception: # pylint: disable=broad-exception-caught + return MemoryStoreUpdateResult() # type: ignore[call-overload] - :return: Returns the update ID. - :rtype: str + def finished(self) -> bool: + """Is this polling finished? + + :return: True/False for whether polling is complete. + :rtype: bool """ - return self._polling_method._current_body.update_id + return self._finished(self.status()) - @property - def superseded_by(self) -> Optional[str]: - """Returns the ID of the operation that superseded this update. + @staticmethod + def _finished(status) -> bool: + if hasattr(status, "value"): + status = status.value + return str(status).lower() in _FINISHED - :return: Returns the ID of the superseding operation, if it exists. - :rtype: Optional[str] - """ - # pylint: disable=protected-access - return self._polling_method._current_body.superseded_by if self._polling_method._current_body else None + @staticmethod + def _failed(status) -> bool: + if hasattr(status, "value"): + status = status.value + return str(status).lower() in _FAILED - @classmethod - def from_continuation_token(cls, polling_method: PollingMethod[MemoryStoreUpdateCompletedResult], continuation_token: str, **kwargs: Any) -> "UpdateMemoriesLROPoller": - """Create a poller from a continuation token. + def get_continuation_token(self) -> str: + return self._current_body.update_id - :param polling_method: The polling strategy to adopt - :type polling_method: ~azure.core.polling.PollingMethod - :param continuation_token: An opaque continuation token - :type continuation_token: str - :return: An instance of UpdateMemoriesLROPoller - :rtype: UpdateMemoriesLROPoller - :raises ~azure.core.exceptions.HttpResponseError: If the continuation token is invalid. 
+ # pylint: disable=arguments-differ + def from_continuation_token(self, continuation_token: str, **kwargs: Any) -> Tuple: # type: ignore[override] + try: + client = kwargs["client"] + except KeyError as exc: + raise ValueError("Need kwarg 'client' to be recreated from continuation_token") from exc + + try: + deserialization_callback = kwargs["deserialization_callback"] + except KeyError as exc: + raise ValueError("Need kwarg 'deserialization_callback' to be recreated from continuation_token") from exc + + return client, continuation_token, deserialization_callback + + def _poll(self) -> None: + """Poll status of operation so long as operation is incomplete and + we have an endpoint to query. + + :raises: OperationFailed if operation status 'Failed' or 'Canceled'. + :raises: BadStatus if response status invalid. + :raises: BadResponse if response invalid. """ - ( - client, - initial_response, - deserialization_callback, - ) = polling_method.from_continuation_token(continuation_token, **kwargs) - return cls(client, initial_response, deserialization_callback, polling_method) + if not self.finished(): + self.update_status() + while not self.finished(): + self._delay() + self.update_status() + if self._failed(self.status()): + raise OperationFailed("Operation failed or canceled") -class UpdateMemoriesLROPollingMethod(LROBasePolling): + final_get_url = self._operation.get_final_get_url(self._pipeline_response) + if final_get_url: + self._pipeline_response = self.request_status(final_get_url) + _raise_if_bad_http_status_and_method(self._pipeline_response.http_response) + + +class AsyncUpdateMemoriesLROPollingMethod(AsyncLROBasePolling): """A custom polling method implementation for Memory Store updates.""" @property @@ -124,7 +153,7 @@ def from_continuation_token(self, continuation_token: str, **kwargs: Any) -> Tup return client, continuation_token, deserialization_callback - def _poll(self) -> None: + async def _poll(self) -> None: """Poll status of operation so long as operation is incomplete and we have an endpoint to query. @@ -134,24 +163,123 @@ def _poll(self) -> None: """ if not self.finished(): - self.update_status() + await self.update_status() while not self.finished(): - self._delay() - self.update_status() + await self._delay() + await self.update_status() if self._failed(self.status()): raise OperationFailed("Operation failed or canceled") final_get_url = self._operation.get_final_get_url(self._pipeline_response) if final_get_url: - self._pipeline_response = self.request_status(final_get_url) + self._pipeline_response = await self.request_status(final_get_url) _raise_if_bad_http_status_and_method(self._pipeline_response.http_response) +class UpdateMemoriesLROPoller(LROPoller[MemoryStoreUpdateCompletedResult]): + """Custom LROPoller for Memory Store update operations.""" + + _polling_method: "UpdateMemoriesLROPollingMethod" + + @property + def update_id(self) -> str: + """Returns the update ID associated with the long-running update memories operation. + + :return: Returns the update ID. + :rtype: str + """ + return self._polling_method._current_body.update_id + + @property + def superseded_by(self) -> Optional[str]: + """Returns the ID of the operation that superseded this update. + + :return: Returns the ID of the superseding operation, if it exists. 
+ :rtype: Optional[str] + """ + # pylint: disable=protected-access + return self._polling_method._current_body.superseded_by if self._polling_method._current_body else None + + @classmethod + def from_continuation_token( + cls, polling_method: PollingMethod[MemoryStoreUpdateCompletedResult], continuation_token: str, **kwargs: Any + ) -> "UpdateMemoriesLROPoller": + """Create a poller from a continuation token. + + :param polling_method: The polling strategy to adopt + :type polling_method: ~azure.core.polling.PollingMethod + :param continuation_token: An opaque continuation token + :type continuation_token: str + :return: An instance of UpdateMemoriesLROPoller + :rtype: UpdateMemoriesLROPoller + :raises ~azure.core.exceptions.HttpResponseError: If the continuation token is invalid. + """ + ( + client, + initial_response, + deserialization_callback, + ) = polling_method.from_continuation_token(continuation_token, **kwargs) + + return cls(client, initial_response, deserialization_callback, polling_method) + + +class AsyncUpdateMemoriesLROPoller(AsyncLROPoller[MemoryStoreUpdateCompletedResult]): + """Custom AsyncLROPoller for Memory Store update operations.""" + + _polling_method: "UpdateMemoriesLROPollingMethod" + + @property + def update_id(self) -> str: + """Returns the update ID associated with the long-running update memories operation. + + :return: Returns the update ID. + :rtype: str + """ + return self._polling_method._current_body.update_id + + @property + def superseded_by(self) -> Optional[str]: + """Returns the ID of the operation that superseded this update. + + :return: Returns the ID of the superseding operation, if it exists. + :rtype: Optional[str] + """ + # pylint: disable=protected-access + return self._polling_method._current_body.superseded_by if self._polling_method._current_body else None + + @classmethod + def from_continuation_token( + cls, + polling_method: AsyncPollingMethod[MemoryStoreUpdateCompletedResult], + continuation_token: str, + **kwargs: Any + ) -> "AsyncUpdateMemoriesLROPoller": + """Create a poller from a continuation token. + + :param polling_method: The polling strategy to adopt + :type polling_method: ~azure.core.polling.PollingMethod + :param continuation_token: An opaque continuation token + :type continuation_token: str + :return: An instance of AsyncUpdateMemoriesLROPoller + :rtype: AsyncUpdateMemoriesLROPoller + :raises ~azure.core.exceptions.HttpResponseError: If the continuation token is invalid. + """ + ( + client, + initial_response, + deserialization_callback, + ) = polling_method.from_continuation_token(continuation_token, **kwargs) + + return cls(client, initial_response, deserialization_callback, polling_method) + + __all__: List[str] = [ "CustomCredential", "UpdateMemoriesLROPollingMethod", + "AsyncUpdateMemoriesLROPollingMethod", "UpdateMemoriesLROPoller", + "AsyncUpdateMemoriesLROPoller", ] # Add all objects you want publicly available to users at this package level diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py index c77dc9552cdd..09b870276b06 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py @@ -13,19 +13,18 @@ from azure.core.utils import case_insensitive_dict from .. 
import models as _models from ..models import ( - ItemParam, MemoryStoreOperationUsage, MemoryStoreOperationUsageInputTokensDetails, MemoryStoreOperationUsageOutputTokensDetails, MemoryStoreUpdateCompletedResult, - MemoryStoreUpdateResult, UpdateMemoriesLROPoller, UpdateMemoriesLROPollingMethod, ) -from ._operations import (JSON, _Unset, ClsType, MemoryStoresOperations as GenerateMemoryStoresOperations) +from ._operations import JSON, _Unset, ClsType, MemoryStoresOperations as GenerateMemoryStoresOperations from .._validation import api_version_validation from .._utils.model_base import _deserialize + class MemoryStoresOperations(GenerateMemoryStoresOperations): @overload @@ -38,7 +37,7 @@ def begin_update_memories( items: Optional[List[_models.ItemParam]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, - **kwargs: Any + **kwargs: Any, ) -> UpdateMemoriesLROPoller: """Update memory store with conversation memories. @@ -121,7 +120,7 @@ def begin_update_memories( *, scope: str = _Unset, conversation_id: Optional[str] = None, - items: Optional[List[ItemParam]] = None, + items: Optional[List[_models.ItemParam]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, **kwargs: Any, @@ -161,7 +160,7 @@ def begin_update_memories( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[MemoryStoreUpdateResult] = kwargs.pop("cls", None) + cls: ClsType[MemoryStoreUpdateCompletedResult] = kwargs.pop("cls", None) polling: Union[bool, UpdateMemoriesLROPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) @@ -192,7 +191,9 @@ def begin_update_memories( def get_long_running_output(pipeline_response): response_headers = {} response = pipeline_response.http_response - response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + response_headers["Operation-Location"] = self._deserialize( + "str", response.headers.get("Operation-Location") + ) deserialized = _deserialize(MemoryStoreUpdateCompletedResult, response.json().get("result", None)) if deserialized is None: @@ -214,7 +215,9 @@ def get_long_running_output(pipeline_response): } if polling is True: - polling_method: UpdateMemoriesLROPollingMethod = UpdateMemoriesLROPollingMethod(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + polling_method: UpdateMemoriesLROPollingMethod = UpdateMemoriesLROPollingMethod( + lro_delay, path_format_arguments=path_format_arguments, **kwargs + ) elif polling is False: polling_method = cast(UpdateMemoriesLROPollingMethod, NoPolling()) else: From 119b0c9ca5fd4d5dc6b03ebb29fcc0259fa43174 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 10:41:01 -0800 Subject: [PATCH 05/24] Rename file --- .../azure/ai/projects/aio/operations/_patch.py | 2 +- .../{_patch_memories.py => _patch_memories_async.py} | 6 +++--- .../azure/ai/projects/operations/_patch_memories.py | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) rename sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/{_patch_memories.py => _patch_memories_async.py} (98%) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py index 
e226dd089486..6423852990a5 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py @@ -11,7 +11,7 @@ from ._patch_datasets_async import DatasetsOperations from ._patch_telemetry_async import TelemetryOperations from ._patch_connections_async import ConnectionsOperations -from ._patch_memories import MemoryStoresOperations +from ._patch_memories_async import MemoryStoresOperations __all__: List[str] = [ "TelemetryOperations", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py similarity index 98% rename from sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories.py rename to sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py index 694e49cf1f44..fd37a869ca14 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py @@ -174,9 +174,9 @@ async def begin_update_memories( ) await raw_result.http_response.read() # type: ignore - raw_result.http_response.status_code = 202 - raw_result.http_response.headers["Operation-Location"] = ( - f"{self._config.endpoint}/memory_stores/{name}/updates/{raw_result.http_response.json().get('update_id')}?api-version=2025-11-15-preview" + raw_result.http_response.status_code = 202 # type: ignore + raw_result.http_response.headers["Operation-Location"] = ( # type: ignore + f"{self._config.endpoint}/memory_stores/{name}/updates/{raw_result.http_response.json().get('update_id')}?api-version=2025-11-15-preview" # type: ignore ) kwargs.pop("error_map", None) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py index 09b870276b06..73a38925afb2 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py @@ -181,9 +181,9 @@ def begin_update_memories( ) raw_result.http_response.read() # type: ignore - raw_result.http_response.status_code = 202 - raw_result.http_response.headers["Operation-Location"] = ( - f"{self._config.endpoint}/memory_stores/{name}/updates/{raw_result.http_response.json().get('update_id')}?api-version=2025-11-15-preview" + raw_result.http_response.status_code = 202 # type: ignore + raw_result.http_response.headers["Operation-Location"] = ( # type: ignore + f"{self._config.endpoint}/memory_stores/{name}/updates/{raw_result.http_response.json().get('update_id')}?api-version=2025-11-15-preview" # type: ignore ) kwargs.pop("error_map", None) From 302aea09cd7bfaa0f043cc804e045c96132f142d Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 11:15:50 -0800 Subject: [PATCH 06/24] Fix missing import of Tuple --- sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index 58589c6e0e2b..0f0d2b62e7be 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -6,7 +6,7 @@ Follow our quickstart for examples: 
https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List, Dict, Optional, Any +from typing import List, Dict, Optional, Any, Tuple from ._models import CustomCredential as CustomCredentialGenerated from ..models import MemoryStoreUpdateCompletedResult, MemoryStoreUpdateResult from azure.core.polling import LROPoller, AsyncLROPoller, PollingMethod, AsyncPollingMethod From e430b887da8c1f9f95482a6a0e80da73385c3abf Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 11:16:55 -0800 Subject: [PATCH 07/24] Add env variables for Memory Store samples --- sdk/ai/azure-ai-projects/.env.template | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/sdk/ai/azure-ai-projects/.env.template b/sdk/ai/azure-ai-projects/.env.template index a9f2e66be665..b744f0cef35d 100644 --- a/sdk/ai/azure-ai-projects/.env.template +++ b/sdk/ai/azure-ai-projects/.env.template @@ -21,6 +21,11 @@ AGENT_NAME= CONVERSATION_ID= CONNECTION_NAME= +# Used in Memory Store samples +AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME= +AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME= +AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME= + ####################################################################### # # Used tests, excluding Agent tests From 32f2f01aa6148a54d11d4d6debf70909f40f47f4 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 12:02:05 -0800 Subject: [PATCH 08/24] Updates to sample_agent_memory.py --- .../agents/memory/sample_agent_memory.py | 205 ++++++++------- .../agents/memory/sample_memory_advanced.py | 241 +++++++++--------- .../agents/memory/sample_memory_basic.py | 153 ++++++----- .../agents/memory/sample_memory_crud.py | 73 +++--- 4 files changed, 362 insertions(+), 310 deletions(-) diff --git a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_agent_memory.py b/sdk/ai/azure-ai-projects/samples/agents/memory/sample_agent_memory.py index 5d1f3018ac0b..7e97401d0b14 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_agent_memory.py +++ b/sdk/ai/azure-ai-projects/samples/agents/memory/sample_agent_memory.py @@ -21,99 +21,116 @@ Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview page of your Microsoft Foundry portal. - 2) AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model for the agent, as found under the "Name" column in - the "Models + endpoints" tab in your Microsoft Foundry project. - 3) AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model for memory, as found under the "Name" column in - the "Models + endpoints" tab in your Microsoft Foundry project. - 4) AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME - The deployment name of the embedding model for memory, as found under the - "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. + 2) AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model for the agent, + as found under the "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. + 3) AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model for memory, + as found under the "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. + 4) AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME - The deployment name of the embedding model for memory, + as found under the "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. 
""" -# import os -# from dotenv import load_dotenv -# from azure.identity import DefaultAzureCredential -# from azure.ai.projects import AIProjectClient -# from azure.ai.projects.models import ( -# MemoryStoreDefaultDefinition, -# MemoryStoreDefaultOptions, -# MemorySearchOptions, -# ResponsesUserMessageItemParam, -# MemorySearchTool, -# PromptAgentDefinition, -# ) - -# load_dotenv() - -# project_client = AIProjectClient(endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"], credential=DefaultAzureCredential()) - -# with project_client: - -# openai_client = project_client.get_openai_client() - -# # Create a memory store -# definition = MemoryStoreDefaultDefinition( -# chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], -# embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], -# ) -# memory_store = project_client.memory_stores.create( -# name="my_memory_store", -# description="Example memory store for conversations", -# definition=definition, -# ) -# print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") - -# # Create a prompt agent with memory search tool -# agent = project_client.agents.create_version( -# agent_name="MyAgent", -# definition=PromptAgentDefinition( -# model=os.environ["AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME"], -# instructions="You are a helpful assistant that answers general questions", -# ), -# tools=[ -# MemorySearchTool( -# memory_store_name=memory_store.name, -# scope="{{$userId}}", -# update_delay=10, # Wait 5 seconds of inactivity before updating memories -# # In a real application, set this to a higher value like 300 (5 minutes, default) -# ) -# ], -# ) -# print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})") - -# # Create a conversation with the agent with memory tool enabled -# conversation = openai_client.conversations.create() -# print(f"Created conversation (id: {conversation.id})") - -# # Create an agent response to initial user message -# response = openai_client.responses.create( -# conversation=conversation.id, -# extra_body={"agent": AgentReference(name=agent.name).as_dict()}, -# input=[ResponsesUserMessageItemParam(content="I prefer dark roast coffee")], -# ) -# print(f"Response output: {response.output_text}") - -# # After an inactivity in the conversation, memories will be extracted from the conversation and stored -# sleep(60) - -# # Create a new conversation -# new_conversation = openai_client.conversations.create() -# print(f"Created new conversation (id: {new_conversation.id})") - -# # Create an agent response with stored memories -# new_response = openai_client.responses.create( -# conversation=new_conversation.id, -# extra_body={"agent": AgentReference(name=agent.name).as_dict()}, -# input=[ResponsesUserMessageItemParam(content="Please order my usual coffee")], -# ) -# print(f"Response output: {new_response.output_text}") - -# # Clean up -# openai_client.conversations.delete(conversation.id) -# openai_client.conversations.delete(new_conversation.id) -# print("Conversations deleted") - -# project_client.agents.delete_version(agent_name=agent.name, agent_version=agent.version) -# print("Agent deleted") - -# project_client.memory_stores.delete(memory_store.name) -# print("Memory store deleted") +import os +import time +from dotenv import load_dotenv +from azure.identity import DefaultAzureCredential +from azure.core.exceptions import ResourceNotFoundError +from azure.ai.projects import AIProjectClient +from azure.ai.projects.models import ( + 
MemoryStoreDefaultDefinition, + MemorySearchTool, + PromptAgentDefinition, + AgentReference, +) + +load_dotenv() + +endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"] + +with ( + DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential, + AIProjectClient(endpoint=endpoint, credential=credential) as project_client, + project_client.get_openai_client() as openai_client, +): + + # Delete memory store, if it already exists + memory_store_name = "my_memory_store" + try: + delete_response = project_client.memory_stores.delete(memory_store_name) + print(f"Memory store `{memory_store_name}` deleted") + except ResourceNotFoundError: + pass + + # Create a memory store + definition = MemoryStoreDefaultDefinition( + chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], + embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], + ) + memory_store = project_client.memory_stores.create( + name=memory_store_name, + description="Example memory store for conversations", + definition=definition, + ) + print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") + + # Set scope to associate the memories with + # You can also use "{{$userId}}"" to take the oid of the request authentication header + scope = "user_123" + + # Create a prompt agent with memory search tool + agent = project_client.agents.create_version( + agent_name="MyAgent", + definition=PromptAgentDefinition( + model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], + instructions="You are a helpful assistant that answers general questions", + tools=[ + MemorySearchTool( + memory_store_name=memory_store.name, + scope=scope, + update_delay=1, # Wait 1 second of inactivity before updating memories + # In a real application, set this to a higher value like 300 (5 minutes, default) + ) + ], + ), + ) + print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})") + + # Create a conversation with the agent with memory tool enabled + conversation = openai_client.conversations.create() + print(f"Created conversation (id: {conversation.id})") + + # Create an agent response to initial user message + response = openai_client.responses.create( + input="I prefer dark roast coffee", + conversation=conversation.id, + extra_body={"agent": {"name": agent.name, "type": "agent_reference"}}, + ) + print(f"Response output: {response.output_text}") + print(response.output) + + # After an inactivity in the conversation, memories will be extracted from the conversation and stored + print("Waiting for memories to be stored...") + time.sleep(60) + + # Create a new conversation + new_conversation = openai_client.conversations.create() + print(f"Created new conversation (id: {new_conversation.id})") + + # Create an agent response with stored memories + new_response = openai_client.responses.create( + input="Please order my usual coffee", + conversation=new_conversation.id, + extra_body={"agent": {"name": agent.name, "type": "agent_reference"}}, + ) + print(f"Response output: {new_response.output_text}") + print(new_response.output) + + # Clean up + openai_client.conversations.delete(conversation.id) + openai_client.conversations.delete(new_conversation.id) + print("Conversations deleted") + + project_client.agents.delete_version(agent_name=agent.name, agent_version=agent.version) + print("Agent deleted") + + project_client.memory_stores.delete(memory_store.name) + print("Memory store deleted") diff --git 
a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_advanced.py b/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_advanced.py index a1dc39ffc5be..0345990d0647 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_advanced.py +++ b/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_advanced.py @@ -27,118 +27,129 @@ "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. """ -# import os -# from dotenv import load_dotenv -# from azure.identity import DefaultAzureCredential -# from azure.ai.projects import AIProjectClient -# from azure.ai.projects.models import ( -# MemoryStoreDefaultDefinition, -# MemoryStoreDefaultOptions, -# MemorySearchOptions, -# ResponsesUserMessageItemParam, -# ResponsesAssistantMessageItemParam, -# MemorySearchTool, -# PromptAgentDefinition, -# ) - -# load_dotenv() - -# project_client = AIProjectClient(endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"], credential=DefaultAzureCredential()) - -# with project_client: - -# # Create memory store with advanced options -# options = MemoryStoreDefaultOptions( -# user_profile_enabled=True, -# user_profile_details="Preferences and interests relevant to coffee expert agent", -# chat_summary_enabled=True, -# ) -# definition = MemoryStoreDefaultDefinition( -# chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], -# embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], -# options=options, -# ) -# memory_store = project_client.memory_stores.create( -# name="my_memory_store_3", -# description="Example memory store for conversations", -# definition=definition, -# ) -# print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") - -# # Set scope to associate the memories with. -# # You can also use "{{$userId}}"" to take the oid of the request authentication header. 
-# scope = "user_123" - -# # Extract memories from messages and add them to the memory store -# user_message = ResponsesUserMessageItemParam( -# content="I prefer dark roast coffee and usually drink it in the morning" -# ) -# update_poller = project_client.memory_stores.begin_update_memories( -# name=memory_store.name, -# scope=scope, -# items=[user_message], # Pass conversation items that you want to add to memory -# # update_delay=300 # Keep default inactivity delay before starting update -# ) -# print(f"Scheduled memory update operation (Update ID: {update_poller.update_id}, Status: {update_poller.status()})") - -# # Extend the previous update with another update and more messages -# new_message = ResponsesUserMessageItemParam(content="I also like cappuccinos in the afternoon") -# new_update_poller = project_client.memory_stores.begin_update_memories( -# name=memory_store.name, -# scope=scope, -# items=[new_message], -# previous_update_id=update_poller.update_id, # Extend from previous update ID -# update_delay=0, # Trigger update immediately without waiting for inactivity -# ) -# print( -# f"Scheduled memory update operation (Update ID: {new_update_poller.update_id}, Status: {new_update_poller.status()})" -# ) - -# # As first update has not started yet, the new update will cancel the first update and cover both sets of messages -# print( -# f"Superseded first memory update operation (Update ID: {update_poller.update_id}, Status: {update_poller.status()})" -# ) - -# new_update_result = new_update_poller.result() -# print( -# f"Second update {new_update_poller.update_id} completed with {len(new_update_result.memory_operations)} memory operations" -# ) -# for operation in new_update_result.memory_operations: -# print( -# f" - Operation: {operation.kind}, Memory ID: {operation.memory_item.memory_id}, Content: {operation.memory_item.content}" -# ) - -# # Retrieve memories from the memory store -# query_message = ResponsesUserMessageItemParam(content="What are my morning coffee preferences?") -# search_response = project_client.memory_stores.search_memories( -# name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) -# ) -# print(f"Found {len(search_response.memories)} memories") -# for memory in search_response.memories: -# print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") - -# # Perform another search using the previous search as context -# agent_message = ResponsesAssistantMessageItemParam( -# content="You previously indicated a preference for dark roast coffee in the morning." -# ) -# followup_query = ResponsesUserMessageItemParam( -# content="What about afternoon?" 
# Follow-up assuming context from previous messages -# ) -# followup_search_response = project_client.memory_stores.search_memories( -# name=memory_store.name, -# scope=scope, -# items=[agent_message, followup_query], -# previous_search_id=search_response.search_id, -# options=MemorySearchOptions(max_memories=5), -# ) -# print(f"Found {len(followup_search_response.memories)} memories") -# for memory in followup_search_response.memories: -# print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") - -# # Delete memories for the current scope -# delete_scope_response = project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope) -# print(f"Deleted memories for scope '{scope}': {delete_scope_response.deleted}") - -# # Delete memory store -# delete_response = project_client.memory_stores.delete(memory_store.name) -# print(f"Deleted: {delete_response.deleted}") +import os +from dotenv import load_dotenv +from azure.identity import DefaultAzureCredential +from azure.ai.projects import AIProjectClient +from azure.ai.projects.models import ( + MemoryStoreDefaultDefinition, + MemoryStoreDefaultOptions, + MemorySearchOptions, + ResponsesUserMessageItemParam, + ResponsesAssistantMessageItemParam, + MemorySearchTool, + PromptAgentDefinition, +) + +load_dotenv() + +endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"] + +with ( + DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential, + AIProjectClient(endpoint=endpoint, credential=credential) as project_client, +): + + # Delete memory store, if it already exists + memory_store_name = "my_memory_store" + try: + delete_response = project_client.memory_stores.delete(memory_store_name) + print(f"Deleted memory store: {delete_response.deleted}") + except Exception: + pass + + # Create memory store with advanced options + options = MemoryStoreDefaultOptions( + user_profile_enabled=True, + user_profile_details="Preferences and interests relevant to coffee expert agent", + chat_summary_enabled=True, + ) + definition = MemoryStoreDefaultDefinition( + chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], + embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], + options=options, + ) + memory_store = project_client.memory_stores.create( + name=memory_store_name, + description="Example memory store for conversations", + definition=definition, + ) + print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") + + # Set scope to associate the memories with. + # You can also use "{{$userId}}"" to take the oid of the request authentication header. 
+ scope = "user_123" + + # Extract memories from messages and add them to the memory store + user_message = ResponsesUserMessageItemParam( + content="I prefer dark roast coffee and usually drink it in the morning" + ) + update_poller = project_client.memory_stores.begin_update_memories( + name=memory_store.name, + scope=scope, + items=[user_message], # Pass conversation items that you want to add to memory + # update_delay=300 # Keep default inactivity delay before starting update + ) + print(f"Scheduled memory update operation (Update ID: {update_poller.update_id}, Status: {update_poller.status()})") + + # Extend the previous update with another update and more messages + new_message = ResponsesUserMessageItemParam(content="I also like cappuccinos in the afternoon") + new_update_poller = project_client.memory_stores.begin_update_memories( + name=memory_store.name, + scope=scope, + items=[new_message], + previous_update_id=update_poller.update_id, # Extend from previous update ID + update_delay=0, # Trigger update immediately without waiting for inactivity + ) + print( + f"Scheduled memory update operation (Update ID: {new_update_poller.update_id}, Status: {new_update_poller.status()})" + ) + + # As first update has not started yet, the new update will cancel the first update and cover both sets of messages + print( + f"Superseded first memory update operation (Update ID: {update_poller.update_id}, Status: {update_poller.status()})" + ) + + new_update_result = new_update_poller.result() + print( + f"Second update {new_update_poller.update_id} completed with {len(new_update_result.memory_operations)} memory operations" + ) + for operation in new_update_result.memory_operations: + print( + f" - Operation: {operation.kind}, Memory ID: {operation.memory_item.memory_id}, Content: {operation.memory_item.content}" + ) + + # Retrieve memories from the memory store + query_message = ResponsesUserMessageItemParam(content="What are my morning coffee preferences?") + search_response = project_client.memory_stores.search_memories( + name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) + ) + print(f"Found {len(search_response.memories)} memories") + for memory in search_response.memories: + print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") + + # Perform another search using the previous search as context + agent_message = ResponsesAssistantMessageItemParam( + content="You previously indicated a preference for dark roast coffee in the morning." + ) + followup_query = ResponsesUserMessageItemParam( + content="What about afternoon?" 
# Follow-up assuming context from previous messages + ) + followup_search_response = project_client.memory_stores.search_memories( + name=memory_store.name, + scope=scope, + items=[agent_message, followup_query], + previous_search_id=search_response.search_id, + options=MemorySearchOptions(max_memories=5), + ) + print(f"Found {len(followup_search_response.memories)} memories") + for memory in followup_search_response.memories: + print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") + + # Delete memories for the current scope + delete_scope_response = project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope) + print(f"Deleted memories for scope '{scope}': {delete_scope_response.deleted}") + + # Delete memory store + delete_response = project_client.memory_stores.delete(memory_store.name) + print(f"Deleted: {delete_response.deleted}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_basic.py b/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_basic.py index 247ace2ae138..794f23de2289 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_basic.py +++ b/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_basic.py @@ -27,73 +27,86 @@ "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. """ -# import os -# from dotenv import load_dotenv -# from azure.identity import DefaultAzureCredential -# from azure.ai.projects import AIProjectClient -# from azure.ai.projects.models import ( -# MemoryStoreDefaultDefinition, -# MemoryStoreDefaultOptions, -# MemorySearchOptions, -# ResponsesUserMessageItemParam, -# MemorySearchTool, -# PromptAgentDefinition, -# ) - -# load_dotenv() - -# project_client = AIProjectClient(endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"], credential=DefaultAzureCredential()) - -# with project_client: - -# # Create a memory store -# definition = MemoryStoreDefaultDefinition( -# chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], -# embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], -# ) -# memory_store = project_client.memory_stores.create( -# name="my_memory_store", -# description="Example memory store for conversations", -# definition=definition, -# ) -# print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") - -# # Set scope to associate the memories with -# # You can also use "{{$userId}}"" to take the oid of the request authentication header -# scope = "user_123" - -# # Add memories to the memory store -# user_message = ResponsesUserMessageItemParam( -# content="I prefer dark roast coffee and usually drink it in the morning" -# ) -# update_poller = project_client.memory_stores.begin_update_memories( -# name=memory_store.name, -# scope=scope, -# items=[user_message], # Pass conversation items that you want to add to memory -# update_delay=0, # Trigger update immediately without waiting for inactivity -# ) - -# # Wait for the update operation to complete, but can also fire and forget -# update_result = update_poller.result() -# print(f"Updated with {len(update_result.memory_operations)} memory operations") -# for operation in update_result.memory_operations: -# print( -# f" - Operation: {operation.kind}, Memory ID: {operation.memory_item.memory_id}, Content: {operation.memory_item.content}" -# ) - -# # Retrieve memories from the memory store -# query_message = ResponsesUserMessageItemParam(content="What are my coffee preferences?") -# search_response = 
project_client.memory_stores.search_memories( -# name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) -# ) -# print(f"Found {len(search_response.memories)} memories") -# for memory in search_response.memories: -# print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") - -# # Delete memories for a specific scope -# delete_scope_response = project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope) -# print(f"Deleted memories for scope '{scope}': {delete_scope_response.deleted}") - -# # Delete memory store -# delete_response = project_client.memory_stores.delete(memory_store.name) -# print(f"Deleted: {delete_response.deleted}") +import os +from dotenv import load_dotenv +from azure.identity import DefaultAzureCredential +from azure.ai.projects import AIProjectClient +from azure.ai.projects.models import ( + MemoryStoreDefaultDefinition, + MemoryStoreDefaultOptions, + MemorySearchOptions, + ResponsesUserMessageItemParam, + MemorySearchTool, + PromptAgentDefinition, +) + +load_dotenv() + +endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"] + +with ( + DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential, + AIProjectClient(endpoint=endpoint, credential=credential) as project_client, +): + + # Delete memory store, if it already exists + memory_store_name = "my_memory_store" + try: + delete_response = project_client.memory_stores.delete(memory_store_name) + print(f"Deleted memory store: {delete_response.deleted}") + except Exception: + pass + + # Create a memory store + definition = MemoryStoreDefaultDefinition( + chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], + embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], + ) + memory_store = project_client.memory_stores.create( + name=memory_store_name, + description="Example memory store for conversations", + definition=definition, + ) + print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") + print(f" - Chat model: {memory_store.definition.chat_model}") + print(f" - Embedding model: {memory_store.definition.embedding_model}") + + # Set scope to associate the memories with + # You can also use "{{$userId}}"" to take the oid of the request authentication header + scope = "user_123" + + # Add memories to the memory store + user_message = ResponsesUserMessageItemParam( + content="I prefer dark roast coffee and usually drink it in the morning" + ) + update_poller = project_client.memory_stores.begin_update_memories( + name=memory_store.name, + scope=scope, + items=[user_message], # Pass conversation items that you want to add to memory + update_delay=0, # Trigger update immediately without waiting for inactivity + ) + + # Wait for the update operation to complete, but can also fire and forget + update_result = update_poller.result() + print(f"Updated with {len(update_result.memory_operations)} memory operations") + for operation in update_result.memory_operations: + print( + f" - Operation: {operation.kind}, Memory ID: {operation.memory_item.memory_id}, Content: {operation.memory_item.content}" + ) + + # Retrieve memories from the memory store + query_message = ResponsesUserMessageItemParam(content="What are my coffee preferences?") + search_response = project_client.memory_stores.search_memories( + name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) + ) + print(f"Found 
{len(search_response.memories)} memories") + for memory in search_response.memories: + print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") + + # Delete memories for a specific scope + delete_scope_response = project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope) + print(f"Deleted memories for scope '{scope}': {delete_scope_response.deleted}") + + # Delete memory store + delete_response = project_client.memory_stores.delete(memory_store.name) + print(f"Deleted: {delete_response.deleted}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_crud.py b/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_crud.py index 9305d35ee5f5..30fa08082f80 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_crud.py +++ b/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_crud.py @@ -24,42 +24,53 @@ "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. """ -# import os -# from dotenv import load_dotenv -# from azure.identity import DefaultAzureCredential -# from azure.ai.projects import AIProjectClient -# from azure.ai.projects.models import MemoryStoreDefaultDefinition +import os +from dotenv import load_dotenv +from azure.identity import DefaultAzureCredential +from azure.ai.projects import AIProjectClient +from azure.ai.projects.models import MemoryStoreDefaultDefinition -# load_dotenv() +load_dotenv() -# project_client = AIProjectClient(endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"], credential=DefaultAzureCredential()) +endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"] -# with project_client: +with ( + DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential, + AIProjectClient(endpoint=endpoint, credential=credential) as project_client, +): -# # Create Memory Store -# definition = MemoryStoreDefaultDefinition( -# chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], -# embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], -# ) -# memory_store = project_client.memory_stores.create( -# name="my_memory_store", description="Example memory store for conversations", definition=definition -# ) -# print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") + # Delete memory store, if it already exists + memory_store_name = "my_memory_store" + try: + delete_response = project_client.memory_stores.delete(memory_store_name) + print(f"Deleted memory store: {delete_response.deleted}") + except Exception: + pass -# # Get Memory Store -# get_store = project_client.memory_stores.get(memory_store.name) -# print(f"Retrieved: {get_store.name} ({get_store.id}): {get_store.description}") + # Create Memory Store + definition = MemoryStoreDefaultDefinition( + chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], + embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], + ) + memory_store = project_client.memory_stores.create( + name=memory_store_name, description="Example memory store for conversations", definition=definition + ) + print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") -# # Update Memory Store -# updated_store = project_client.memory_stores.update(name=memory_store.name, description="Updated description") -# print(f"Updated: {updated_store.name} ({updated_store.id}): {updated_store.description}") + # Get Memory Store + get_store = project_client.memory_stores.get(memory_store.name) + 
print(f"Retrieved: {get_store.name} ({get_store.id}): {get_store.description}") -# # List Memory Store -# memory_stores = list(project_client.memory_stores.list(limit=10)) -# print(f"Found {len(memory_stores)} memory stores") -# for store in memory_stores: -# print(f" - {store.name} ({store.id}): {store.description}") + # Update Memory Store + updated_store = project_client.memory_stores.update(name=memory_store.name, description="Updated description") + print(f"Updated: {updated_store.name} ({updated_store.id}): {updated_store.description}") -# # Delete Memory Store -# delete_response = project_client.memory_stores.delete(memory_store.name) -# print(f"Deleted: {delete_response.deleted}") + # List Memory Store + memory_stores = list(project_client.memory_stores.list(limit=10)) + print(f"Found {len(memory_stores)} memory stores") + for store in memory_stores: + print(f" - {store.name} ({store.id}): {store.description}") + + # Delete Memory Store + delete_response = project_client.memory_stores.delete(memory_store.name) + print(f"Deleted: {delete_response.deleted}") From 5d5d9250cd12c0d46028706e63ea938a3cbc8b38 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 12:40:40 -0800 Subject: [PATCH 09/24] Move/rename samples --- .../sample_agent_memory_search.py} | 9 ++++++--- .../memory => memories}/sample_memory_advanced.py | 0 .../{agents/memory => memories}/sample_memory_basic.py | 0 .../{agents/memory => memories}/sample_memory_crud.py | 0 4 files changed, 6 insertions(+), 3 deletions(-) rename sdk/ai/azure-ai-projects/samples/agents/{memory/sample_agent_memory.py => tools/sample_agent_memory_search.py} (96%) rename sdk/ai/azure-ai-projects/samples/{agents/memory => memories}/sample_memory_advanced.py (100%) rename sdk/ai/azure-ai-projects/samples/{agents/memory => memories}/sample_memory_basic.py (100%) rename sdk/ai/azure-ai-projects/samples/{agents/memory => memories}/sample_memory_crud.py (100%) diff --git a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_agent_memory.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py similarity index 96% rename from sdk/ai/azure-ai-projects/samples/agents/memory/sample_agent_memory.py rename to sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py index 7e97401d0b14..2454dc165c5b 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_agent_memory.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py @@ -6,10 +6,14 @@ """ DESCRIPTION: - This sample demonstrates how to integrate memory into a prompt agent. + This sample demonstrates how to integrate memory into a prompt agent, + by using the Memory Search Tool to retrieve relevant past user messages. + + For memory management, see also samples in the folder "samples/memories" + folder. 
USAGE: - python sample_agent_memory.py + python sample_agent_memory_search.py Before running the sample: @@ -39,7 +43,6 @@ MemoryStoreDefaultDefinition, MemorySearchTool, PromptAgentDefinition, - AgentReference, ) load_dotenv() diff --git a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_advanced.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py similarity index 100% rename from sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_advanced.py rename to sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py diff --git a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_basic.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py similarity index 100% rename from sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_basic.py rename to sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py diff --git a/sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_crud.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py similarity index 100% rename from sdk/ai/azure-ai-projects/samples/agents/memory/sample_memory_crud.py rename to sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py From b4eb8223cbd6d580090277052b9fc52a5cd74487 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 13:02:24 -0800 Subject: [PATCH 10/24] Fix import in async memory operations. Add async MemorySearchTool sample --- .../aio/operations/_patch_memories_async.py | 6 +- .../tools/sample_agent_memory_search.py | 5 +- .../tools/sample_agent_memory_search_async.py | 145 ++++++++++++++++++ 3 files changed, 150 insertions(+), 6 deletions(-) create mode 100644 sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py index fd37a869ca14..43a468a292aa 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py @@ -9,7 +9,7 @@ """ from typing import Union, Optional, Any, List, overload, IO, cast from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.polling import AsyncPollingMethod, AsyncNoPolling +from azure.core.polling import AsyncNoPolling from azure.core.utils import case_insensitive_dict from ... 
import models as _models from ...models import ( @@ -21,8 +21,8 @@ AsyncUpdateMemoriesLROPollingMethod, ) from ._operations import JSON, _Unset, ClsType, MemoryStoresOperations as GenerateMemoryStoresOperations -from .._validation import api_version_validation -from .._utils.model_base import _deserialize +from ..._validation import api_version_validation +from ..._utils.model_base import _deserialize class MemoryStoresOperations(GenerateMemoryStoresOperations): diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py index 2454dc165c5b..b05bb75103ed 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py @@ -8,6 +8,7 @@ DESCRIPTION: This sample demonstrates how to integrate memory into a prompt agent, by using the Memory Search Tool to retrieve relevant past user messages. + This sample uses the synchronous AIProjectClient and OpenAI clients. For memory management, see also samples in the folder "samples/memories" folder. @@ -58,7 +59,7 @@ # Delete memory store, if it already exists memory_store_name = "my_memory_store" try: - delete_response = project_client.memory_stores.delete(memory_store_name) + project_client.memory_stores.delete(memory_store_name) print(f"Memory store `{memory_store_name}` deleted") except ResourceNotFoundError: pass @@ -108,7 +109,6 @@ extra_body={"agent": {"name": agent.name, "type": "agent_reference"}}, ) print(f"Response output: {response.output_text}") - print(response.output) # After an inactivity in the conversation, memories will be extracted from the conversation and stored print("Waiting for memories to be stored...") @@ -125,7 +125,6 @@ extra_body={"agent": {"name": agent.name, "type": "agent_reference"}}, ) print(f"Response output: {new_response.output_text}") - print(new_response.output) # Clean up openai_client.conversations.delete(conversation.id) diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py new file mode 100644 index 000000000000..c1f6220daecb --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py @@ -0,0 +1,145 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + This sample demonstrates how to integrate memory into a prompt agent, + by using the Memory Search Tool to retrieve relevant past user messages. + This sample uses the asynchronous AIProjectClient and AsyncOpenAI clients. + + For memory management, see also samples in the folder "samples/memories" + folder. + +USAGE: + python sample_agent_memory_search_async.py + + Before running the sample: + + pip install "azure-ai-projects>=2.0.0b1" azure-identity openai python-dotenv aiohttp + + Deploy a chat model (e.g. gpt-4.1) and an embedding model (e.g. text-embedding-3-small). + Once you have deployed models, set the deployment name in the variables below. + + Set these environment variables with your own values: + 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview + page of your Microsoft Foundry portal. 
+ 2) AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model for the agent, + as found under the "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. + 3) AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model for memory, + as found under the "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. + 4) AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME - The deployment name of the embedding model for memory, + as found under the "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. +""" + +import asyncio +import os +from dotenv import load_dotenv +from azure.identity.aio import DefaultAzureCredential +from azure.core.exceptions import ResourceNotFoundError +from azure.ai.projects.aio import AIProjectClient +from azure.ai.projects.models import ( + MemoryStoreDefaultDefinition, + MemorySearchTool, + PromptAgentDefinition, +) + +load_dotenv() + + +async def main() -> None: + + endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"] + + async with ( + DefaultAzureCredential() as credential, + AIProjectClient(endpoint=endpoint, credential=credential) as project_client, + project_client.get_openai_client() as openai_client, + ): + + # Delete memory store, if it already exists + memory_store_name = "my_memory_store" + try: + await project_client.memory_stores.delete(memory_store_name) + print(f"Memory store `{memory_store_name}` deleted") + except ResourceNotFoundError: + pass + + # Create a memory store + definition = MemoryStoreDefaultDefinition( + chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], + embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], + ) + memory_store = await project_client.memory_stores.create( + name=memory_store_name, + description="Example memory store for conversations", + definition=definition, + ) + print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") + + # Set scope to associate the memories with + # You can also use "{{$userId}}"" to take the oid of the request authentication header + scope = "user_123" + + # Create a prompt agent with memory search tool + agent = await project_client.agents.create_version( + agent_name="MyAgent", + definition=PromptAgentDefinition( + model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], + instructions="You are a helpful assistant that answers general questions", + tools=[ + MemorySearchTool( + memory_store_name=memory_store.name, + scope=scope, + update_delay=1, # Wait 1 second of inactivity before updating memories + # In a real application, set this to a higher value like 300 (5 minutes, default) + ) + ], + ), + ) + print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})") + + # Create a conversation with the agent with memory tool enabled + conversation = await openai_client.conversations.create() + print(f"Created conversation (id: {conversation.id})") + + # Create an agent response to initial user message + response = await openai_client.responses.create( + input="I prefer dark roast coffee", + conversation=conversation.id, + extra_body={"agent": {"name": agent.name, "type": "agent_reference"}}, + ) + print(f"Response output: {response.output_text}") + + # After an inactivity in the conversation, memories will be extracted from the conversation and stored + print("Waiting for memories to be stored...") + await asyncio.sleep(60) + + # Create a new conversation + new_conversation = await 
openai_client.conversations.create() + print(f"Created new conversation (id: {new_conversation.id})") + + # Create an agent response with stored memories + new_response = await openai_client.responses.create( + input="Please order my usual coffee", + conversation=new_conversation.id, + extra_body={"agent": {"name": agent.name, "type": "agent_reference"}}, + ) + print(f"Response output: {new_response.output_text}") + + # Clean up + await openai_client.conversations.delete(conversation.id) + await openai_client.conversations.delete(new_conversation.id) + print("Conversations deleted") + + await project_client.agents.delete_version(agent_name=agent.name, agent_version=agent.version) + print("Agent deleted") + + await project_client.memory_stores.delete(memory_store.name) + print("Memory store deleted") + + +if __name__ == "__main__": + asyncio.run(main()) From 3426033196db3fffce0ac2771d61efb86105e03d Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 13:30:24 -0800 Subject: [PATCH 11/24] Update changelog and package readme --- sdk/ai/azure-ai-projects/CHANGELOG.md | 6 ++++++ sdk/ai/azure-ai-projects/README.md | 1 + 2 files changed, 7 insertions(+) diff --git a/sdk/ai/azure-ai-projects/CHANGELOG.md b/sdk/ai/azure-ai-projects/CHANGELOG.md index 3adf60a51611..b89ef16eaf01 100644 --- a/sdk/ai/azure-ai-projects/CHANGELOG.md +++ b/sdk/ai/azure-ai-projects/CHANGELOG.md @@ -4,6 +4,9 @@ ### Features Added +* Agent Memory operations, including code for custom LRO poller. See methods on the ".memory_store" +property of `AIProjectClient`. + ### Breaking changes * `get_openai_client()` method on the asynchronous AIProjectClient is no longer an "async" method. @@ -12,6 +15,9 @@ ### Bugs Fixed ### Sample updates + +* Added samples to show usage of the Memory Search Tool (see sample_agent_memory_search.py) and its async equivalent. +* Added samples to show Memory management. See samples in the folder `samples\memories`. * Added `finetuning` samples for operations create, retrieve, list, list_events, list_checkpoints, cancel, pause and resume. Also, these samples includes various finetuning techniques like Supervised (SFT), Reinforcement (RFT) and Direct performance optimization (DPO). ## 2.0.0b1 (2025-11-11) diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md index 166680d5265e..2af62c6745ea 100644 --- a/sdk/ai/azure-ai-projects/README.md +++ b/sdk/ai/azure-ai-projects/README.md @@ -5,6 +5,7 @@ resources in your Microsoft Foundry Project. Use it to: * **Create and run Agents** using methods on methods on the `.agents` client property. 
* **Enhance Agents with specialized tools**: + * Agent Memory Search * Agent-to-Agent (A2A) * Azure AI Search * Bing Custom Search From 1300023092c8927a6c54e80368df780c8fddc8a5 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 14:08:51 -0800 Subject: [PATCH 12/24] Fix MyPy errors --- sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py | 2 +- .../samples/memories/sample_memory_basic.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index 0f0d2b62e7be..fa8cd08425fc 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -227,7 +227,7 @@ def from_continuation_token( class AsyncUpdateMemoriesLROPoller(AsyncLROPoller[MemoryStoreUpdateCompletedResult]): """Custom AsyncLROPoller for Memory Store update operations.""" - _polling_method: "UpdateMemoriesLROPollingMethod" + _polling_method: "AsyncUpdateMemoriesLROPollingMethod" @property def update_id(self) -> str: diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py index 794f23de2289..063625fb6c88 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py @@ -68,8 +68,9 @@ definition=definition, ) print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") - print(f" - Chat model: {memory_store.definition.chat_model}") - print(f" - Embedding model: {memory_store.definition.embedding_model}") + if isinstance(memory_store.definition, MemoryStoreDefaultDefinition): + print(f" - Chat model: {memory_store.definition.chat_model}") + print(f" - Embedding model: {memory_store.definition.embedding_model}") # Set scope to associate the memories with # You can also use "{{$userId}}"" to take the oid of the request authentication header From 3805ebbce49739f6c236e9765f1880dc7a1032ab Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 14:35:02 -0800 Subject: [PATCH 13/24] Fix quality gates --- .../aio/operations/_patch_memories_async.py | 4 ++-- .../azure/ai/projects/models/_patch.py | 18 ++++++++++-------- .../ai/projects/operations/_patch_memories.py | 2 +- .../sample_finetuning_supervised_job.py | 4 ++-- .../sample_finetuning_supervised_job_async.py | 4 ++-- 5 files changed, 17 insertions(+), 15 deletions(-) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py index 43a468a292aa..61b4ac43a42f 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py @@ -174,7 +174,7 @@ async def begin_update_memories( ) await raw_result.http_response.read() # type: ignore - raw_result.http_response.status_code = 202 # type: ignore + raw_result.http_response.status_code = 202 # type: ignore raw_result.http_response.headers["Operation-Location"] = ( # type: ignore f"{self._config.endpoint}/memory_stores/{name}/updates/{raw_result.http_response.json().get('update_id')}?api-version=2025-11-15-preview" # type: ignore ) @@ -224,7 +224,7 @@ def 
get_long_running_output(pipeline_response): ) return AsyncUpdateMemoriesLROPoller( self._client, - raw_result, + raw_result, # type: ignore[possibly-undefined] get_long_running_output, polling_method, # pylint: disable=possibly-used-before-assignment ) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index fa8cd08425fc..12fb23c4b7d6 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -7,8 +7,6 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ from typing import List, Dict, Optional, Any, Tuple -from ._models import CustomCredential as CustomCredentialGenerated -from ..models import MemoryStoreUpdateCompletedResult, MemoryStoreUpdateResult from azure.core.polling import LROPoller, AsyncLROPoller, PollingMethod, AsyncPollingMethod from azure.core.polling.base_polling import ( LROBasePolling, @@ -16,6 +14,8 @@ _raise_if_bad_http_status_and_method, ) from azure.core.polling.async_base_polling import AsyncLROBasePolling +from ._models import CustomCredential as CustomCredentialGenerated +from ..models import MemoryStoreUpdateCompletedResult, MemoryStoreUpdateResult class CustomCredential(CustomCredentialGenerated): @@ -189,7 +189,7 @@ def update_id(self) -> str: :return: Returns the update ID. :rtype: str """ - return self._polling_method._current_body.update_id + return self._polling_method._current_body.update_id # pylint: disable=protected-access @property def superseded_by(self) -> Optional[str]: @@ -198,8 +198,9 @@ def superseded_by(self) -> Optional[str]: :return: Returns the ID of the superseding operation, if it exists. :rtype: Optional[str] """ - # pylint: disable=protected-access - return self._polling_method._current_body.superseded_by if self._polling_method._current_body else None + return ( + self._polling_method._current_body.superseded_by if self._polling_method._current_body else None # pylint: disable=protected-access + ) @classmethod def from_continuation_token( @@ -236,7 +237,7 @@ def update_id(self) -> str: :return: Returns the update ID. :rtype: str """ - return self._polling_method._current_body.update_id + return self._polling_method._current_body.update_id # pylint: disable=protected-access @property def superseded_by(self) -> Optional[str]: @@ -245,8 +246,9 @@ def superseded_by(self) -> Optional[str]: :return: Returns the ID of the superseding operation, if it exists. 
:rtype: Optional[str] """ - # pylint: disable=protected-access - return self._polling_method._current_body.superseded_by if self._polling_method._current_body else None + return ( + self._polling_method._current_body.superseded_by if self._polling_method._current_body else None # pylint: disable=protected-access + ) @classmethod def from_continuation_token( diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py index 73a38925afb2..a0e36180698b 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py @@ -231,7 +231,7 @@ def get_long_running_output(pipeline_response): ) return UpdateMemoriesLROPoller( self._client, - raw_result, + raw_result, # type: ignore[possibly-undefined] get_long_running_output, polling_method, # pylint: disable=possibly-used-before-assignment ) diff --git a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job.py b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job.py index ee0fbb2577eb..bb457fce18e4 100644 --- a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job.py +++ b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job.py @@ -7,9 +7,9 @@ """ DESCRIPTION: Given an AIProjectClient, this sample demonstrates how to use the synchronous - `.fine_tuning.jobs` methods to create, get, list, cancel, pause, resume, list events + `.fine_tuning.jobs` methods to create, get, list, cancel, pause, resume, list events and list checkpoints supervised fine-tuning jobs. - It also shows how to deploy the fine-tuned model using Azure Cognitive Services Management + It also shows how to deploy the fine-tuned model using Azure Cognitive Services Management Client and perform inference on the deployed model. Supported OpenAI models: GPT 4o, 4o-mini, 4.1, 4.1-mini diff --git a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job_async.py b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job_async.py index bb6ad7d6f44e..9e2cf13bea8b 100644 --- a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job_async.py +++ b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job_async.py @@ -7,9 +7,9 @@ """ DESCRIPTION: Given an AIProjectClient, this sample demonstrates how to use the asynchronous - .fine_tuning.jobs methods to create, get, list, cancel, pause, resume, list events + .fine_tuning.jobs methods to create, get, list, cancel, pause, resume, list events and list checkpoints supervised fine-tuning jobs. - It also shows how to deploy the fine-tuned model using Azure Cognitive Services Management + It also shows how to deploy the fine-tuned model using Azure Cognitive Services Management Client and perform inference on the deployed model. Supported OpenAI models: GPT 4o, 4o-mini, 4.1, 4.1-mini From 4e2235e32bfed33cfa9a79acaaa76276b15e1c02 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 15:13:36 -0800 Subject: [PATCH 14/24] Minor updates to CRUD sample. 
Add async CRUD sample --- .../samples/memories/sample_memory_crud.py | 10 ++- .../memories/sample_memory_crud_async.py | 90 +++++++++++++++++++ 2 files changed, 96 insertions(+), 4 deletions(-) create mode 100644 sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud_async.py diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py index 30fa08082f80..121abe88210c 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py @@ -6,7 +6,8 @@ """ DESCRIPTION: - This sample demonstrates how to perform CRUD operations on a memory store using the Azure AI Projects SDK. + This sample demonstrates how to perform CRUD operations on a memory store + using the synchronous AIProjectClient. USAGE: python sample_memory_crud.py @@ -26,6 +27,7 @@ import os from dotenv import load_dotenv +from azure.core.exceptions import ResourceNotFoundError from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient from azure.ai.projects.models import MemoryStoreDefaultDefinition @@ -42,9 +44,9 @@ # Delete memory store, if it already exists memory_store_name = "my_memory_store" try: - delete_response = project_client.memory_stores.delete(memory_store_name) - print(f"Deleted memory store: {delete_response.deleted}") - except Exception: + project_client.memory_stores.delete(memory_store_name) + print(f"Memory store `{memory_store_name}` deleted") + except ResourceNotFoundError: pass # Create Memory Store diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud_async.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud_async.py new file mode 100644 index 000000000000..fe6bf06fdb72 --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud_async.py @@ -0,0 +1,90 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + This sample demonstrates how to perform CRUD operations on a memory store + using the asynchronous AIProjectClient. + +USAGE: + python sample_memory_crud_async.py + + Before running the sample: + + pip install "azure-ai-projects>=2.0.0b1" azure-identity openai python-dotenv aiohttp + + Set these environment variables with your own values: + 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview + page of your Microsoft Foundry portal. + 2) AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model, as found under the "Name" column in + the "Models + endpoints" tab in your Microsoft Foundry project. + 3) AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME - The deployment name of the embedding model, as found under the + "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. 
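    A minimal sketch (illustrative only, assuming it runs inside the async main()
    defined below and that the listed store names exist): because the asynchronous
    client returns awaitables, independent reads can be issued concurrently, for
    example fetching several memory stores at once:

        import asyncio

        names = ["my_memory_store", "another_store"]  # hypothetical store names
        stores = await asyncio.gather(
            *(project_client.memory_stores.get(name) for name in names),
            return_exceptions=True,  # a missing store surfaces as an exception instead of aborting the batch
        )
        for name, store in zip(names, stores):
            if isinstance(store, BaseException):
                print(f"Could not retrieve `{name}`: {store}")
            else:
                print(f"Retrieved: {store.name} ({store.id})")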
+""" + +import asyncio +import os +from dotenv import load_dotenv +from azure.core.exceptions import ResourceNotFoundError +from azure.identity.aio import DefaultAzureCredential +from azure.ai.projects.aio import AIProjectClient +from azure.ai.projects.models import MemoryStoreDefaultDefinition + +load_dotenv() + + +async def main() -> None: + + endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"] + + async with ( + DefaultAzureCredential() as credential, + AIProjectClient(endpoint=endpoint, credential=credential) as project_client, + ): + + # Delete memory store, if it already exists + memory_store_name = "my_memory_store" + try: + await project_client.memory_stores.delete(memory_store_name) + print(f"Memory store `{memory_store_name}` deleted") + except ResourceNotFoundError: + pass + + # Create Memory Store + definition = MemoryStoreDefaultDefinition( + chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], + embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], + ) + memory_store = await project_client.memory_stores.create( + name=memory_store_name, description="Example memory store for conversations", definition=definition + ) + print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") + + # Get Memory Store + get_store = await project_client.memory_stores.get(memory_store.name) + print(f"Retrieved: {get_store.name} ({get_store.id}): {get_store.description}") + + # Update Memory Store + updated_store = await project_client.memory_stores.update( + name=memory_store.name, description="Updated description" + ) + print(f"Updated: {updated_store.name} ({updated_store.id}): {updated_store.description}") + + # List Memory Store + memory_stores = [] + async for store in project_client.memory_stores.list(limit=10): + memory_stores.append(store) + print(f"Found {len(memory_stores)} memory stores") + for store in memory_stores: + print(f" - {store.name} ({store.id}): {store.description}") + + # Delete Memory Store + delete_response = await project_client.memory_stores.delete(memory_store.name) + print(f"Deleted: {delete_response.deleted}") + + +if __name__ == "__main__": + asyncio.run(main()) From deb35ee8f8fba64f6c5e7428d8fce2ccfcc4ecae Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 16:01:42 -0800 Subject: [PATCH 15/24] Add default options to MemoryStoreDefaultDefinition to workaround service bug --- .../azure-ai-projects/azure/ai/projects/models/_patch.py | 8 ++++++-- .../samples/agents/tools/sample_agent_memory_search.py | 4 ++++ .../agents/tools/sample_agent_memory_search_async.py | 4 ++++ .../samples/memories/sample_memory_basic.py | 3 +++ 4 files changed, 17 insertions(+), 2 deletions(-) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index 12fb23c4b7d6..dc22eb07463d 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -199,7 +199,9 @@ def superseded_by(self) -> Optional[str]: :rtype: Optional[str] """ return ( - self._polling_method._current_body.superseded_by if self._polling_method._current_body else None # pylint: disable=protected-access + self._polling_method._current_body.superseded_by # pylint: disable=protected-access + if self._polling_method._current_body # pylint: disable=protected-access + else None ) @classmethod @@ -247,7 +249,9 @@ def superseded_by(self) -> Optional[str]: 
:rtype: Optional[str] """ return ( - self._polling_method._current_body.superseded_by if self._polling_method._current_body else None # pylint: disable=protected-access + self._polling_method._current_body.superseded_by # pylint: disable=protected-access + if self._polling_method._current_body # pylint: disable=protected-access + else None ) @classmethod diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py index b05bb75103ed..2ed399c03aa0 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py @@ -44,6 +44,7 @@ MemoryStoreDefaultDefinition, MemorySearchTool, PromptAgentDefinition, + MemoryStoreDefaultOptions, ) load_dotenv() @@ -68,6 +69,9 @@ definition = MemoryStoreDefaultDefinition( chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], + options=MemoryStoreDefaultOptions( + user_profile_enabled=True, chat_summary_enabled=True + ), # Note: This line will not be needed once the service is fixed to use correct defaults ) memory_store = project_client.memory_stores.create( name=memory_store_name, diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py index c1f6220daecb..0c3df1149a39 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py @@ -44,6 +44,7 @@ MemoryStoreDefaultDefinition, MemorySearchTool, PromptAgentDefinition, + MemoryStoreDefaultOptions, ) load_dotenv() @@ -71,6 +72,9 @@ async def main() -> None: definition = MemoryStoreDefaultDefinition( chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], + options=MemoryStoreDefaultOptions( + user_profile_enabled=True, chat_summary_enabled=True + ), # Note: This line will not be needed once the service is fixed to use correct defaults ) memory_store = await project_client.memory_stores.create( name=memory_store_name, diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py index 063625fb6c88..260b4fe726f8 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py @@ -61,6 +61,9 @@ definition = MemoryStoreDefaultDefinition( chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], + options=MemoryStoreDefaultOptions( + user_profile_enabled=True, chat_summary_enabled=True + ), # Note: This line will not be needed once the service is fixed to use correct defaults ) memory_store = project_client.memory_stores.create( name=memory_store_name, From 97a9d201710efe1d2a5fc612e0835d77ffdc5c3e Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 16:30:32 -0800 Subject: [PATCH 16/24] Sample updates --- .../memories/sample_memory_advanced.py | 20 ++++++++++--------- .../samples/memories/sample_memory_basic.py | 20 ++++++++++--------- .../samples/memories/sample_memory_crud.py | 3 +++ 
.../memories/sample_memory_crud_async.py | 3 +++ 4 files changed, 28 insertions(+), 18 deletions(-) diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py index 0345990d0647..c87521993c1a 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py @@ -8,6 +8,9 @@ DESCRIPTION: This sample demonstrates how to interact with the memory store to add and retrieve memory. + See also /samples/agents/tools/sample_agent_memory_search.py that shows + how to use the Memory Search Tool in a prompt agent. + USAGE: python sample_memory_advanced.py @@ -29,6 +32,7 @@ import os from dotenv import load_dotenv +from azure.core.exceptions import ResourceNotFoundError from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( @@ -37,8 +41,6 @@ MemorySearchOptions, ResponsesUserMessageItemParam, ResponsesAssistantMessageItemParam, - MemorySearchTool, - PromptAgentDefinition, ) load_dotenv() @@ -53,9 +55,9 @@ # Delete memory store, if it already exists memory_store_name = "my_memory_store" try: - delete_response = project_client.memory_stores.delete(memory_store_name) - print(f"Deleted memory store: {delete_response.deleted}") - except Exception: + project_client.memory_stores.delete(memory_store_name) + print(f"Memory store `{memory_store_name}` deleted") + except ResourceNotFoundError: pass # Create memory store with advanced options @@ -147,9 +149,9 @@ print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") # Delete memories for the current scope - delete_scope_response = project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope) - print(f"Deleted memories for scope '{scope}': {delete_scope_response.deleted}") + project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope) + print(f"Deleted memories for scope '{scope}'") # Delete memory store - delete_response = project_client.memory_stores.delete(memory_store.name) - print(f"Deleted: {delete_response.deleted}") + project_client.memory_stores.delete(memory_store.name) + print(f"Deleted memory store `{memory_store.name}`") diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py index 260b4fe726f8..a0b35bc70b1d 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py @@ -8,6 +8,9 @@ DESCRIPTION: This sample demonstrates how to interact with the memory store to add and retrieve memory. + See also /samples/agents/tools/sample_agent_memory_search.py that shows + how to use the Memory Search Tool in a prompt agent. 
+ USAGE: python sample_memory_basic.py @@ -29,6 +32,7 @@ import os from dotenv import load_dotenv +from azure.core.exceptions import ResourceNotFoundError from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( @@ -36,8 +40,6 @@ MemoryStoreDefaultOptions, MemorySearchOptions, ResponsesUserMessageItemParam, - MemorySearchTool, - PromptAgentDefinition, ) load_dotenv() @@ -52,9 +54,9 @@ # Delete memory store, if it already exists memory_store_name = "my_memory_store" try: - delete_response = project_client.memory_stores.delete(memory_store_name) - print(f"Deleted memory store: {delete_response.deleted}") - except Exception: + project_client.memory_stores.delete(memory_store_name) + print(f"Memory store `{memory_store_name}` deleted") + except ResourceNotFoundError: pass # Create a memory store @@ -108,9 +110,9 @@ print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") # Delete memories for a specific scope - delete_scope_response = project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope) - print(f"Deleted memories for scope '{scope}': {delete_scope_response.deleted}") + project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope) + print(f"Deleted memories for scope '{scope}'") # Delete memory store - delete_response = project_client.memory_stores.delete(memory_store.name) - print(f"Deleted: {delete_response.deleted}") + project_client.memory_stores.delete(memory_store.name) + print(f"Deleted memory store `{memory_store.name}`") diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py index 121abe88210c..b505f2b62263 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py @@ -9,6 +9,9 @@ This sample demonstrates how to perform CRUD operations on a memory store using the synchronous AIProjectClient. + See also /samples/agents/tools/sample_agent_memory_search.py that shows + how to use the Memory Search Tool in a prompt agent. + USAGE: python sample_memory_crud.py diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud_async.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud_async.py index fe6bf06fdb72..b12d28ebffe9 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud_async.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud_async.py @@ -9,6 +9,9 @@ This sample demonstrates how to perform CRUD operations on a memory store using the asynchronous AIProjectClient. + See also /samples/agents/tools/sample_agent_memory_search_async.py that shows + how to use the Memory Search Tool in a prompt agent. + USAGE: python sample_memory_crud_async.py From e4d1a5349611e8d48c532d2e8ec6b26576beb943 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 20:15:58 -0800 Subject: [PATCH 17/24] Fix bug in sync patched code. 
Add async basic sample --- .../ai/projects/operations/_patch_memories.py | 13 +- .../memories/sample_memory_basic_async.py | 127 ++++++++++++++++++ 2 files changed, 130 insertions(+), 10 deletions(-) create mode 100644 sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py index a0e36180698b..b5620c936de9 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py @@ -119,11 +119,10 @@ def begin_update_memories( body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, - conversation_id: Optional[str] = None, items: Optional[List[_models.ItemParam]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, - **kwargs: Any, + **kwargs: Any ) -> UpdateMemoriesLROPoller: """Update memory store with conversation memories. @@ -134,15 +133,10 @@ def begin_update_memories( :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. Required. :paramtype scope: str - :keyword conversation_id: The conversation ID from which to extract memories. Only one of - conversation_id or items should be provided. Default value is None. - :paramtype conversation_id: str - :keyword items: Conversation items from which to extract memories. Only one of conversation_id - or items should be provided. Default value is None. + :keyword items: Conversation items from which to extract memories. Default value is None. :paramtype items: list[~azure.ai.projects.models.ItemParam] :keyword previous_update_id: The unique ID of the previous update request, enabling incremental - memory updates from where the last operation left off. Cannot be used together with - conversation_id. Default value is None. + memory updates from where the last operation left off. Default value is None. :paramtype previous_update_id: str :keyword update_delay: Timeout period before processing the memory update in seconds. If a new update request is received during this period, it will cancel the current request and @@ -169,7 +163,6 @@ def begin_update_memories( name=name, body=body, scope=scope, - conversation_id=conversation_id, items=items, previous_update_id=previous_update_id, update_delay=update_delay, diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py new file mode 100644 index 000000000000..ac3320eadd3f --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py @@ -0,0 +1,127 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + This sample demonstrates how to interact with the memory store to add and retrieve memory + using the asynchronous AIProjectClient. + + See also /samples/agents/tools/sample_agent_memory_search_async.py that shows + how to use the Memory Search Tool in a prompt agent. + +USAGE: + python sample_memory_basic_async.py + + Before running the sample: + + pip install "azure-ai-projects>=2.0.0b1" azure-identity openai python-dotenv aiohttp + + Deploy a chat model (e.g. gpt-4.1) and an embedding model (e.g. text-embedding-3-small). 
+ Once you have deployed models, set the deployment name in the variables below. + + Set these environment variables with your own values: + 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview + page of your Microsoft Foundry portal. + 2) AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model, as found under the "Name" column in + the "Models + endpoints" tab in your Microsoft Foundry project. + 3) AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME - The deployment name of the embedding model, as found under the + "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. +""" + +import asyncio +import os +from dotenv import load_dotenv +from azure.core.exceptions import ResourceNotFoundError +from azure.identity.aio import DefaultAzureCredential +from azure.ai.projects.aio import AIProjectClient +from azure.ai.projects.models import ( + MemoryStoreDefaultDefinition, + MemoryStoreDefaultOptions, + MemorySearchOptions, + ResponsesUserMessageItemParam, +) + +load_dotenv() + + +async def main() -> None: + + endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"] + + async with ( + DefaultAzureCredential() as credential, + AIProjectClient(endpoint=endpoint, credential=credential) as project_client, + ): + + # Delete memory store, if it already exists + memory_store_name = "my_memory_store" + try: + await project_client.memory_stores.delete(memory_store_name) + print(f"Memory store `{memory_store_name}` deleted") + except ResourceNotFoundError: + pass + + # Create a memory store + definition = MemoryStoreDefaultDefinition( + chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], + embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], + options=MemoryStoreDefaultOptions( + user_profile_enabled=True, chat_summary_enabled=True + ), # Note: This line will not be needed once the service is fixed to use correct defaults + ) + memory_store = await project_client.memory_stores.create( + name=memory_store_name, + description="Example memory store for conversations", + definition=definition, + ) + print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") + if isinstance(memory_store.definition, MemoryStoreDefaultDefinition): + print(f" - Chat model: {memory_store.definition.chat_model}") + print(f" - Embedding model: {memory_store.definition.embedding_model}") + + # Set scope to associate the memories with + # You can also use "{{$userId}}"" to take the oid of the request authentication header + scope = "user_123" + + # Add memories to the memory store + user_message = ResponsesUserMessageItemParam( + content="I prefer dark roast coffee and usually drink it in the morning" + ) + update_poller = await project_client.memory_stores.begin_update_memories( + name=memory_store.name, + scope=scope, + items=[user_message], # Pass conversation items that you want to add to memory + update_delay=0, # Trigger update immediately without waiting for inactivity + ) + + # Wait for the update operation to complete, but can also fire and forget + update_result = await update_poller.result() + print(f"Updated with {len(update_result.memory_operations)} memory operations") + for operation in update_result.memory_operations: + print( + f" - Operation: {operation.kind}, Memory ID: {operation.memory_item.memory_id}, Content: {operation.memory_item.content}" + ) + + # Retrieve memories from the memory store + query_message = ResponsesUserMessageItemParam(content="What are my coffee preferences?") + 
search_response = await project_client.memory_stores.search_memories(
+            name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5)
+        )
+        print(f"Found {len(search_response.memories)} memories")
+        for memory in search_response.memories:
+            print(f"  - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}")
+
+        # Delete memories for a specific scope
+        await project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope)
+        print(f"Deleted memories for scope '{scope}'")
+
+        # Delete memory store
+        await project_client.memory_stores.delete(memory_store.name)
+        print(f"Deleted memory store `{memory_store.name}`")
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
From 430dd3c26cee4a00fdcae22b0c40babed166ebf1 Mon Sep 17 00:00:00 2001
From: Darren Cohen <39422044+dargilco@users.noreply.github.com>
Date: Thu, 13 Nov 2025 20:16:28 -0800
Subject: [PATCH 18/24] Run `black --config ../../../eng/black-pyproject.toml .`

---
 .../azure/ai/projects/operations/_patch_memories.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py
index b5620c936de9..e69e8d2988ec 100644
--- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py
+++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py
@@ -122,7 +122,7 @@ def begin_update_memories(
         items: Optional[List[_models.ItemParam]] = None,
         previous_update_id: Optional[str] = None,
         update_delay: Optional[int] = None,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> UpdateMemoriesLROPoller:
         """Update memory store with conversation memories.

From 9476ba44e436b54141738c34cf028080a5e9f90c Mon Sep 17 00:00:00 2001
From: Darren Cohen <39422044+dargilco@users.noreply.github.com>
Date: Thu, 13 Nov 2025 21:20:00 -0800
Subject: [PATCH 19/24] Add advanced async sample by Copilot. Have not run it yet

---
 .../memories/sample_memory_advanced.py       |   4 +-
 .../memories/sample_memory_advanced_async.py | 169 ++++++++++++++++++
 2 files changed, 172 insertions(+), 1 deletion(-)
 create mode 100644 sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py

diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py
index c87521993c1a..277288faff62 100644
--- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py
+++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py
@@ -6,7 +6,9 @@
 
 """
 DESCRIPTION:
-    This sample demonstrates how to interact with the memory store to add and retrieve memory.
+    This sample demonstrates how to interact with the memory store to add and retrieve memory
+    using the asynchronous AIProjectClient. It uses some additional operations compared
+    to the basic memory sample.
 
     See also /samples/agents/tools/sample_agent_memory_search.py that shows
     how to use the Memory Search Tool in a prompt agent.
diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py
new file mode 100644
index 000000000000..be25e6324920
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py
@@ -0,0 +1,169 @@
+# pylint: disable=line-too-long,useless-suppression
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + This sample demonstrates how to interact with the memory store to add and retrieve memory + using the asynchronous AIProjectClient. It uses some additional operations compared + to the basic memory sample. + + See also /samples/agents/tools/sample_agent_memory_search_async.py that shows + how to use the Memory Search Tool in a prompt agent. + +USAGE: + python sample_memory_advanced_async.py + + Before running the sample: + + pip install "azure-ai-projects>=2.0.0b1" azure-identity openai python-dotenv aiohttp + + Deploy a chat model (e.g. gpt-4.1) and an embedding model (e.g. text-embedding-3-small). + Once you have deployed models, set the deployment name in the variables below. + + Set these environment variables with your own values: + 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview + page of your Microsoft Foundry portal. + 2) AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model, as found under the "Name" column in + the "Models + endpoints" tab in your Microsoft Foundry project. + 3) AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME - The deployment name of the embedding model, as found under the + "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. +""" + +import asyncio +import os +from dotenv import load_dotenv +from azure.core.exceptions import ResourceNotFoundError +from azure.identity.aio import DefaultAzureCredential +from azure.ai.projects.aio import AIProjectClient +from azure.ai.projects.models import ( + MemoryStoreDefaultDefinition, + MemoryStoreDefaultOptions, + MemorySearchOptions, + ResponsesUserMessageItemParam, + ResponsesAssistantMessageItemParam, +) + +load_dotenv() + + +async def main() -> None: + + endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"] + + async with ( + DefaultAzureCredential() as credential, + AIProjectClient(endpoint=endpoint, credential=credential) as project_client, + ): + + # Delete memory store, if it already exists + memory_store_name = "my_memory_store" + try: + await project_client.memory_stores.delete(memory_store_name) + print(f"Memory store `{memory_store_name}` deleted") + except ResourceNotFoundError: + pass + + # Create memory store with advanced options + options = MemoryStoreDefaultOptions( + user_profile_enabled=True, + user_profile_details="Preferences and interests relevant to coffee expert agent", + chat_summary_enabled=True, + ) + definition = MemoryStoreDefaultDefinition( + chat_model=os.environ["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"], + embedding_model=os.environ["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"], + options=options, + ) + memory_store = await project_client.memory_stores.create( + name=memory_store_name, + description="Example memory store for conversations", + definition=definition, + ) + print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") + + # Set scope to associate the memories with. + # You can also use "{{$userId}}"" to take the oid of the request authentication header. 
+ scope = "user_123" + + # Extract memories from messages and add them to the memory store + user_message = ResponsesUserMessageItemParam( + content="I prefer dark roast coffee and usually drink it in the morning" + ) + update_poller = await project_client.memory_stores.begin_update_memories( + name=memory_store.name, + scope=scope, + items=[user_message], # Pass conversation items that you want to add to memory + # update_delay=300 # Keep default inactivity delay before starting update + ) + print( + f"Scheduled memory update operation (Update ID: {update_poller.update_id}, Status: {update_poller.status()})" + ) + + # Extend the previous update with another update and more messages + new_message = ResponsesUserMessageItemParam(content="I also like cappuccinos in the afternoon") + new_update_poller = await project_client.memory_stores.begin_update_memories( + name=memory_store.name, + scope=scope, + items=[new_message], + previous_update_id=update_poller.update_id, # Extend from previous update ID + update_delay=0, # Trigger update immediately without waiting for inactivity + ) + print( + f"Scheduled memory update operation (Update ID: {new_update_poller.update_id}, Status: {new_update_poller.status()})" + ) + + # As first update has not started yet, the new update will cancel the first update and cover both sets of messages + print( + f"Superseded first memory update operation (Update ID: {update_poller.update_id}, Status: {update_poller.status()})" + ) + + new_update_result = await new_update_poller.result() + print( + f"Second update {new_update_poller.update_id} completed with {len(new_update_result.memory_operations)} memory operations" + ) + for operation in new_update_result.memory_operations: + print( + f" - Operation: {operation.kind}, Memory ID: {operation.memory_item.memory_id}, Content: {operation.memory_item.content}" + ) + + # Retrieve memories from the memory store + query_message = ResponsesUserMessageItemParam(content="What are my morning coffee preferences?") + search_response = await project_client.memory_stores.search_memories( + name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) + ) + print(f"Found {len(search_response.memories)} memories") + for memory in search_response.memories: + print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") + + # Perform another search using the previous search as context + agent_message = ResponsesAssistantMessageItemParam( + content="You previously indicated a preference for dark roast coffee in the morning." + ) + followup_query = ResponsesUserMessageItemParam( + content="What about afternoon?" 
# Follow-up assuming context from previous messages
+        )
+        followup_search_response = await project_client.memory_stores.search_memories(
+            name=memory_store.name,
+            scope=scope,
+            items=[agent_message, followup_query],
+            previous_search_id=search_response.search_id,
+            options=MemorySearchOptions(max_memories=5),
+        )
+        print(f"Found {len(followup_search_response.memories)} memories")
+        for memory in followup_search_response.memories:
+            print(f"  - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}")
+
+        # Delete memories for the current scope
+        await project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope)
+        print(f"Deleted memories for scope '{scope}'")
+
+        # Delete memory store
+        await project_client.memory_stores.delete(memory_store.name)
+        print(f"Deleted memory store `{memory_store.name}`")
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
From 416fc725f9a6506e9cfd6713e2da478a62b89c28 Mon Sep 17 00:00:00 2001
From: Darren Cohen <39422044+dargilco@users.noreply.github.com>
Date: Thu, 13 Nov 2025 21:48:47 -0800
Subject: [PATCH 20/24] Update README.md

---
 sdk/ai/azure-ai-projects/README.md            | 23 +++++++++++++++++++
 .../tools/sample_agent_memory_search.py       | 18 ++++++++-------
 2 files changed, 33 insertions(+), 8 deletions(-)

diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md
index 2af62c6745ea..ac146425677b 100644
--- a/sdk/ai/azure-ai-projects/README.md
+++ b/sdk/ai/azure-ai-projects/README.md
@@ -367,6 +367,29 @@ These tools work immediately without requiring external connections.
 
   See the full sample code in [sample_agent_function_tool.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_function_tool.py).
 
+* **Memory Search Tool**
+
+  The Memory Store Tool adds Memory to an Agents, allowing the AI model to search for past information related to the current user prompt.
+
+  
+  ```python
+    # Set scope to associate the memories with
+    # You can also use "{{$userId}}"" to take the oid of the request authentication header
+    scope = "user_123"
+
+    tool = MemorySearchTool(
+        memory_store_name=memory_store.name,
+        scope=scope,
+        update_delay=1,  # Wait 1 second of inactivity before updating memories
+        # In a real application, set this to a higher value like 300 (5 minutes, default)
+    )
+  ```
+
+  
+  See the full [sample_agent_memory_search.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py) showing how to create an Agent with a memory store, and use it in multiple conversations.
+
+  See also samples in the folder [samples/memories](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-projects/samples/memories) showing how to manage memory stores.
+
 #### Connection-Based Tools
 
 These tools require configuring connections in your AI Foundry project and use `project_connection_id`.
diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py index 2ed399c03aa0..28443a922c24 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py @@ -80,24 +80,26 @@ ) print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") + # [START memory_search_tool_declaration] # Set scope to associate the memories with # You can also use "{{$userId}}"" to take the oid of the request authentication header scope = "user_123" + tool = MemorySearchTool( + memory_store_name=memory_store.name, + scope=scope, + update_delay=1, # Wait 1 second of inactivity before updating memories + # In a real application, set this to a higher value like 300 (5 minutes, default) + ) + # [END memory_search_tool_declaration] + # Create a prompt agent with memory search tool agent = project_client.agents.create_version( agent_name="MyAgent", definition=PromptAgentDefinition( model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], instructions="You are a helpful assistant that answers general questions", - tools=[ - MemorySearchTool( - memory_store_name=memory_store.name, - scope=scope, - update_delay=1, # Wait 1 second of inactivity before updating memories - # In a real application, set this to a higher value like 300 (5 minutes, default) - ) - ], + tools=[tool], ), ) print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})") From b8b59b5d863ce20033343f1b50d59dbdbc1da4d5 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 21:53:03 -0800 Subject: [PATCH 21/24] Fix code snippet in README.md --- sdk/ai/azure-ai-projects/README.md | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md index ac146425677b..4438d9f6f1a4 100644 --- a/sdk/ai/azure-ai-projects/README.md +++ b/sdk/ai/azure-ai-projects/README.md @@ -373,17 +373,17 @@ These tools work immediately without requiring external connections. ```python - # Set scope to associate the memories with - # You can also use "{{$userId}}"" to take the oid of the request authentication header - scope = "user_123" - - tool = MemorySearchTool( - memory_store_name=memory_store.name, - scope=scope, - update_delay=1, # Wait 1 second of inactivity before updating memories - # In a real application, set this to a higher value like 300 (5 minutes, default) - ) - ``` + # Set scope to associate the memories with + # You can also use "{{$userId}}"" to take the oid of the request authentication header + scope = "user_123" + + tool = MemorySearchTool( + memory_store_name=memory_store.name, + scope=scope, + update_delay=1, # Wait 1 second of inactivity before updating memories + # In a real application, set this to a higher value like 300 (5 minutes, default) + ) + ``` See the full [sample_agent_memory_search.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py) showing how to create an Agent with a memory store, and use it in multiple conversations. 
From 58c30a836b59e5d3aa7f876d4767b22e44f9b4db Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 13 Nov 2025 21:54:57 -0800 Subject: [PATCH 22/24] Fix typo in README.md --- sdk/ai/azure-ai-projects/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md index 4438d9f6f1a4..6a72e975e261 100644 --- a/sdk/ai/azure-ai-projects/README.md +++ b/sdk/ai/azure-ai-projects/README.md @@ -369,7 +369,7 @@ These tools work immediately without requiring external connections. * **Memory Search Tool** - The Memory Store Tool adds Memory to an Agents, allowing the AI model to search for past information related to the current user prompt. + The Memory Store Tool adds Memory to an Agent, allowing the Agent's AI model to search for past information related to the current user prompt. ```python From 9d80501c71d547c8f636f95e7fef0e055fbc0d1d Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Fri, 14 Nov 2025 02:11:48 -0800 Subject: [PATCH 23/24] Address Copilot code review comments --- sdk/ai/azure-ai-projects/.env.template | 1 - sdk/ai/azure-ai-projects/README.md | 2 +- .../samples/agents/tools/sample_agent_memory_search.py | 4 ++-- .../samples/agents/tools/sample_agent_memory_search_async.py | 4 ++-- .../samples/memories/sample_memory_advanced.py | 4 ++-- .../samples/memories/sample_memory_advanced_async.py | 2 +- .../azure-ai-projects/samples/memories/sample_memory_basic.py | 2 +- .../samples/memories/sample_memory_basic_async.py | 2 +- 8 files changed, 10 insertions(+), 11 deletions(-) diff --git a/sdk/ai/azure-ai-projects/.env.template b/sdk/ai/azure-ai-projects/.env.template index ce5aafdf7a35..0caaf2e72cb8 100644 --- a/sdk/ai/azure-ai-projects/.env.template +++ b/sdk/ai/azure-ai-projects/.env.template @@ -25,7 +25,6 @@ AZURE_AI_PROJECTS_AZURE_RESOURCE_GROUP= AZURE_AI_PROJECTS_AZURE_AOAI_ACCOUNT= # Used in Memory Store samples -AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME= AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME= AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME= diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md index 6a72e975e261..07605a59183b 100644 --- a/sdk/ai/azure-ai-projects/README.md +++ b/sdk/ai/azure-ai-projects/README.md @@ -374,7 +374,7 @@ These tools work immediately without requiring external connections. ```python # Set scope to associate the memories with - # You can also use "{{$userId}}"" to take the oid of the request authentication header + # You can also use "{{$userId}}" to take the oid of the request authentication header scope = "user_123" tool = MemorySearchTool( diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py index 28443a922c24..3e004acc6607 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py @@ -26,7 +26,7 @@ Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview page of your Microsoft Foundry portal. 
- 2) AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model for the agent, + 2) AZURE_AI_MODEL_DEPLOYMENT_NAME - The deployment name of the Agent's AI model, as found under the "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. 3) AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model for memory, as found under the "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. @@ -82,7 +82,7 @@ # [START memory_search_tool_declaration] # Set scope to associate the memories with - # You can also use "{{$userId}}"" to take the oid of the request authentication header + # You can also use "{{$userId}}" to take the oid of the request authentication header scope = "user_123" tool = MemorySearchTool( diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py index 0c3df1149a39..e64961f2fb65 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py @@ -26,7 +26,7 @@ Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview page of your Microsoft Foundry portal. - 2) AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model for the agent, + 2) AZURE_AI_MODEL_DEPLOYMENT_NAME - The deployment name of the Agent's AI model, as found under the "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. 3) AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME - The deployment name of the chat model for memory, as found under the "Name" column in the "Models + endpoints" tab in your Microsoft Foundry project. @@ -84,7 +84,7 @@ async def main() -> None: print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") # Set scope to associate the memories with - # You can also use "{{$userId}}"" to take the oid of the request authentication header + # You can also use "{{$userId}}" to take the oid of the request authentication header scope = "user_123" # Create a prompt agent with memory search tool diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py index 277288faff62..37c8a434cb33 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py @@ -7,7 +7,7 @@ """ DESCRIPTION: This sample demonstrates how to interact with the memory store to add and retrieve memory - using the asynchronous AIProjectClient. It uses some additional operations compared + using the synchronous AIProjectClient. It uses some additional operations compared to the basic memory sample. See also /samples/agents/tools/sample_agent_memory_search.py that shows @@ -81,7 +81,7 @@ print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") # Set scope to associate the memories with. - # You can also use "{{$userId}}"" to take the oid of the request authentication header. + # You can also use "{{$userId}}" to take the oid of the request authentication header. 
scope = "user_123" # Extract memories from messages and add them to the memory store diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py index be25e6324920..a23be53f6620 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py @@ -85,7 +85,7 @@ async def main() -> None: print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") # Set scope to associate the memories with. - # You can also use "{{$userId}}"" to take the oid of the request authentication header. + # You can also use "{{$userId}}" to take the oid of the request authentication header. scope = "user_123" # Extract memories from messages and add them to the memory store diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py index a0b35bc70b1d..0e492e1ceeec 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py @@ -78,7 +78,7 @@ print(f" - Embedding model: {memory_store.definition.embedding_model}") # Set scope to associate the memories with - # You can also use "{{$userId}}"" to take the oid of the request authentication header + # You can also use "{{$userId}}" to take the oid of the request authentication header scope = "user_123" # Add memories to the memory store diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py index ac3320eadd3f..36626ab838b7 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py @@ -83,7 +83,7 @@ async def main() -> None: print(f" - Embedding model: {memory_store.definition.embedding_model}") # Set scope to associate the memories with - # You can also use "{{$userId}}"" to take the oid of the request authentication header + # You can also use "{{$userId}}" to take the oid of the request authentication header scope = "user_123" # Add memories to the memory store From d739ee5e2c57fc2368dc94ce3cc5880e588ccb59 Mon Sep 17 00:00:00 2001 From: Paul Hsu Date: Fri, 14 Nov 2025 12:06:18 -0800 Subject: [PATCH 24/24] Add failed to finished state. --- sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index dc22eb07463d..fc88ae38be4a 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -31,7 +31,7 @@ class CustomCredential(CustomCredentialGenerated): """The secret custom credential keys. Required.""" -_FINISHED = frozenset(["completed", "superseded"]) +_FINISHED = frozenset(["completed", "superseded", "failed"]) _FAILED = frozenset(["failed"])