diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml
index 814d1d6d1a1e..a76ebff78bed 100644
--- a/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml
+++ b/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml
@@ -9,6 +9,7 @@ authors = [
 ]
 license = "MIT"
 classifiers = [
+    "Development Status :: 7 - Inactive",
     "Programming Language :: Python",
     "Programming Language :: Python :: 3 :: Only",
     "Programming Language :: Python :: 3",
diff --git a/sdk/agentserver/azure-ai-agentserver-core/CHANGELOG.md b/sdk/agentserver/azure-ai-agentserver-core/CHANGELOG.md
index cfcf2445e256..39d9af1830f3 100644
--- a/sdk/agentserver/azure-ai-agentserver-core/CHANGELOG.md
+++ b/sdk/agentserver/azure-ai-agentserver-core/CHANGELOG.md
@@ -1,7 +1,27 @@
 # Release History
 
-## 1.0.0b1 (2025-11-07)
+## 2.0.0b1 (Unreleased)
 
 ### Features Added
 
-First version
+- `AgentHost` host framework with health probe (`/readiness`), graceful shutdown, and port binding.
+- `TracingHelper` for OpenTelemetry tracing with Azure Monitor and OTLP exporters.
+- Auto-enable tracing when Application Insights or OTLP endpoint is configured.
+- W3C Trace Context propagation and `leaf_customer_span_id` baggage re-parenting.
+- `create_error_response()` utility for standard error envelope responses.
+- `get_logger()` for library-scoped logging.
+- `register_routes()` for pluggable protocol composition.
+- Hypercorn-based ASGI server with HTTP/1.1 support.
+
+### Breaking Changes
+
+- Replaced `ErrorResponse.create()` static method with module-level `create_error_response()` function.
+- Replaced `AgentLogger.get()` static method with module-level `get_logger()` function.
+- Removed `AGENT_LOG_LEVEL` and `AGENT_GRACEFUL_SHUTDOWN_TIMEOUT` environment variable support from `Constants`.
+- Renamed health endpoint from `/healthy` to `/readiness`.
+
+## 1.0.0b1 (2025-11-07)
+
+### Features Added
+
+First version
diff --git a/sdk/agentserver/azure-ai-agentserver-core/LICENSE b/sdk/agentserver/azure-ai-agentserver-core/LICENSE index 63447fd8bbbf..4c3581d3b052 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/LICENSE +++ b/sdk/agentserver/azure-ai-agentserver-core/LICENSE @@ -12,10 +12,10 @@ furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file +SOFTWARE. 
diff --git a/sdk/agentserver/azure-ai-agentserver-core/MANIFEST.in b/sdk/agentserver/azure-ai-agentserver-core/MANIFEST.in index eefbfbed7925..15a42f74dc4b 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/MANIFEST.in +++ b/sdk/agentserver/azure-ai-agentserver-core/MANIFEST.in @@ -2,7 +2,6 @@ include *.md include LICENSE recursive-include tests *.py recursive-include samples *.py *.md -recursive-include doc *.rst *.md include azure/__init__.py include azure/ai/__init__.py include azure/ai/agentserver/__init__.py diff --git a/sdk/agentserver/azure-ai-agentserver-core/README.md b/sdk/agentserver/azure-ai-agentserver-core/README.md index ff60cf460196..01aabf94e1e4 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/README.md +++ b/sdk/agentserver/azure-ai-agentserver-core/README.md @@ -1,105 +1,137 @@ -# Azure AI Agent Server Adapter for Python +# Azure AI AgentHost Core for Python +The `azure-ai-agentserver-core` package provides the foundation host framework for building Azure AI Hosted Agent containers. It handles the protocol-agnostic infrastructure — health probes, graceful shutdown, OpenTelemetry tracing, and ASGI serving — so that protocol packages can focus on their endpoint logic. ## Getting started +### Install the package + ```bash pip install azure-ai-agentserver-core ``` +To enable OpenTelemetry tracing with Azure Monitor and OTLP exporters: + +```bash +pip install azure-ai-agentserver-core[tracing] +``` + +### Prerequisites + +- Python 3.10 or later + ## Key concepts -This is the core package for Azure AI Agent server. It hosts your agent as a container on the cloud. +### AgentHost + +`AgentHost` is the host process for Azure AI Hosted Agent containers. It provides: + +- **Health probe** — `GET /readiness` returns `200 OK` when the server is ready. +- **Graceful shutdown** — On `SIGTERM` the server drains in-flight requests (default 30 s timeout) before exiting. 
+- **OpenTelemetry tracing** — Automatic span creation with Azure Monitor and OTLP export when configured. +- **Hypercorn ASGI server** — Serves on `0.0.0.0:${PORT:-8088}` with HTTP/1.1. + +Protocol packages (e.g. `azure-ai-agentserver-invocations`) plug into `AgentHost` by calling `register_routes()` to add their endpoints. -You can talk to your agent using azure-ai-project sdk. +### Environment variables +| Variable | Description | Default | +|---|---|---| +| `PORT` | Listen port | `8088` | +| `FOUNDRY_AGENT_NAME` | Agent name (used in tracing) | `""` | +| `FOUNDRY_AGENT_VERSION` | Agent version (used in tracing) | `""` | +| `FOUNDRY_PROJECT_ENDPOINT` | Azure AI Foundry project endpoint | `""` | +| `FOUNDRY_PROJECT_ARM_ID` | Foundry project ARM resource ID (used in tracing) | `""` | +| `FOUNDRY_AGENT_SESSION_ID` | Default session ID when not provided per-request | `""` | +| `APPLICATIONINSIGHTS_CONNECTION_STRING` | Azure Monitor connection string | — | +| `OTEL_EXPORTER_OTLP_ENDPOINT` | OTLP collector endpoint | — | ## Examples -If your agent is not built using a supported framework such as LangGraph and Agent-framework, you can still make it compatible with Microsoft AI Foundry by manually implementing the predefined interface. +`AgentHost` is typically used with a protocol package. 
The simplest setup with the invocations protocol: ```python -import datetime +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler +from starlette.responses import JSONResponse -from azure.ai.agentserver.core import FoundryCBAgent -from azure.ai.agentserver.core.models import ( - CreateResponse, - Response as OpenAIResponse, -) -from azure.ai.agentserver.core.models.projects import ( - ItemContentOutputText, - ResponsesAssistantMessageItemResource, - ResponseTextDeltaEvent, - ResponseTextDoneEvent, -) +server = AgentHost() +invocations = InvocationHandler(server) + +@invocations.invoke_handler +async def handle(request): + body = await request.json() + return JSONResponse({"greeting": f"Hello, {body['name']}!"}) + +server.run() +``` +### Using AgentHost standalone -def stream_events(text: str): - assembled = "" - for i, token in enumerate(text.split(" ")): - piece = token if i == len(text.split(" ")) - 1 else token + " " - assembled += piece - yield ResponseTextDeltaEvent(delta=piece) - # Done with text - yield ResponseTextDoneEvent(text=assembled) - - -async def agent_run(request_body: CreateResponse): - agent = request_body.agent - print(f"agent:{agent}") - - if request_body.stream: - return stream_events("I am mock agent with no intelligence in stream mode.") - - # Build assistant output content - output_content = [ - ItemContentOutputText( - text="I am mock agent with no intelligence.", - annotations=[], - ) - ] - - response = OpenAIResponse( - metadata={}, - temperature=0.0, - top_p=0.0, - user="me", - id="id", - created_at=datetime.datetime.now(), - output=[ - ResponsesAssistantMessageItemResource( - status="completed", - content=output_content, - ) - ], - ) - return response - - -my_agent = FoundryCBAgent() -my_agent.agent_run = agent_run - -if __name__ == "__main__": - my_agent.run() +For custom protocol implementations, use `AgentHost` directly and register your own routes: +```python +from 
azure.ai.agentserver.core import AgentHost +from starlette.requests import Request +from starlette.responses import JSONResponse +from starlette.routing import Route + +async def my_endpoint(request: Request): + return JSONResponse({"status": "ok"}) + +server = AgentHost() +server.register_routes([Route("/my-endpoint", my_endpoint, methods=["POST"])]) +server.run() +``` + +### Shutdown handler + +Register a cleanup function that runs during graceful shutdown: + +```python +server = AgentHost() + +@server.shutdown_handler +async def on_shutdown(): + # Close database connections, flush buffers, etc. + pass +``` + +### Configuring tracing + +Tracing is enabled automatically when an Application Insights connection string is available: + +```python +server = AgentHost( + application_insights_connection_string="InstrumentationKey=...", +) +``` + +Or via environment variable: + +```bash +export APPLICATIONINSIGHTS_CONNECTION_STRING="InstrumentationKey=..." +python my_agent.py ``` ## Troubleshooting -First run your agent with azure-ai-agentserver-core locally. +### Logging -If it works on local by failed on cloud. Check your logs in the application insight connected to your Azure AI Foundry Project. +Set the log level to `DEBUG` for detailed diagnostics: +```python +server = AgentHost(log_level="DEBUG") +``` ### Reporting issues -To report an issue with the client library, or request additional features, please open a GitHub issue [here](https://github.com/Azure/azure-sdk-for-python/issues). Mention the package name "azure-ai-agents" in the title or content. - +To report an issue with the client library, or request additional features, please open a GitHub issue [here](https://github.com/Azure/azure-sdk-for-python/issues). ## Next steps -Please visit [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-core/samples) folder. 
There are several cases for you to build your agent with azure-ai-agentserver - +- Install [`azure-ai-agentserver-invocations`](https://pypi.org/project/azure-ai-agentserver-invocations/) to add the invocation protocol endpoints. +- See the [container image spec](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver) for the full hosted agent contract. ## Contributing @@ -117,3 +149,5 @@ This project has adopted the [Microsoft Open Source Code of Conduct][code_of_conduct]. For more information, see the Code of Conduct FAQ or contact opencode@microsoft.com with any additional questions or comments. + +[code_of_conduct]: https://opensource.microsoft.com/codeofconduct/ diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/__init__.py index d55ccad1f573..8db66d3d0f0f 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/__init__.py index d55ccad1f573..8db66d3d0f0f 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/__init__.py index d55ccad1f573..8db66d3d0f0f 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/__init__.py +++ 
b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py index 895074d32ae3..65174ea22d53 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py @@ -1,14 +1,35 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- +"""Azure AI AgentHost core framework. + +Provides the :class:`AgentHost` host and shared utilities for +building Azure AI Hosted Agent containers. + +Public API:: + + from azure.ai.agentserver.core import ( + get_logger, + AgentHost, + Constants, + create_error_response, + TracingHelper, + ) +""" __path__ = __import__("pkgutil").extend_path(__path__, __name__) +from ._base import AgentHost +from ._constants import Constants +from ._errors import create_error_response +from ._logger import get_logger +from ._tracing import TracingHelper from ._version import VERSION -from .logger import configure as config_logging -from .server.base import FoundryCBAgent -from .server.common.agent_run_context import AgentRunContext - -config_logging() -__all__ = ["FoundryCBAgent", "AgentRunContext"] +__all__ = [ + "get_logger", + "AgentHost", + "Constants", + "create_error_response", + "TracingHelper", +] __version__ = VERSION diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_base.py new file mode 100644 index 000000000000..41d223df328d --- /dev/null +++ 
b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_base.py @@ -0,0 +1,320 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +import asyncio # pylint: disable=do-not-import-asyncio +import contextlib +import logging +from collections.abc import AsyncGenerator, Awaitable, Callable # pylint: disable=import-error +from typing import Any, Optional + +from starlette.applications import Starlette +from starlette.middleware import Middleware +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.requests import Request +from starlette.responses import Response +from starlette.routing import Route + +from . import _config +from ._logger import get_logger +from ._tracing import TracingHelper + +logger = get_logger() + +# Pre-built health-check response to avoid per-request allocation. +_HEALTHY_BODY = b'{"status":"healthy"}' + +# Server identity header value (name only — no version to avoid information disclosure). +_PLATFORM_SERVER_VALUE = "azure-ai-agentserver-core" + +# Sentinel attribute name set on the console handler to prevent adding duplicates +# across multiple AgentHost instantiations. +_CONSOLE_HANDLER_ATTR = "_agentserver_console" + + +class _PlatformHeaderMiddleware(BaseHTTPMiddleware): + """Middleware that adds x-platform-server identity header to all responses.""" + + async def dispatch(self, request: Request, call_next): # type: ignore[no-untyped-def, override] + response = await call_next(request) + response.headers["x-platform-server"] = _PLATFORM_SERVER_VALUE + return response + + +class AgentHost: + """Agent server host framework with built-in protocol endpoints. 
+ + Provides the protocol-agnostic infrastructure required by all Azure AI + Hosted Agent containers: + + - Health probe (``GET /readiness``) + - Graceful shutdown handling (SIGTERM, configurable timeout) + - OpenTelemetry tracing with Azure Monitor and OTLP exporters + - Hypercorn-based ASGI server with HTTP/1.1 + + Protocol packages (e.g. ``azure-ai-agentserver-invocations``) plug into + this host by calling :meth:`register_routes` to add their endpoints. + + Usage:: + + from azure.ai.agentserver.core import AgentHost + from azure.ai.agentserver.invocations import InvocationHandler + + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request): + return JSONResponse({"ok": True}) + + server.run() + + :param application_insights_connection_string: Application Insights + connection string for exporting traces and logs to Azure Monitor. + When *None* (default) the ``APPLICATIONINSIGHTS_CONNECTION_STRING`` + env var is consulted. Tracing is automatically enabled when a + connection string is available. Requires ``opentelemetry-sdk`` and + ``azure-monitor-opentelemetry-exporter`` (included in the + ``[tracing]`` extras group). + :type application_insights_connection_string: Optional[str] + :param graceful_shutdown_timeout: Seconds to wait for in-flight requests to + complete after receiving SIGTERM / shutdown signal. When *None* (default) + the default is 30 seconds. Set to ``0`` to disable the drain period. + :type graceful_shutdown_timeout: Optional[int] + :param log_level: Library log level (e.g. ``"DEBUG"``, ``"INFO"``). When + *None* (default) the default is ``"INFO"``. 
+ :type log_level: Optional[str] + """ + + def __init__( + self, + *, + application_insights_connection_string: Optional[str] = None, + graceful_shutdown_timeout: Optional[int] = None, + log_level: Optional[str] = None, + ) -> None: + # Shutdown handler slot (server-level lifecycle) ------------------- + self._shutdown_fn: Optional[Callable[[], Awaitable[None]]] = None + + # Logging ---------------------------------------------------------- + resolved_level = _config.resolve_log_level(log_level) + logger.setLevel(resolved_level) + if not any(getattr(h, _CONSOLE_HANDLER_ATTR, False) for h in logger.handlers): + _console = logging.StreamHandler() + _console.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(name)s: %(message)s")) + setattr(_console, _CONSOLE_HANDLER_ATTR, True) + logger.addHandler(_console) + + # Tracing — enabled when App Insights or OTLP endpoint is configured + _conn_str = _config.resolve_appinsights_connection_string(application_insights_connection_string) + _otlp_endpoint = _config.resolve_otlp_endpoint() + _tracing_on = bool(_conn_str or _otlp_endpoint) + self._tracing: Optional[TracingHelper] = None + if _tracing_on: + try: + self._tracing = TracingHelper(connection_string=_conn_str) + except Exception: # pylint: disable=broad-exception-caught + logger.warning("Failed to initialize tracing; continuing without tracing.", exc_info=True) + self._tracing = None + + # Timeouts --------------------------------------------------------- + self._graceful_shutdown_timeout = _config.resolve_graceful_shutdown_timeout( + graceful_shutdown_timeout + ) + + # Protocol routes (registered by protocol packages via register_routes) + self._protocol_routes: list[Route] = [] + + # App is built lazily on first access + self._app: Optional[Starlette] = None + + # ------------------------------------------------------------------ + # ASGI app accessor (lazy build) + # ------------------------------------------------------------------ + + @property + def 
app(self) -> Starlette: + """Return the Starlette ASGI application, building it on first access. + + :return: The configured Starlette application. + :rtype: Starlette + """ + if self._app is None: + self._build_app() + return self._app # type: ignore[return-value] # _build_app sets _app + + # ------------------------------------------------------------------ + # Tracing accessor (for protocol packages) + # ------------------------------------------------------------------ + + @property + def tracing(self) -> Optional[TracingHelper]: + """Return the tracing helper, or *None* when tracing is disabled. + + :return: The tracing helper instance. + :rtype: Optional[TracingHelper] + """ + return self._tracing + + # ------------------------------------------------------------------ + # Shutdown handler (server-level lifecycle) + # ------------------------------------------------------------------ + + def shutdown_handler(self, fn: Callable[[], Awaitable[None]]) -> Callable[[], Awaitable[None]]: + """Register a function as the shutdown handler. + + :param fn: Async function called during graceful shutdown. + :type fn: Callable[[], Awaitable[None]] + :return: The original function (unmodified). + :rtype: Callable[[], Awaitable[None]] + """ + self._shutdown_fn = fn + return fn + + async def _dispatch_shutdown(self) -> None: + """Dispatch to the registered shutdown handler, or no-op.""" + if self._shutdown_fn is not None: + await self._shutdown_fn() + + # ------------------------------------------------------------------ + # Protocol route registration + # ------------------------------------------------------------------ + + def register_routes(self, routes: list[Route]) -> None: + """Register additional routes from a protocol package. + + Invalidates the cached Starlette app so it will be rebuilt with the + new routes on next access. Called by protocol packages (e.g. + ``InvocationHandler``) during setup. + + :param routes: List of Starlette Route objects to add. 
+ :type routes: list[Route] + """ + if not routes: + return + if self._app is not None: + logger.warning( + "register_routes() called after the ASGI app was already built. " + "The new routes will be included on the next app rebuild, but " + "will NOT affect an already-running server." + ) + self._protocol_routes.extend(routes) + self._app = None # invalidate — rebuilt lazily via .app property + + # ------------------------------------------------------------------ + # Run helpers + # ------------------------------------------------------------------ + + def _build_hypercorn_config(self, host: str, port: int) -> object: + """Create a Hypercorn config with resolved host, port and timeouts. + + :param host: Network interface to bind. + :type host: str + :param port: Port to bind. + :type port: int + :return: Configured Hypercorn config. + :rtype: hypercorn.config.Config + """ + from hypercorn.config import Config as HypercornConfig + + config = HypercornConfig() + config.bind = [f"{host}:{port}"] + config.graceful_timeout = float(self._graceful_shutdown_timeout) + return config + + def run(self, host: str = "0.0.0.0", port: Optional[int] = None) -> None: + """Start the server synchronously. + + Uses Hypercorn as the ASGI server, which supports HTTP/1.1 and HTTP/2. + + :param host: Network interface to bind. Defaults to ``"0.0.0.0"`` + (all interfaces). + :type host: str + :param port: Port to bind. Defaults to ``PORT`` env var or 8088. + :type port: Optional[int] + """ + from hypercorn.asyncio import serve as _hypercorn_serve + + resolved_port = _config.resolve_port(port) + logger.info("AgentHost starting on %s:%s", host, resolved_port) + config = self._build_hypercorn_config(host, resolved_port) + asyncio.run(_hypercorn_serve(self.app, config)) # type: ignore[arg-type] # Starlette is ASGI-compatible + + async def run_async(self, host: str = "0.0.0.0", port: Optional[int] = None) -> None: + """Start the server asynchronously (awaitable). 
+ + Uses Hypercorn as the ASGI server, which supports HTTP/1.1 and HTTP/2. + + :param host: Network interface to bind. Defaults to ``"0.0.0.0"`` + (all interfaces). + :type host: str + :param port: Port to bind. Defaults to ``PORT`` env var or 8088. + :type port: Optional[int] + """ + from hypercorn.asyncio import serve as _hypercorn_serve + + resolved_port = _config.resolve_port(port) + logger.info("AgentHost starting on %s:%s (async)", host, resolved_port) + config = self._build_hypercorn_config(host, resolved_port) + await _hypercorn_serve(self.app, config) # type: ignore[arg-type] # Starlette is ASGI-compatible + + # ------------------------------------------------------------------ + # Private: app construction + # ------------------------------------------------------------------ + + def _build_app(self) -> None: + """Construct the Starlette ASGI application with all routes.""" + + @contextlib.asynccontextmanager + async def _lifespan(_app: Starlette) -> AsyncGenerator[None, None]: # noqa: RUF029 + logger.info("AgentHost started") + yield + + # --- SHUTDOWN: runs once when the server is stopping --- + logger.info( + "AgentHost shutting down (graceful timeout=%ss)", + self._graceful_shutdown_timeout, + ) + if self._graceful_shutdown_timeout == 0: + logger.info("Graceful shutdown drain period disabled (timeout=0)") + else: + try: + await asyncio.wait_for( + self._dispatch_shutdown(), + timeout=self._graceful_shutdown_timeout, + ) + except asyncio.TimeoutError: + logger.warning( + "on_shutdown did not complete within %ss timeout", + self._graceful_shutdown_timeout, + ) + except Exception: # pylint: disable=broad-exception-caught + logger.exception("Error in on_shutdown") + + # All routes: protocol routes + health + routes: list[Any] = list(self._protocol_routes) + routes.append( + Route("/readiness", self._readiness_endpoint, methods=["GET"], name="readiness"), + ) + + self._app = Starlette( + routes=routes, + lifespan=_lifespan, + 
middleware=[Middleware(_PlatformHeaderMiddleware)], + ) + + # ------------------------------------------------------------------ + # Health endpoint + # ------------------------------------------------------------------ + + async def _readiness_endpoint(self, request: Request) -> Response: # pylint: disable=unused-argument + """GET /readiness — readiness check endpoint. + + Return ``200 OK`` when the process is alive and ready to serve traffic. + The hosting platform maps this to its readiness probe. + + :param request: The incoming Starlette request. + :type request: Request + :return: 200 OK response. + :rtype: Response + """ + return Response(_HEALTHY_BODY, media_type="application/json") diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_config.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_config.py new file mode 100644 index 000000000000..a57ddf68b262 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_config.py @@ -0,0 +1,198 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Configuration resolution helpers for AgentHost hosting. + +Each ``resolve_*`` function follows the same hierarchy: +1. Explicit argument (if not *None*) +2. Environment variable +3. Built-in default + +A value of ``0`` conventionally disables the corresponding feature. + +Invalid environment variable values raise ``ValueError`` immediately so +misconfiguration is surfaced at startup rather than silently masked. +""" +import os +from typing import Optional + +from ._constants import Constants + + +def _parse_int_env(var_name: str) -> Optional[int]: + """Parse an integer environment variable, raising on invalid values. + + :param var_name: Name of the environment variable. + :type var_name: str + :return: The parsed integer or None if the variable is not set. 
+ :rtype: Optional[int] + :raises ValueError: If the variable is set but cannot be parsed as an integer. + """ + raw = os.environ.get(var_name) + if raw is None: + return None + try: + return int(raw) + except ValueError as exc: + raise ValueError( + f"Invalid value for {var_name}: {raw!r} (expected an integer)" + ) from exc + + +def _require_int(name: str, value: object) -> int: + """Validate that *value* is an integer. + + :param name: Human-readable parameter/env-var name for the error message. + :type name: str + :param value: The value to validate. + :type value: object + :return: The value cast to int. + :rtype: int + :raises ValueError: If *value* is not an integer. + """ + if isinstance(value, bool) or not isinstance(value, int): + raise ValueError( + f"Invalid value for {name}: {value!r} (expected an integer)" + ) + return value + + +def _validate_port(value: int, source: str) -> int: + """Validate that a port number is within the valid range. + + :param value: The port number to validate. + :type value: int + :param source: Human-readable source name for the error message. + :type source: str + :return: The validated port number. + :rtype: int + :raises ValueError: If the port is outside 1-65535. + """ + if not 1 <= value <= 65535: + raise ValueError( + f"Invalid value for {source}: {value} (expected 1-65535)" + ) + return value + + +def resolve_port(port: Optional[int]) -> int: + """Resolve the server port from argument, env var, or default. + + Resolution order: explicit *port* → ``PORT`` env var → ``8088``. + + :param port: Explicitly requested port or None. + :type port: Optional[int] + :return: The resolved port number. + :rtype: int + :raises ValueError: If the port value is not a valid integer or is outside 1-65535. 
+ """ + if port is not None: + return _validate_port(_require_int("port", port), "port") + env_port = _parse_int_env(Constants.PORT) + if env_port is not None: + return _validate_port(env_port, Constants.PORT) + return Constants.DEFAULT_PORT + + +_DEFAULT_GRACEFUL_SHUTDOWN_TIMEOUT = 30 + + +def resolve_graceful_shutdown_timeout(timeout: Optional[int]) -> int: + """Resolve the graceful shutdown timeout from argument or default. + + :param timeout: Explicitly requested timeout or None. + :type timeout: Optional[int] + :return: The resolved timeout in seconds (default 30). + :rtype: int + """ + if timeout is not None: + return max(0, _require_int("graceful_shutdown_timeout", timeout)) + return _DEFAULT_GRACEFUL_SHUTDOWN_TIMEOUT + + +_VALID_LOG_LEVELS = ("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL") + + +def resolve_appinsights_connection_string( + connection_string: Optional[str], +) -> Optional[str]: + """Resolve the Application Insights connection string. + + Resolution order: + + 1. Explicit *connection_string* argument (if not *None*). + 2. ``APPLICATIONINSIGHTS_CONNECTION_STRING`` env var (standard Azure + Monitor convention). + 3. *None* — no connection string available. + + :param connection_string: Explicitly provided connection string or None. + :type connection_string: Optional[str] + :return: The resolved connection string, or None. + :rtype: Optional[str] + """ + if connection_string is not None: + return connection_string + return os.environ.get( + Constants.APPLICATIONINSIGHTS_CONNECTION_STRING + ) + + +def resolve_log_level(level: Optional[str]) -> str: + """Resolve the library log level from argument or default (``INFO``). + + :param level: Explicitly requested level (e.g. ``"DEBUG"``) or None. + :type level: Optional[str] + :return: Validated, upper-cased log level string. + :rtype: str + :raises ValueError: If the value is not one of DEBUG/INFO/WARNING/ERROR/CRITICAL. 
+ """ + if level is not None: + normalized = level.upper() + else: + normalized = "INFO" + if normalized not in _VALID_LOG_LEVELS: + raise ValueError( + f"Invalid log level: {normalized!r} " + f"(expected one of {', '.join(_VALID_LOG_LEVELS)})" + ) + return normalized + + +def resolve_agent_name() -> str: + """Resolve the agent name from the ``FOUNDRY_AGENT_NAME`` environment variable. + + :return: The agent name, or an empty string if not set. + :rtype: str + """ + return os.environ.get(Constants.FOUNDRY_AGENT_NAME, "") + + +def resolve_agent_version() -> str: + """Resolve the agent version from the ``FOUNDRY_AGENT_VERSION`` environment variable. + + :return: The agent version, or an empty string if not set. + :rtype: str + """ + return os.environ.get(Constants.FOUNDRY_AGENT_VERSION, "") + + +def resolve_project_id() -> str: + """Resolve the Foundry project ARM resource ID from the ``FOUNDRY_PROJECT_ARM_ID`` environment variable. + + The UX queries spans using this ID, so it must be present in trace + attributes for portal integration. + + :return: The project ARM resource ID, or an empty string if not set. + :rtype: str + """ + return os.environ.get(Constants.FOUNDRY_PROJECT_ARM_ID, "") + + +def resolve_otlp_endpoint() -> Optional[str]: + """Resolve the OTLP exporter endpoint from the ``OTEL_EXPORTER_OTLP_ENDPOINT`` environment variable. + + :return: The OTLP endpoint URL, or None if not set or empty. 
+ :rtype: Optional[str] + """ + value = os.environ.get(Constants.OTEL_EXPORTER_OTLP_ENDPOINT, "") + return value if value else None diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_constants.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_constants.py new file mode 100644 index 000000000000..8d0f1aeb16e4 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_constants.py @@ -0,0 +1,24 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- + + +class Constants: + """Well-known environment variables and defaults for AgentHost hosting.""" + + # Foundry identity + FOUNDRY_AGENT_NAME = "FOUNDRY_AGENT_NAME" + FOUNDRY_AGENT_VERSION = "FOUNDRY_AGENT_VERSION" + FOUNDRY_PROJECT_ENDPOINT = "FOUNDRY_PROJECT_ENDPOINT" + FOUNDRY_PROJECT_ARM_ID = "FOUNDRY_PROJECT_ARM_ID" + + # Network + PORT = "PORT" + DEFAULT_PORT = 8088 + + # Tracing + APPLICATIONINSIGHTS_CONNECTION_STRING = "APPLICATIONINSIGHTS_CONNECTION_STRING" + OTEL_EXPORTER_OTLP_ENDPOINT = "OTEL_EXPORTER_OTLP_ENDPOINT" + + # Session identity + FOUNDRY_AGENT_SESSION_ID = "FOUNDRY_AGENT_SESSION_ID" diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_errors.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_errors.py new file mode 100644 index 000000000000..c5b1c9e01efe --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_errors.py @@ -0,0 +1,63 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Standardized error response builder for AgentHost. 
+ +Every error returned by the framework uses the shape:: + + { + "error": { + "code": "...", // required – machine-readable error code + "message": "...", // required – human-readable description + "type": "...", // optional – error type classification + "details": [ ... ] // optional – child errors + } + } + +Usage:: + + from azure.ai.agentserver.core import create_error_response + + return create_error_response("not_found", "Resource missing", status_code=404) +""" +from typing import Any, Optional + +from starlette.responses import JSONResponse + + +def create_error_response( + code: str, + message: str, + *, + status_code: int, + error_type: Optional[str] = None, + details: Optional[list[dict[str, Any]]] = None, + headers: Optional[dict[str, str]] = None, +) -> JSONResponse: + """Build a ``JSONResponse`` with the standard error envelope. + + :param code: Machine-readable error code (e.g. ``"internal_error"``). + :type code: str + :param message: Human-readable error message. + :type message: str + :keyword status_code: HTTP status code for the response. + :paramtype status_code: int + :keyword error_type: Optional error type classification string. When + provided, included as ``"type"`` in the error body. + :paramtype error_type: Optional[str] + :keyword details: Child error objects, each with at least ``code`` and + ``message`` keys. + :paramtype details: Optional[list[dict[str, Any]]] + :keyword headers: Extra HTTP headers to include on the response. + :paramtype headers: Optional[dict[str, str]] + :return: A ready-to-send JSON error response. 
+ :rtype: JSONResponse + """ + body: dict[str, Any] = {"code": code, "message": message} + if error_type is not None: + body["type"] = error_type + if details is not None: + body["details"] = details + return JSONResponse( + {"error": body}, status_code=status_code, headers=headers + ) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_logger.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_logger.py new file mode 100644 index 000000000000..c1612b9b3ae6 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_logger.py @@ -0,0 +1,22 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Logging facade for AgentHost. + +Usage:: + + from azure.ai.agentserver.core import get_logger + + logger = get_logger() + logger.info("Processing request") +""" +import logging + + +def get_logger() -> logging.Logger: + """Return the library-scoped logger. + + :return: Logger instance for ``azure.ai.agentserver``. + :rtype: logging.Logger + """ + return logging.getLogger("azure.ai.agentserver") diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_tracing.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_tracing.py new file mode 100644 index 000000000000..c182e2bc352f --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_tracing.py @@ -0,0 +1,883 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""OpenTelemetry tracing for AgentHost. + +Tracing is automatically enabled when an Application Insights connection +string (``APPLICATIONINSIGHTS_CONNECTION_STRING``) or an OTLP exporter +endpoint (``OTEL_EXPORTER_OTLP_ENDPOINT``) is available. 
+ +Requires ``opentelemetry-api`` to be installed:: + + pip install azure-ai-agentserver-core[tracing] + +If the package is not installed, tracing silently becomes a no-op. + +When an Application Insights connection string is available (via constructor +or ``APPLICATIONINSIGHTS_CONNECTION_STRING`` env var), traces **and** logs are +automatically exported to Azure Monitor. This requires the additional +``opentelemetry-sdk`` and ``azure-monitor-opentelemetry-exporter`` packages +(both included in the ``[tracing]`` extras group). + +When the platform sets ``OTEL_EXPORTER_OTLP_ENDPOINT``, an OTLP exporter is +also registered for traces and logs. +""" +import logging +from collections.abc import AsyncIterable, AsyncIterator, Mapping # pylint: disable=import-error +from contextlib import contextmanager +from typing import TYPE_CHECKING, Any, Iterator, Optional, Union + +from . import _config +from ._logger import get_logger + +#: Starlette's ``Content`` type — the element type for streaming bodies. +_Content = Union[str, bytes, memoryview] + +#: W3C Trace Context header names used for distributed trace propagation. 
+_W3C_HEADERS = ("traceparent", "tracestate") + + +# ------------------------------------------------------------------ +# GenAI semantic convention attribute keys +# ------------------------------------------------------------------ +_ATTR_SERVICE_NAME = "service.name" +_ATTR_GEN_AI_SYSTEM = "gen_ai.system" +_ATTR_GEN_AI_PROVIDER_NAME = "gen_ai.provider.name" +_ATTR_GEN_AI_AGENT_ID = "gen_ai.agent.id" +_ATTR_GEN_AI_AGENT_NAME = "gen_ai.agent.name" +_ATTR_GEN_AI_AGENT_VERSION = "gen_ai.agent.version" +_ATTR_GEN_AI_RESPONSE_ID = "gen_ai.response.id" +_ATTR_GEN_AI_OPERATION_NAME = "gen_ai.operation.name" +_ATTR_GEN_AI_CONVERSATION_ID = "gen_ai.conversation.id" + +# Foundry project identity +_ATTR_FOUNDRY_PROJECT_ID = "microsoft.foundry.project.id" + +# Constant values +_SERVICE_NAME_VALUE = "azure.ai.agentserver" +_GEN_AI_SYSTEM_VALUE = "azure.ai.agentserver" +_GEN_AI_PROVIDER_NAME_VALUE = "AzureAI Hosted Agents" + +logger = get_logger() + +_HAS_OTEL = False +_HAS_BAGGAGE = False +try: + from opentelemetry import context as _otel_context, trace + from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator + + _HAS_OTEL = True + try: + from opentelemetry import baggage as _otel_baggage + + _HAS_BAGGAGE = True + except ImportError: + pass +except ImportError: + if TYPE_CHECKING: + from opentelemetry import trace + from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator + + +class TracingHelper: + """Lightweight wrapper around OpenTelemetry. + + Only instantiate when tracing is enabled. If ``opentelemetry-api`` is + not installed, a warning is logged and all methods become no-ops. + + When *connection_string* is provided, a :class:`TracerProvider` with an + Azure Monitor exporter is configured globally and log records from the + ``azure.ai.agentserver`` logger are forwarded to Application Insights. + This requires ``opentelemetry-sdk`` and + ``azure-monitor-opentelemetry-exporter``. 
+ """ + + def __init__( + self, + connection_string: Optional[str] = None, + ) -> None: + self._enabled = _HAS_OTEL + self._tracer: Any = None + self._propagator: Any = None + + # Resolve agent identity from environment variables. + self._agent_name = _config.resolve_agent_name() + self._agent_version = _config.resolve_agent_version() + self._project_id = _config.resolve_project_id() + + # gen_ai.agent.id format: + # "{name}:{version}" when both present, "{name}" when only name, "" otherwise + if self._agent_name and self._agent_version: + self._agent_id = f"{self._agent_name}:{self._agent_version}" + elif self._agent_name: + self._agent_id = self._agent_name + else: + self._agent_id = "" + + if not self._enabled: + logger.warning( + "Tracing was enabled but opentelemetry-api is not installed. " + "Install it with: pip install azure-ai-agentserver-core[tracing]" + ) + return + + # Create OTel resource once for all exporters + resource = _create_resource() + + # Ensure a single TracerProvider exists for all exporters. + # Create it once up front so that both Azure Monitor and OTLP + # exporters add processors to the same provider, regardless of + # which combination is configured or the order they are set up. + trace_provider = _ensure_trace_provider(resource) + + # Register enrichment processor so Foundry identity attributes + # appear on ALL spans (including those from underlying frameworks). 
+ if trace_provider is not None: + enrichment = _FoundryEnrichmentSpanProcessor( + agent_name=self._agent_name or None, + agent_version=self._agent_version or None, + agent_id=self._agent_id or None, + project_id=self._project_id or None, + ) + trace_provider.add_span_processor(enrichment) + + if connection_string: + self._setup_azure_monitor( + connection_string, resource, trace_provider) + + # OTLP exporter + otlp_endpoint = _config.resolve_otlp_endpoint() + if otlp_endpoint: + self._setup_otlp_export(otlp_endpoint, resource, trace_provider) + + self._tracer = trace.get_tracer("azure.ai.agentserver") + self._propagator = TraceContextTextMapPropagator() + + # ------------------------------------------------------------------ + # Azure Monitor auto-configuration + # ------------------------------------------------------------------ + + def _extract_context( + self, + carrier: Optional[dict[str, str]], + ) -> Any: + """Extract parent trace context from a W3C carrier dict. + + Uses the standard ``traceparent`` / ``tracestate`` headers via + the OpenTelemetry :class:`TraceContextTextMapPropagator`. + + :param carrier: W3C trace-context headers or None. + :type carrier: Optional[dict[str, str]] + :return: The extracted OTel context, or None. + :rtype: Any + """ + if not carrier or self._propagator is None: + return None + + return self._propagator.extract(carrier=carrier) + + @staticmethod + def _setup_azure_monitor(connection_string: str, resource: Any, trace_provider: Any) -> None: + """Configure global TracerProvider and LoggerProvider for App Insights. + + Sets up ``AzureMonitorTraceExporter`` so spans are exported to + Application Insights, and ``AzureMonitorLogExporter`` so log records + from the ``azure.ai.agentserver`` namespace are forwarded. + + If the required packages are not installed, a warning is logged and + export is silently skipped — span creation still works via the + default no-op or user-configured provider. 
+
+        :param connection_string: Application Insights connection string.
+        :type connection_string: str
+        :param resource: Pre-created OTel resource, or None.
+        :type resource: Any
+        :param trace_provider: The shared TracerProvider, or None.
+        :type trace_provider: Any
+        """
+        if resource is None:
+            return
+        _setup_trace_export(trace_provider, connection_string)
+        _setup_log_export(resource, connection_string)
+
+    @staticmethod
+    def _setup_otlp_export(endpoint: str, resource: Any, trace_provider: Any) -> None:
+        """Configure OTLP exporter for traces and logs.
+
+        Per container-image-spec, when ``OTEL_EXPORTER_OTLP_ENDPOINT``
+        is set, the container must register an OTLP exporter.
+
+        :param endpoint: The OTLP collector endpoint URL.
+        :type endpoint: str
+        :param resource: Pre-created OTel resource, or None.
+        :type resource: Any
+        :param trace_provider: The shared TracerProvider, or None.
+        :type trace_provider: Any
+        """
+        if resource is None:
+            return
+        _setup_otlp_trace_export(trace_provider, endpoint)
+        _setup_otlp_log_export(resource, endpoint)
+
+    # ------------------------------------------------------------------
+    # Span naming and attribute helpers (shared by all protocols)
+    # ------------------------------------------------------------------
+
+    def span_name(self, span_operation: str) -> str:
+        """Build a span name using the operation and agent label.
+
+        Per invocation-protocol-spec:
+        ``"invoke_agent {Name}:{Version}"`` or ``"invoke_agent {Name}"``
+        or ``"invoke_agent"``.
+
+        :param span_operation: The span operation (e.g. ``"invoke_agent"``).
+            This becomes the first token of the OTel span name.
+        :type span_operation: str
+        :return: ``"{span_operation} {agent_id}"`` when an agent ID is
+            configured, otherwise just ``"{span_operation}"``.
+        :rtype: str
+        """
+        if self._agent_id:
+            return f"{span_operation} {self._agent_id}"
+        return span_operation
+
+    def build_span_attrs(
+        self,
+        invocation_id: str,
+        session_id: str,
+        operation_name: Optional[str] = None,
+    ) -> dict[str, str]:
+        """Build GenAI semantic convention span attributes.
+
+        These attributes are common across all protocol heads.
+        Per invocation-protocol-spec.
+
+        :param invocation_id: The invocation/request ID for this request.
+        :type invocation_id: str
+        :param session_id: The session ID (empty string if absent).
+        :type session_id: str
+        :param operation_name: Optional ``gen_ai.operation.name`` value
+            (e.g. ``"invoke_agent"``). Omitted from the dict when *None*.
+        :type operation_name: Optional[str]
+        :return: Span attribute dict.
+        :rtype: dict[str, str]
+        """
+        attrs: dict[str, str] = {
+            # Identity & GenAI convention tags
+            _ATTR_SERVICE_NAME: _SERVICE_NAME_VALUE,
+            _ATTR_GEN_AI_SYSTEM: _GEN_AI_SYSTEM_VALUE,
+            _ATTR_GEN_AI_PROVIDER_NAME: _GEN_AI_PROVIDER_NAME_VALUE,
+            _ATTR_GEN_AI_RESPONSE_ID: invocation_id,
+            _ATTR_GEN_AI_AGENT_ID: self._agent_id,
+        }
+        if self._agent_name:
+            attrs[_ATTR_GEN_AI_AGENT_NAME] = self._agent_name
+        if self._agent_version:
+            attrs[_ATTR_GEN_AI_AGENT_VERSION] = self._agent_version
+        if operation_name:
+            attrs[_ATTR_GEN_AI_OPERATION_NAME] = operation_name
+        if session_id:
+            attrs[_ATTR_GEN_AI_CONVERSATION_ID] = session_id
+        if self._project_id:
+            attrs[_ATTR_FOUNDRY_PROJECT_ID] = self._project_id
+        return attrs
+
+    @contextmanager
+    def span(
+        self,
+        name: str,
+        attributes: Optional[dict[str, str]] = None,
+        carrier: Optional[dict[str, str]] = None,
+    ) -> Iterator[Any]:
+        """Create a traced span if tracing is enabled, otherwise no-op.
+
+        Yields the OpenTelemetry span object when tracing is active, or
+        ``None`` when tracing is disabled. Callers may use the yielded span
+        together with :meth:`record_error` to attach error information.
+
+        :param name: Span name, e.g.
``"invoke_agent my_agent:1.0"``. + :type name: str + :param attributes: Key-value span attributes. + :type attributes: Optional[dict[str, str]] + :param carrier: Incoming HTTP headers for W3C trace-context propagation. + :type carrier: Optional[dict[str, str]] + :return: Context manager that yields the OTel span or *None*. + :rtype: Iterator[Any] + """ + if not self._enabled or self._tracer is None: + yield None + return + + ctx = self._extract_context(carrier) + + with self._tracer.start_as_current_span( + name=name, + attributes=attributes or {}, + kind=trace.SpanKind.SERVER, + context=ctx, + ) as otel_span: + yield otel_span + + def start_span( + self, + name: str, + attributes: Optional[dict[str, str]] = None, + carrier: Optional[dict[str, str]] = None, + ) -> Any: + """Start a span without a context manager. + + Use this for streaming responses where the span must outlive the + initial ``invoke()`` call. The caller **must** call :meth:`end_span` + when the work is finished. + + :param name: Span name, e.g. ``"invoke_agent my_agent:1.0"``. + :type name: str + :param attributes: Key-value span attributes. + :type attributes: Optional[dict[str, str]] + :param carrier: Incoming HTTP headers for W3C trace-context propagation. + :type carrier: Optional[dict[str, str]] + :return: The OTel span, or *None* when tracing is disabled. 
+ :rtype: Any + """ + if not self._enabled or self._tracer is None: + return None + + ctx = self._extract_context(carrier) + + return self._tracer.start_span( + name=name, + attributes=attributes or {}, + kind=trace.SpanKind.SERVER, + context=ctx, + ) + + # ------------------------------------------------------------------ + # Request-level convenience wrappers + # ------------------------------------------------------------------ + + def _prepare_request_span_args( + self, + headers: Mapping[str, str], + invocation_id: str, + span_operation: str, + operation_name: Optional[str] = None, + session_id: str = "", + ) -> tuple[str, dict[str, str], dict[str, str]]: + """Extract headers and build span arguments for a request. + + Shared pipeline used by :meth:`start_request_span` and + :meth:`request_span` to avoid duplicating header extraction, + attribute building, and span naming. + + :param headers: HTTP request headers (any ``Mapping[str, str]``). + :type headers: Mapping[str, str] + :param invocation_id: The invocation/request ID. + :type invocation_id: str + :param span_operation: Span operation (e.g. ``"invoke_agent"``). + :type span_operation: str + :param operation_name: Optional ``gen_ai.operation.name`` value. + :type operation_name: Optional[str] + :param session_id: Session ID from the ``agent_session_id`` query + parameter. Defaults to ``""`` (no session). + :type session_id: str + :return: ``(name, attributes, carrier)`` ready for + :meth:`span` or :meth:`start_span`. 
+ :rtype: tuple[str, dict[str, str], dict[str, str]] + """ + carrier = _extract_w3c_carrier(headers) + span_attrs = self.build_span_attrs( + invocation_id, session_id, operation_name=operation_name + ) + return self.span_name(span_operation), span_attrs, carrier + + def start_request_span( + self, + headers: Mapping[str, str], + invocation_id: str, + span_operation: str, + operation_name: Optional[str] = None, + session_id: str = "", + ) -> Any: + """Start a request-scoped span, extracting context from HTTP headers. + + Convenience method that combines header extraction, attribute + building, span naming, and span creation into a single call. + Use for streaming responses where the span must outlive the + initial handler call. The caller **must** call :meth:`end_span` + when work is finished. + + :param headers: HTTP request headers (any ``Mapping[str, str]``). + :type headers: Mapping[str, str] + :param invocation_id: The invocation/request ID. + :type invocation_id: str + :param span_operation: Span operation (e.g. ``"invoke_agent"``). + Becomes the first token of the OTel span name via + :meth:`span_name`. + :type span_operation: str + :param operation_name: Optional ``gen_ai.operation.name`` attribute + value (e.g. ``"invoke_agent"``). Omitted when *None*. + :type operation_name: Optional[str] + :param session_id: Session ID from the ``agent_session_id`` query + parameter. Defaults to ``""`` (no session). + :type session_id: str + :return: The OTel span, or *None* when tracing is disabled. 
+ :rtype: Any + """ + name, attrs, carrier = self._prepare_request_span_args( + headers, invocation_id, span_operation, operation_name, + session_id=session_id, + ) + return self.start_span(name, attributes=attrs, carrier=carrier) + + @contextmanager + def request_span( + self, + headers: Mapping[str, str], + invocation_id: str, + span_operation: str, + operation_name: Optional[str] = None, + session_id: str = "", + ) -> Iterator[Any]: + """Create a request-scoped span as a context manager. + + Convenience method that combines header extraction, attribute + building, span naming, and span creation into a single call. + Use for non-streaming request handlers where the span should + cover the entire handler execution. + + :param headers: HTTP request headers (any ``Mapping[str, str]``). + :type headers: Mapping[str, str] + :param invocation_id: The invocation/request ID. + :type invocation_id: str + :param span_operation: Span operation (e.g. ``"get_invocation"``). + Becomes the first token of the OTel span name via + :meth:`span_name`. + :type span_operation: str + :param operation_name: Optional ``gen_ai.operation.name`` attribute + value. Omitted when *None*. + :type operation_name: Optional[str] + :param session_id: Session ID from the ``agent_session_id`` query + parameter. Defaults to ``""`` (no session). + :type session_id: str + :return: Context manager that yields the OTel span or *None*. + :rtype: Iterator[Any] + """ + name, attrs, carrier = self._prepare_request_span_args( + headers, invocation_id, span_operation, operation_name, + session_id=session_id, + ) + with self.span(name, attributes=attrs, carrier=carrier) as otel_span: + yield otel_span + + # ------------------------------------------------------------------ + # Span lifecycle helpers + # ------------------------------------------------------------------ + + def end_span(self, span: Any, exc: Optional[BaseException] = None) -> None: + """End a span started with :meth:`start_span`. 
+ + Optionally records an error before ending. No-op when *span* is + ``None`` (tracing disabled). + + :param span: The OTel span, or *None*. + :type span: Any + :param exc: Optional exception to record before ending. + :type exc: Optional[BaseException] + """ + if span is None: + return + if exc is not None: + self.record_error(span, exc) + span.end() + + @staticmethod + def record_error(span: Any, exc: BaseException) -> None: + """Record an exception and ERROR status on a span. + + No-op when *span* is ``None`` (tracing disabled) or when + ``opentelemetry-api`` is not installed. + + :param span: The OTel span returned by :meth:`span`, or *None*. + :type span: Any + :param exc: The exception to record. + :type exc: BaseException + """ + if span is not None and _HAS_OTEL: + span.set_status(trace.StatusCode.ERROR, str(exc)) + span.record_exception(exc) + + @staticmethod + def set_baggage(keys: dict[str, str]) -> Any: + """Set W3C Baggage entries on the current context. + + Baggage keys propagate to downstream services via + the ``baggage`` header. No-op when the OTel baggage API is not + available. + + :param keys: Mapping of baggage key → value to set. + :type keys: dict[str, str] + :return: A context token that must be passed to :meth:`detach_baggage` + when the scope ends, or *None* when baggage is unavailable. + :rtype: Any + """ + if not _HAS_BAGGAGE: + return None + ctx = _otel_context.get_current() + for key, value in keys.items(): + ctx = _otel_baggage.set_baggage(key, value, context=ctx) + return _otel_context.attach(ctx) + + @staticmethod + def detach_baggage(token: Any) -> None: + """Detach a baggage context previously attached by :meth:`set_baggage`. + + :param token: The token returned by :meth:`set_baggage`. + :type token: Any + """ + if token is not None and _HAS_BAGGAGE: + _otel_context.detach(token) + + @staticmethod + def set_current_span(span: Any) -> Any: + """Set a span as the current span in the OTel context. 
+ + This makes *span* the active parent for any child spans created + by downstream code (e.g. framework handlers). Without this, + spans created inside the handler would become siblings rather + than children of *span*. + + Returns a context token that **must** be passed to + :meth:`detach_context` when the scope ends. No-op when *span* + is ``None`` or tracing is not available. + + :param span: The OTel span to make current, or *None*. + :type span: Any + :return: A context token, or *None*. + :rtype: Any + """ + if span is None or not _HAS_OTEL: + return None + ctx = trace.set_span_in_context(span) + return _otel_context.attach(ctx) + + @staticmethod + def detach_context(token: Any) -> None: + """Detach a context previously attached by :meth:`set_current_span`. + + :param token: The token returned by :meth:`set_current_span`. + :type token: Any + """ + if token is not None and _HAS_OTEL: + _otel_context.detach(token) + + async def trace_stream( + self, iterator: AsyncIterable[_Content], span: Any + ) -> AsyncIterator[_Content]: + """Wrap a streaming body iterator so the tracing span covers the full + duration of data transmission. + + Yields chunks from *iterator* unchanged. When the iterator is + exhausted or raises an exception the span is ended (with error status + if applicable). Safe to call when tracing is disabled (*span* is + ``None``). + + :param iterator: The original async body iterator from + :class:`~starlette.responses.StreamingResponse`. + :type iterator: AsyncIterable[Union[str, bytes, memoryview]] + :param span: The OTel span (or *None* when tracing is disabled). + :type span: Any + :return: An async iterator that yields chunks unchanged. 
+ :rtype: AsyncIterator[Union[str, bytes, memoryview]] + """ + error: Optional[BaseException] = None + try: + async for chunk in iterator: + yield chunk + except BaseException as exc: + error = exc + raise + finally: + self.end_span(span, exc=error) + + +class _FoundryEnrichmentSpanProcessor: + """Span processor that adds Foundry identity attributes to all spans. + + Per the container image spec, ``gen_ai.agent.name``, + ``gen_ai.agent.version``, ``gen_ai.agent.id``, and + ``microsoft.foundry.project.id`` must be present on **every** span + generated by the server — including spans created by underlying + frameworks (e.g. HTTP client libraries, LLM SDKs). + + Registering this processor on the global :class:`TracerProvider` + ensures enrichment happens automatically regardless of which + library creates the span. + + :param agent_name: The Foundry agent name, or *None*. + :type agent_name: Optional[str] + :param agent_version: The Foundry agent version, or *None*. + :type agent_version: Optional[str] + :param agent_id: The combined agent identifier (``name:version``), or *None*. + :type agent_id: Optional[str] + :param project_id: The Foundry project ARM resource ID, or *None*. 
+ :type project_id: Optional[str] + """ + + def __init__( + self, + agent_name: Optional[str] = None, + agent_version: Optional[str] = None, + agent_id: Optional[str] = None, + project_id: Optional[str] = None, + ) -> None: + self.agent_name = agent_name + self.agent_version = agent_version + self.agent_id = agent_id + self.project_id = project_id + + def on_start(self, span: Any, parent_context: Any = None) -> None: # pylint: disable=unused-argument + """Add Foundry identity attributes when a span starts.""" + if self.agent_name: + span.set_attribute(_ATTR_GEN_AI_AGENT_NAME, self.agent_name) + if self.agent_version: + span.set_attribute(_ATTR_GEN_AI_AGENT_VERSION, self.agent_version) + if self.agent_id: + span.set_attribute(_ATTR_GEN_AI_AGENT_ID, self.agent_id) + if self.project_id: + span.set_attribute(_ATTR_FOUNDRY_PROJECT_ID, self.project_id) + + def _on_ending(self, span: Any) -> None: # pylint: disable=unused-argument + """No-op on span ending (called before on_end with mutable span).""" + + def on_end(self, span: Any) -> None: # pylint: disable=unused-argument + """No-op on span end.""" + + def shutdown(self) -> None: + """No-op shutdown.""" + + def force_flush(self, timeout_millis: int = 30000) -> bool: # pylint: disable=unused-argument + """No-op flush — always returns True.""" + return True + + +def _create_resource() -> Any: + """Create the OTel resource for exporters. + + :return: A :class:`~opentelemetry.sdk.resources.Resource`, or *None* + if the required packages are not installed. + :rtype: Any + """ + try: + from opentelemetry.sdk.resources import Resource + except ImportError: + logger.warning( + "Required OTel SDK packages are not installed. Install them with: " + "pip install azure-ai-agentserver-core[tracing]" + ) + return None + return Resource.create({_ATTR_SERVICE_NAME: _SERVICE_NAME_VALUE}) + + +def _ensure_trace_provider(resource: Any) -> Any: + """Return or create the global :class:`TracerProvider`. 
+ + If a user-configured ``TracerProvider`` already exists (one that + supports ``add_span_processor``), it is reused. Otherwise a new + ``SdkTracerProvider`` is created with the given *resource* and set + as the global provider. + + Creating the provider once and passing it to both + :func:`_setup_trace_export` and :func:`_setup_otlp_trace_export` + removes the order-dependent initialization that existed previously. + + :param resource: The OTel resource describing this service, or *None*. + :type resource: Any + :return: A ``TracerProvider``, or *None* if the SDK is not installed. + :rtype: Any + """ + # Called only when _HAS_OTEL is True, so the module-level ``trace`` + # import is guaranteed to be bound. + if resource is None: + return None + try: + from opentelemetry.sdk.trace import TracerProvider as SdkTracerProvider + except ImportError: + return None + + current_provider = trace.get_tracer_provider() + if hasattr(current_provider, "add_span_processor"): + return current_provider + + provider = SdkTracerProvider(resource=resource) + trace.set_tracer_provider(provider) + return provider + + +# Sentinel flags to prevent adding duplicate exporters across multiple +# TracingHelper instantiations within the same process. +_az_trace_export_configured = False +_az_log_export_configured = False +_otlp_trace_export_configured = False +_otlp_log_export_configured = False + + +def _setup_trace_export(provider: Any, connection_string: str) -> None: + """Add an Azure Monitor span processor to the given *provider*. + + :param provider: The TracerProvider to attach the exporter to, or *None*. + :type provider: Any + :param connection_string: Application Insights connection string. 
+ :type connection_string: str + """ + global _az_trace_export_configured # pylint: disable=global-statement + if _az_trace_export_configured: + return + if provider is None: + return + try: + from opentelemetry.sdk.trace.export import BatchSpanProcessor + + from azure.monitor.opentelemetry.exporter import ( # type: ignore[import-untyped] + AzureMonitorTraceExporter, + ) + except ImportError: + logger.warning( + "Trace export to Application Insights requires " + "opentelemetry-sdk and azure-monitor-opentelemetry-exporter. " + "Traces will not be forwarded." + ) + return + + exporter = AzureMonitorTraceExporter(connection_string=connection_string) + provider.add_span_processor(BatchSpanProcessor(exporter)) + _az_trace_export_configured = True + logger.info("Application Insights trace exporter configured.") + + +def _setup_log_export(resource: Any, connection_string: str) -> None: + """Configure a global :class:`LoggerProvider` that exports to App Insights. + + :param resource: The OTel resource describing this service. + :type resource: Any + :param connection_string: Application Insights connection string. + :type connection_string: str + """ + global _az_log_export_configured # pylint: disable=global-statement + if _az_log_export_configured: + return + try: + from opentelemetry._logs import set_logger_provider + from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler + from opentelemetry.sdk._logs.export import BatchLogRecordProcessor + + from azure.monitor.opentelemetry.exporter import ( # type: ignore[import-untyped] + AzureMonitorLogExporter, + ) + except ImportError: + logger.warning( + "Log export to Application Insights requires " + "opentelemetry-sdk. Logs will not be forwarded." 
+ ) + return + + log_provider = LoggerProvider(resource=resource) + set_logger_provider(log_provider) + log_exporter = AzureMonitorLogExporter(connection_string=connection_string) + log_provider.add_log_record_processor( + BatchLogRecordProcessor(log_exporter)) + handler = LoggingHandler(logger_provider=log_provider) + logging.getLogger().addHandler(handler) + _az_log_export_configured = True + logger.info("Application Insights log exporter configured.") + + +def _setup_otlp_trace_export(provider: Any, endpoint: str) -> None: + """Add an OTLP span processor to the given *provider*. + + :param provider: The TracerProvider to attach the exporter to, or *None*. + :type provider: Any + :param endpoint: The OTLP collector endpoint URL. + :type endpoint: str + """ + global _otlp_trace_export_configured # pylint: disable=global-statement + if _otlp_trace_export_configured: + return + if provider is None: + return + try: + from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter + from opentelemetry.sdk.trace.export import BatchSpanProcessor + except ImportError: + logger.warning( + "OTLP trace export requires opentelemetry-sdk and " + "opentelemetry-exporter-otlp-proto-grpc. " + "Traces will not be forwarded via OTLP." + ) + return + + exporter = OTLPSpanExporter(endpoint=endpoint) + provider.add_span_processor(BatchSpanProcessor(exporter)) + _otlp_trace_export_configured = True + logger.info("OTLP trace exporter configured (endpoint=%s).", endpoint) + + +def _setup_otlp_log_export(resource: Any, endpoint: str) -> None: + """Configure OTLP log exporter. + + :param resource: The OTel resource describing this service. + :type resource: Any + :param endpoint: The OTLP collector endpoint URL. 
+ :type endpoint: str + """ + global _otlp_log_export_configured # pylint: disable=global-statement + if _otlp_log_export_configured: + return + try: + from opentelemetry._logs import get_logger_provider + from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter + from opentelemetry.sdk._logs import LoggerProvider + from opentelemetry.sdk._logs.export import BatchLogRecordProcessor + except ImportError: + logger.warning( + "OTLP log export requires opentelemetry-sdk and " + "opentelemetry-exporter-otlp-proto-grpc. " + "Logs will not be forwarded via OTLP." + ) + return + + current_provider = get_logger_provider() + if hasattr(current_provider, "add_log_record_processor"): + log_provider = current_provider + else: + from opentelemetry._logs import set_logger_provider + + log_provider = LoggerProvider(resource=resource) + set_logger_provider(log_provider) + + log_exporter = OTLPLogExporter(endpoint=endpoint) + log_provider.add_log_record_processor( + BatchLogRecordProcessor(log_exporter)) # type: ignore[union-attr] + _otlp_log_export_configured = True + logger.info("OTLP log exporter configured (endpoint=%s).", endpoint) + + +def _extract_w3c_carrier(headers: Mapping[str, str]) -> dict[str, str]: + """Extract W3C trace-context headers from a mapping. + + Filters the input to only ``traceparent`` and ``tracestate`` — the two + headers defined by the `W3C Trace Context`_ standard. This avoids + passing unrelated headers (e.g. ``authorization``, ``cookie``) into the + OpenTelemetry propagator. + + .. _W3C Trace Context: https://www.w3.org/TR/trace-context/ + + :param headers: A mapping of header name to value (e.g. + ``request.headers``). + :type headers: Mapping[str, str] + :return: A dict containing only the W3C propagation headers present + in *headers*. 
+ :rtype: dict[str, str] + """ + result: dict[str, str] = {k: v for k in _W3C_HEADERS if ( + v := headers.get(k)) is not None} + return result diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_version.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_version.py index be71c81bd282..71775f48670c 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_version.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/_version.py @@ -1,9 +1,5 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- +# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- +# --------------------------------------------------------- -VERSION = "1.0.0b1" +VERSION = "2.0.0b1" diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/constants.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/constants.py deleted file mode 100644 index a13f23aa261e..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- -class Constants: - # well-known environment variables - APPLICATION_INSIGHTS_CONNECTION_STRING = "_AGENT_RUNTIME_APP_INSIGHTS_CONNECTION_STRING" - AZURE_AI_PROJECT_ENDPOINT = "AZURE_AI_PROJECT_ENDPOINT" - AGENT_ID = "AGENT_ID" - AGENT_NAME = "AGENT_NAME" - AGENT_PROJECT_RESOURCE_ID = "AGENT_PROJECT_NAME" - OTEL_EXPORTER_ENDPOINT = "OTEL_EXPORTER_ENDPOINT" - AGENT_LOG_LEVEL = "AGENT_LOG_LEVEL" - AGENT_DEBUG_ERRORS = "AGENT_DEBUG_ERRORS" - ENABLE_APPLICATION_INSIGHTS_LOGGER = "ENABLE_APPLICATION_INSIGHTS_LOGGER" diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py deleted file mode 100644 index f062398c0d3b..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py +++ /dev/null @@ -1,159 +0,0 @@ -# pylint: disable=broad-exception-caught,dangerous-default-value -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- -import contextvars -import logging -import os -from logging import config - -from ._version import VERSION -from .constants import Constants - -default_log_config = { - "version": 1, - "disable_existing_loggers": False, - "loggers": { - "azure.ai.agentserver": { - "handlers": ["console"], - "level": "INFO", - "propagate": False, - }, - }, - "handlers": { - "console": {"formatter": "std_out", "class": "logging.StreamHandler", "level": "INFO"}, - }, - "formatters": {"std_out": {"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"}}, -} - -request_context = contextvars.ContextVar("request_context", default=None) - - -def get_dimensions(): - env_values = {name: value for name, value in vars(Constants).items() if not name.startswith("_")} - res = {"azure.ai.agentserver.version": VERSION} - for name, env_name in env_values.items(): - if isinstance(env_name, str) and not env_name.startswith("_"): - runtime_value = os.environ.get(env_name) - if runtime_value: - res[f"azure.ai.agentserver.{name.lower()}"] = runtime_value - return res - - -def get_project_endpoint(): - project_resource_id = os.environ.get(Constants.AGENT_PROJECT_RESOURCE_ID) - if project_resource_id: - last_part = project_resource_id.split("/")[-1] - - parts = last_part.split("@") - if len(parts) < 2: - print(f"invalid project resource id: {project_resource_id}") - return None - account = parts[0] - project = parts[1] - return f"https://{account}.services.ai.azure.com/api/projects/{project}" - print("environment variable AGENT_PROJECT_RESOURCE_ID not set.") - return None - - -def get_application_insights_connstr(): - try: - conn_str = os.environ.get(Constants.APPLICATION_INSIGHTS_CONNECTION_STRING) - if not conn_str: - print("environment variable APPLICATION_INSIGHTS_CONNECTION_STRING not set.") - project_endpoint = get_project_endpoint() - if project_endpoint: - # try to get the project connected application insights - from 
azure.ai.projects import AIProjectClient - from azure.identity import DefaultAzureCredential - - project_client = AIProjectClient(credential=DefaultAzureCredential(), endpoint=project_endpoint) - conn_str = project_client.telemetry.get_application_insights_connection_string() - if not conn_str: - print(f"no connected application insights found for project:{project_endpoint}") - else: - os.environ[Constants.APPLICATION_INSIGHTS_CONNECTION_STRING] = conn_str - return conn_str - except Exception as e: - print(f"failed to get application insights with error: {e}") - return None - - -class CustomDimensionsFilter(logging.Filter): - def filter(self, record): - # Add custom dimensions to every log record - dimensions = get_dimensions() - for key, value in dimensions.items(): - setattr(record, key, value) - cur_request_context = request_context.get() - if cur_request_context: - for key, value in cur_request_context.items(): - setattr(record, key, value) - return True - - -def configure(log_config: dict = default_log_config): - """ - Configure logging based on the provided configuration dictionary. - The dictionary should contain the logging configuration in a format compatible with `logging.config.dictConfig`. - - :param log_config: A dictionary containing logging configuration. 
- :type log_config: dict - """ - try: - config.dictConfig(log_config) - - application_insights_connection_string = get_application_insights_connstr() - enable_application_insights_logger = ( - os.environ.get(Constants.ENABLE_APPLICATION_INSIGHTS_LOGGER, "true").lower() == "true" - ) - if application_insights_connection_string and enable_application_insights_logger: - from opentelemetry._logs import set_logger_provider - from opentelemetry.sdk._logs import ( - LoggerProvider, - LoggingHandler, - ) - from opentelemetry.sdk._logs.export import BatchLogRecordProcessor - from opentelemetry.sdk.resources import Resource - - from azure.monitor.opentelemetry.exporter import AzureMonitorLogExporter - - logger_provider = LoggerProvider(resource=Resource.create({"service.name": "azure.ai.agentserver"})) - set_logger_provider(logger_provider) - - exporter = AzureMonitorLogExporter(connection_string=application_insights_connection_string) - - logger_provider.add_log_record_processor(BatchLogRecordProcessor(exporter)) - handler = LoggingHandler(logger_provider=logger_provider) - handler.name = "appinsights_handler" - - # Add custom filter to inject dimensions - custom_filter = CustomDimensionsFilter() - handler.addFilter(custom_filter) - - # Only add to azure.ai.agentserver namespace to avoid infrastructure logs - app_logger = logging.getLogger("azure.ai.agentserver") - app_logger.setLevel(get_log_level()) - app_logger.addHandler(handler) - - except Exception as e: - print(f"Failed to configure logging: {e}") - - -def get_log_level(): - log_level = os.getenv(Constants.AGENT_LOG_LEVEL, "INFO").upper() - valid_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] - if log_level not in valid_levels: - print(f"Invalid log level '{log_level}' specified. Defaulting to 'INFO'.") - log_level = "INFO" - return log_level - - -def get_logger() -> logging.Logger: - """ - If the logger is not already configured, it will be initialized with default settings. 
- - :return: Configured logger instance. - :rtype: logging.Logger - """ - return logging.getLogger("azure.ai.agentserver") diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py deleted file mode 100644 index d5622ebe7732..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -from ._create_response import CreateResponse # type: ignore -from .projects import Response, ResponseStreamEvent - -__all__ = ["CreateResponse", "Response", "ResponseStreamEvent"] # type: ignore[var-annotated] diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py deleted file mode 100644 index a38f55408c7f..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py +++ /dev/null @@ -1,12 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -# pylint: disable=no-name-in-module -from typing import Optional - -from .openai import response_create_params # type: ignore -from . 
import projects as _azure_ai_projects_models - -class CreateResponse(response_create_params.ResponseCreateParamsBase, total=False): # type: ignore - agent: Optional[_azure_ai_projects_models.AgentReference] - stream: Optional[bool] diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/openai/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/openai/__init__.py deleted file mode 100644 index ecf2179f53b7..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/openai/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -""" -Re-exports of OpenAI SDK response types. - -This module re-exports types from the OpenAI SDK for convenience. -These types are fully documented in the OpenAI SDK documentation. - -.. note:: - This module re-exports OpenAI SDK types. For detailed documentation, - please refer to the `OpenAI Python SDK documentation `_. -""" -from openai.types.responses import * # pylint: disable=unused-wildcard-import - -__all__ = [name for name in globals() if not name.startswith("_")] # type: ignore[var-annotated] diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/__init__.py deleted file mode 100644 index f65ea1133818..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/__init__.py +++ /dev/null @@ -1,820 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
-# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import - - -from ._models import ( # type: ignore - A2ATool, - AISearchIndexResource, - AgentClusterInsightResult, - AgentClusterInsightsRequest, - AgentContainerObject, - AgentContainerOperationError, - AgentContainerOperationObject, - AgentDefinition, - AgentId, - AgentObject, - AgentObjectVersions, - AgentReference, - AgentTaxonomyInput, - AgentVersionObject, - AgenticIdentityCredentials, - Annotation, - AnnotationFileCitation, - AnnotationFilePath, - AnnotationUrlCitation, - ApiError, - ApiErrorResponse, - ApiInnerError, - ApiKeyCredentials, - ApproximateLocation, - AzureAIAgentTarget, - AzureAISearchAgentTool, - AzureAISearchIndex, - AzureAISearchToolResource, - AzureFunctionAgentTool, - AzureFunctionBinding, - AzureFunctionDefinition, - AzureFunctionDefinitionFunction, - AzureFunctionStorageQueue, - AzureOpenAIModelConfiguration, - BaseCredentials, - BingCustomSearchAgentTool, - BingCustomSearchConfiguration, - BingCustomSearchToolParameters, - BingGroundingAgentTool, - BingGroundingSearchConfiguration, - BingGroundingSearchToolParameters, - BlobReference, - BlobReferenceSasCredential, - BrowserAutomationAgentTool, - BrowserAutomationToolConnectionParameters, - BrowserAutomationToolParameters, - CaptureStructuredOutputsTool, - ChartCoordinate, - ChatSummaryMemoryItem, - ClusterInsightResult, - ClusterTokenUsage, - CodeBasedEvaluatorDefinition, - CodeInterpreterOutput, - CodeInterpreterOutputImage, - CodeInterpreterOutputLogs, - CodeInterpreterTool, - CodeInterpreterToolAuto, - CodeInterpreterToolCallItemParam, - CodeInterpreterToolCallItemResource, - ComparisonFilter, - CompoundFilter, - 
ComputerAction, - ComputerActionClick, - ComputerActionDoubleClick, - ComputerActionDrag, - ComputerActionKeyPress, - ComputerActionMove, - ComputerActionScreenshot, - ComputerActionScroll, - ComputerActionTypeKeys, - ComputerActionWait, - ComputerToolCallItemParam, - ComputerToolCallItemResource, - ComputerToolCallOutputItemOutput, - ComputerToolCallOutputItemOutputComputerScreenshot, - ComputerToolCallOutputItemParam, - ComputerToolCallOutputItemResource, - ComputerToolCallSafetyCheck, - ComputerUsePreviewTool, - Connection, - ContainerAppAgentDefinition, - ContinuousEvaluationRuleAction, - Coordinate, - CosmosDBIndex, - CreatedBy, - CronTrigger, - CustomCredential, - DailyRecurrenceSchedule, - DatasetCredential, - DatasetVersion, - DeleteAgentResponse, - DeleteAgentVersionResponse, - DeleteMemoryStoreResponse, - Deployment, - EmbeddingConfiguration, - EntraIDCredentials, - EvalCompareReport, - EvalResult, - EvalRunResultCompareItem, - EvalRunResultComparison, - EvalRunResultSummary, - EvaluationComparisonRequest, - EvaluationResultSample, - EvaluationRule, - EvaluationRuleAction, - EvaluationRuleFilter, - EvaluationRunClusterInsightResult, - EvaluationRunClusterInsightsRequest, - EvaluationScheduleTask, - EvaluationTaxonomy, - EvaluationTaxonomyInput, - EvaluatorDefinition, - EvaluatorMetric, - EvaluatorVersion, - FabricDataAgentToolParameters, - FieldMapping, - FileDatasetVersion, - FileSearchTool, - FileSearchToolCallItemParam, - FileSearchToolCallItemParamResult, - FileSearchToolCallItemResource, - FolderDatasetVersion, - FunctionTool, - FunctionToolCallItemParam, - FunctionToolCallItemResource, - FunctionToolCallOutputItemParam, - FunctionToolCallOutputItemResource, - HostedAgentDefinition, - HourlyRecurrenceSchedule, - HumanEvaluationRuleAction, - ImageBasedHostedAgentDefinition, - ImageGenTool, - ImageGenToolCallItemParam, - ImageGenToolCallItemResource, - ImageGenToolInputImageMask, - Index, - Insight, - InsightCluster, - InsightModelConfiguration, - 
InsightRequest, - InsightResult, - InsightSample, - InsightScheduleTask, - InsightSummary, - InsightsMetadata, - InvokeAzureAgentWorkflowActionOutputItemResource, - ItemContent, - ItemContentInputAudio, - ItemContentInputFile, - ItemContentInputImage, - ItemContentInputText, - ItemContentOutputAudio, - ItemContentOutputText, - ItemContentRefusal, - ItemParam, - ItemReferenceItemParam, - ItemResource, - LocalShellExecAction, - LocalShellTool, - LocalShellToolCallItemParam, - LocalShellToolCallItemResource, - LocalShellToolCallOutputItemParam, - LocalShellToolCallOutputItemResource, - Location, - LogProb, - MCPApprovalRequestItemParam, - MCPApprovalRequestItemResource, - MCPApprovalResponseItemParam, - MCPApprovalResponseItemResource, - MCPCallItemParam, - MCPCallItemResource, - MCPListToolsItemParam, - MCPListToolsItemResource, - MCPListToolsTool, - MCPTool, - MCPToolAllowedTools1, - MCPToolRequireApproval1, - MCPToolRequireApprovalAlways, - MCPToolRequireApprovalNever, - ManagedAzureAISearchIndex, - MemoryItem, - MemoryOperation, - MemorySearchItem, - MemorySearchOptions, - MemorySearchTool, - MemorySearchToolCallItemParam, - MemorySearchToolCallItemResource, - MemoryStoreDefaultDefinition, - MemoryStoreDefaultOptions, - MemoryStoreDefinition, - MemoryStoreDeleteScopeResponse, - MemoryStoreObject, - MemoryStoreOperationUsage, - MemoryStoreOperationUsageInputTokensDetails, - MemoryStoreOperationUsageOutputTokensDetails, - MemoryStoreSearchResponse, - MemoryStoreUpdateResponse, - MemoryStoreUpdateResult, - MicrosoftFabricAgentTool, - ModelDeployment, - ModelDeploymentSku, - MonthlyRecurrenceSchedule, - NoAuthenticationCredentials, - OAuthConsentRequestItemResource, - OneTimeTrigger, - OpenApiAgentTool, - OpenApiAnonymousAuthDetails, - OpenApiAuthDetails, - OpenApiFunctionDefinition, - OpenApiFunctionDefinitionFunction, - OpenApiManagedAuthDetails, - OpenApiManagedSecurityScheme, - OpenApiProjectConnectionAuthDetails, - OpenApiProjectConnectionSecurityScheme, - 
PagedScheduleRun, - PendingUploadRequest, - PendingUploadResponse, - Prompt, - PromptAgentDefinition, - PromptAgentDefinitionText, - PromptBasedEvaluatorDefinition, - ProtocolVersionRecord, - RaiConfig, - RankingOptions, - Reasoning, - ReasoningItemParam, - ReasoningItemResource, - ReasoningItemSummaryPart, - ReasoningItemSummaryTextPart, - RecurrenceSchedule, - RecurrenceTrigger, - RedTeam, - Response, - ResponseCodeInterpreterCallCodeDeltaEvent, - ResponseCodeInterpreterCallCodeDoneEvent, - ResponseCodeInterpreterCallCompletedEvent, - ResponseCodeInterpreterCallInProgressEvent, - ResponseCodeInterpreterCallInterpretingEvent, - ResponseCompletedEvent, - ResponseContentPartAddedEvent, - ResponseContentPartDoneEvent, - ResponseConversation1, - ResponseCreatedEvent, - ResponseError, - ResponseErrorEvent, - ResponseFailedEvent, - ResponseFileSearchCallCompletedEvent, - ResponseFileSearchCallInProgressEvent, - ResponseFileSearchCallSearchingEvent, - ResponseFormatJsonSchemaSchema, - ResponseFunctionCallArgumentsDeltaEvent, - ResponseFunctionCallArgumentsDoneEvent, - ResponseImageGenCallCompletedEvent, - ResponseImageGenCallGeneratingEvent, - ResponseImageGenCallInProgressEvent, - ResponseImageGenCallPartialImageEvent, - ResponseInProgressEvent, - ResponseIncompleteDetails1, - ResponseIncompleteEvent, - ResponseMCPCallArgumentsDeltaEvent, - ResponseMCPCallArgumentsDoneEvent, - ResponseMCPCallCompletedEvent, - ResponseMCPCallFailedEvent, - ResponseMCPCallInProgressEvent, - ResponseMCPListToolsCompletedEvent, - ResponseMCPListToolsFailedEvent, - ResponseMCPListToolsInProgressEvent, - ResponseOutputItemAddedEvent, - ResponseOutputItemDoneEvent, - ResponsePromptVariables, - ResponseQueuedEvent, - ResponseReasoningDeltaEvent, - ResponseReasoningDoneEvent, - ResponseReasoningSummaryDeltaEvent, - ResponseReasoningSummaryDoneEvent, - ResponseReasoningSummaryPartAddedEvent, - ResponseReasoningSummaryPartDoneEvent, - ResponseReasoningSummaryTextDeltaEvent, - 
ResponseReasoningSummaryTextDoneEvent, - ResponseRefusalDeltaEvent, - ResponseRefusalDoneEvent, - ResponseStreamEvent, - ResponseText, - ResponseTextDeltaEvent, - ResponseTextDoneEvent, - ResponseTextFormatConfiguration, - ResponseTextFormatConfigurationJsonObject, - ResponseTextFormatConfigurationJsonSchema, - ResponseTextFormatConfigurationText, - ResponseUsage, - ResponseWebSearchCallCompletedEvent, - ResponseWebSearchCallInProgressEvent, - ResponseWebSearchCallSearchingEvent, - ResponsesAssistantMessageItemParam, - ResponsesAssistantMessageItemResource, - ResponsesDeveloperMessageItemParam, - ResponsesDeveloperMessageItemResource, - ResponsesMessageItemParam, - ResponsesMessageItemResource, - ResponsesSystemMessageItemParam, - ResponsesSystemMessageItemResource, - ResponsesUserMessageItemParam, - ResponsesUserMessageItemResource, - SASCredentials, - Schedule, - ScheduleRun, - ScheduleTask, - SharepointAgentTool, - SharepointGroundingToolParameters, - StructuredInputDefinition, - StructuredOutputDefinition, - StructuredOutputsItemResource, - Target, - TargetConfig, - TaxonomyCategory, - TaxonomySubCategory, - Tool, - ToolArgumentBinding, - ToolChoiceObject, - ToolChoiceObjectCodeInterpreter, - ToolChoiceObjectComputer, - ToolChoiceObjectFileSearch, - ToolChoiceObjectFunction, - ToolChoiceObjectImageGen, - ToolChoiceObjectMCP, - ToolChoiceObjectWebSearch, - ToolDescription, - ToolProjectConnection, - ToolProjectConnectionList, - TopLogProb, - Trigger, - UserProfileMemoryItem, - VectorStoreFileAttributes, - WebSearchAction, - WebSearchActionFind, - WebSearchActionOpenPage, - WebSearchActionSearch, - WebSearchPreviewTool, - WebSearchToolCallItemParam, - WebSearchToolCallItemResource, - WeeklyRecurrenceSchedule, - WorkflowActionOutputItemResource, - WorkflowDefinition, -) - -from ._enums import ( # type: ignore - AgentContainerOperationStatus, - AgentContainerStatus, - AgentKind, - AgentProtocol, - AnnotationType, - AttackStrategy, - AzureAISearchQueryType, - 
CodeInterpreterOutputType, - ComputerActionType, - ComputerToolCallOutputItemOutputType, - ConnectionType, - CredentialType, - DatasetType, - DayOfWeek, - DeploymentType, - EvaluationRuleActionType, - EvaluationRuleEventType, - EvaluationTaxonomyInputType, - EvaluatorCategory, - EvaluatorDefinitionType, - EvaluatorMetricDirection, - EvaluatorMetricType, - EvaluatorType, - IndexType, - InsightType, - ItemContentType, - ItemType, - LocationType, - MemoryItemKind, - MemoryOperationKind, - MemoryStoreKind, - MemoryStoreUpdateStatus, - OpenApiAuthType, - OperationState, - PendingUploadType, - ReasoningEffort, - ReasoningItemSummaryPartType, - RecurrenceType, - ResponseErrorCode, - ResponseStreamEventType, - ResponseTextFormatConfigurationType, - ResponsesMessageRole, - RiskCategory, - SampleType, - ScheduleProvisioningStatus, - ScheduleTaskType, - ServiceTier, - ToolChoiceObjectType, - ToolChoiceOptions, - ToolType, - TreatmentEffectType, - TriggerType, - WebSearchActionType, -) -from ._patch import __all__ as _patch_all -from ._patch import * -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "A2ATool", - "AISearchIndexResource", - "AgentClusterInsightResult", - "AgentClusterInsightsRequest", - "AgentContainerObject", - "AgentContainerOperationError", - "AgentContainerOperationObject", - "AgentDefinition", - "AgentId", - "AgentObject", - "AgentObjectVersions", - "AgentReference", - "AgentTaxonomyInput", - "AgentVersionObject", - "AgenticIdentityCredentials", - "Annotation", - "AnnotationFileCitation", - "AnnotationFilePath", - "AnnotationUrlCitation", - "ApiError", - "ApiErrorResponse", - "ApiInnerError", - "ApiKeyCredentials", - "ApproximateLocation", - "AzureAIAgentTarget", - "AzureAISearchAgentTool", - "AzureAISearchIndex", - "AzureAISearchToolResource", - "AzureFunctionAgentTool", - "AzureFunctionBinding", - "AzureFunctionDefinition", - "AzureFunctionDefinitionFunction", - "AzureFunctionStorageQueue", - "AzureOpenAIModelConfiguration", - 
"BaseCredentials", - "BingCustomSearchAgentTool", - "BingCustomSearchConfiguration", - "BingCustomSearchToolParameters", - "BingGroundingAgentTool", - "BingGroundingSearchConfiguration", - "BingGroundingSearchToolParameters", - "BlobReference", - "BlobReferenceSasCredential", - "BrowserAutomationAgentTool", - "BrowserAutomationToolConnectionParameters", - "BrowserAutomationToolParameters", - "CaptureStructuredOutputsTool", - "ChartCoordinate", - "ChatSummaryMemoryItem", - "ClusterInsightResult", - "ClusterTokenUsage", - "CodeBasedEvaluatorDefinition", - "CodeInterpreterOutput", - "CodeInterpreterOutputImage", - "CodeInterpreterOutputLogs", - "CodeInterpreterTool", - "CodeInterpreterToolAuto", - "CodeInterpreterToolCallItemParam", - "CodeInterpreterToolCallItemResource", - "ComparisonFilter", - "CompoundFilter", - "ComputerAction", - "ComputerActionClick", - "ComputerActionDoubleClick", - "ComputerActionDrag", - "ComputerActionKeyPress", - "ComputerActionMove", - "ComputerActionScreenshot", - "ComputerActionScroll", - "ComputerActionTypeKeys", - "ComputerActionWait", - "ComputerToolCallItemParam", - "ComputerToolCallItemResource", - "ComputerToolCallOutputItemOutput", - "ComputerToolCallOutputItemOutputComputerScreenshot", - "ComputerToolCallOutputItemParam", - "ComputerToolCallOutputItemResource", - "ComputerToolCallSafetyCheck", - "ComputerUsePreviewTool", - "Connection", - "ContainerAppAgentDefinition", - "ContinuousEvaluationRuleAction", - "Coordinate", - "CosmosDBIndex", - "CreatedBy", - "CronTrigger", - "CustomCredential", - "DailyRecurrenceSchedule", - "DatasetCredential", - "DatasetVersion", - "DeleteAgentResponse", - "DeleteAgentVersionResponse", - "DeleteMemoryStoreResponse", - "Deployment", - "EmbeddingConfiguration", - "EntraIDCredentials", - "EvalCompareReport", - "EvalResult", - "EvalRunResultCompareItem", - "EvalRunResultComparison", - "EvalRunResultSummary", - "EvaluationComparisonRequest", - "EvaluationResultSample", - "EvaluationRule", - 
"EvaluationRuleAction", - "EvaluationRuleFilter", - "EvaluationRunClusterInsightResult", - "EvaluationRunClusterInsightsRequest", - "EvaluationScheduleTask", - "EvaluationTaxonomy", - "EvaluationTaxonomyInput", - "EvaluatorDefinition", - "EvaluatorMetric", - "EvaluatorVersion", - "FabricDataAgentToolParameters", - "FieldMapping", - "FileDatasetVersion", - "FileSearchTool", - "FileSearchToolCallItemParam", - "FileSearchToolCallItemParamResult", - "FileSearchToolCallItemResource", - "FolderDatasetVersion", - "FunctionTool", - "FunctionToolCallItemParam", - "FunctionToolCallItemResource", - "FunctionToolCallOutputItemParam", - "FunctionToolCallOutputItemResource", - "HostedAgentDefinition", - "HourlyRecurrenceSchedule", - "HumanEvaluationRuleAction", - "ImageBasedHostedAgentDefinition", - "ImageGenTool", - "ImageGenToolCallItemParam", - "ImageGenToolCallItemResource", - "ImageGenToolInputImageMask", - "Index", - "Insight", - "InsightCluster", - "InsightModelConfiguration", - "InsightRequest", - "InsightResult", - "InsightSample", - "InsightScheduleTask", - "InsightSummary", - "InsightsMetadata", - "InvokeAzureAgentWorkflowActionOutputItemResource", - "ItemContent", - "ItemContentInputAudio", - "ItemContentInputFile", - "ItemContentInputImage", - "ItemContentInputText", - "ItemContentOutputAudio", - "ItemContentOutputText", - "ItemContentRefusal", - "ItemParam", - "ItemReferenceItemParam", - "ItemResource", - "LocalShellExecAction", - "LocalShellTool", - "LocalShellToolCallItemParam", - "LocalShellToolCallItemResource", - "LocalShellToolCallOutputItemParam", - "LocalShellToolCallOutputItemResource", - "Location", - "LogProb", - "MCPApprovalRequestItemParam", - "MCPApprovalRequestItemResource", - "MCPApprovalResponseItemParam", - "MCPApprovalResponseItemResource", - "MCPCallItemParam", - "MCPCallItemResource", - "MCPListToolsItemParam", - "MCPListToolsItemResource", - "MCPListToolsTool", - "MCPTool", - "MCPToolAllowedTools1", - "MCPToolRequireApproval1", - 
"MCPToolRequireApprovalAlways", - "MCPToolRequireApprovalNever", - "ManagedAzureAISearchIndex", - "MemoryItem", - "MemoryOperation", - "MemorySearchItem", - "MemorySearchOptions", - "MemorySearchTool", - "MemorySearchToolCallItemParam", - "MemorySearchToolCallItemResource", - "MemoryStoreDefaultDefinition", - "MemoryStoreDefaultOptions", - "MemoryStoreDefinition", - "MemoryStoreDeleteScopeResponse", - "MemoryStoreObject", - "MemoryStoreOperationUsage", - "MemoryStoreOperationUsageInputTokensDetails", - "MemoryStoreOperationUsageOutputTokensDetails", - "MemoryStoreSearchResponse", - "MemoryStoreUpdateResponse", - "MemoryStoreUpdateResult", - "MicrosoftFabricAgentTool", - "ModelDeployment", - "ModelDeploymentSku", - "MonthlyRecurrenceSchedule", - "NoAuthenticationCredentials", - "OAuthConsentRequestItemResource", - "OneTimeTrigger", - "OpenApiAgentTool", - "OpenApiAnonymousAuthDetails", - "OpenApiAuthDetails", - "OpenApiFunctionDefinition", - "OpenApiFunctionDefinitionFunction", - "OpenApiManagedAuthDetails", - "OpenApiManagedSecurityScheme", - "OpenApiProjectConnectionAuthDetails", - "OpenApiProjectConnectionSecurityScheme", - "PagedScheduleRun", - "PendingUploadRequest", - "PendingUploadResponse", - "Prompt", - "PromptAgentDefinition", - "PromptAgentDefinitionText", - "PromptBasedEvaluatorDefinition", - "ProtocolVersionRecord", - "RaiConfig", - "RankingOptions", - "Reasoning", - "ReasoningItemParam", - "ReasoningItemResource", - "ReasoningItemSummaryPart", - "ReasoningItemSummaryTextPart", - "RecurrenceSchedule", - "RecurrenceTrigger", - "RedTeam", - "Response", - "ResponseCodeInterpreterCallCodeDeltaEvent", - "ResponseCodeInterpreterCallCodeDoneEvent", - "ResponseCodeInterpreterCallCompletedEvent", - "ResponseCodeInterpreterCallInProgressEvent", - "ResponseCodeInterpreterCallInterpretingEvent", - "ResponseCompletedEvent", - "ResponseContentPartAddedEvent", - "ResponseContentPartDoneEvent", - "ResponseConversation1", - "ResponseCreatedEvent", - "ResponseError", - 
"ResponseErrorEvent", - "ResponseFailedEvent", - "ResponseFileSearchCallCompletedEvent", - "ResponseFileSearchCallInProgressEvent", - "ResponseFileSearchCallSearchingEvent", - "ResponseFormatJsonSchemaSchema", - "ResponseFunctionCallArgumentsDeltaEvent", - "ResponseFunctionCallArgumentsDoneEvent", - "ResponseImageGenCallCompletedEvent", - "ResponseImageGenCallGeneratingEvent", - "ResponseImageGenCallInProgressEvent", - "ResponseImageGenCallPartialImageEvent", - "ResponseInProgressEvent", - "ResponseIncompleteDetails1", - "ResponseIncompleteEvent", - "ResponseMCPCallArgumentsDeltaEvent", - "ResponseMCPCallArgumentsDoneEvent", - "ResponseMCPCallCompletedEvent", - "ResponseMCPCallFailedEvent", - "ResponseMCPCallInProgressEvent", - "ResponseMCPListToolsCompletedEvent", - "ResponseMCPListToolsFailedEvent", - "ResponseMCPListToolsInProgressEvent", - "ResponseOutputItemAddedEvent", - "ResponseOutputItemDoneEvent", - "ResponsePromptVariables", - "ResponseQueuedEvent", - "ResponseReasoningDeltaEvent", - "ResponseReasoningDoneEvent", - "ResponseReasoningSummaryDeltaEvent", - "ResponseReasoningSummaryDoneEvent", - "ResponseReasoningSummaryPartAddedEvent", - "ResponseReasoningSummaryPartDoneEvent", - "ResponseReasoningSummaryTextDeltaEvent", - "ResponseReasoningSummaryTextDoneEvent", - "ResponseRefusalDeltaEvent", - "ResponseRefusalDoneEvent", - "ResponseStreamEvent", - "ResponseText", - "ResponseTextDeltaEvent", - "ResponseTextDoneEvent", - "ResponseTextFormatConfiguration", - "ResponseTextFormatConfigurationJsonObject", - "ResponseTextFormatConfigurationJsonSchema", - "ResponseTextFormatConfigurationText", - "ResponseUsage", - "ResponseWebSearchCallCompletedEvent", - "ResponseWebSearchCallInProgressEvent", - "ResponseWebSearchCallSearchingEvent", - "ResponsesAssistantMessageItemParam", - "ResponsesAssistantMessageItemResource", - "ResponsesDeveloperMessageItemParam", - "ResponsesDeveloperMessageItemResource", - "ResponsesMessageItemParam", - "ResponsesMessageItemResource", - 
"ResponsesSystemMessageItemParam", - "ResponsesSystemMessageItemResource", - "ResponsesUserMessageItemParam", - "ResponsesUserMessageItemResource", - "SASCredentials", - "Schedule", - "ScheduleRun", - "ScheduleTask", - "SharepointAgentTool", - "SharepointGroundingToolParameters", - "StructuredInputDefinition", - "StructuredOutputDefinition", - "StructuredOutputsItemResource", - "Target", - "TargetConfig", - "TaxonomyCategory", - "TaxonomySubCategory", - "Tool", - "ToolArgumentBinding", - "ToolChoiceObject", - "ToolChoiceObjectCodeInterpreter", - "ToolChoiceObjectComputer", - "ToolChoiceObjectFileSearch", - "ToolChoiceObjectFunction", - "ToolChoiceObjectImageGen", - "ToolChoiceObjectMCP", - "ToolChoiceObjectWebSearch", - "ToolDescription", - "ToolProjectConnection", - "ToolProjectConnectionList", - "TopLogProb", - "Trigger", - "UserProfileMemoryItem", - "VectorStoreFileAttributes", - "WebSearchAction", - "WebSearchActionFind", - "WebSearchActionOpenPage", - "WebSearchActionSearch", - "WebSearchPreviewTool", - "WebSearchToolCallItemParam", - "WebSearchToolCallItemResource", - "WeeklyRecurrenceSchedule", - "WorkflowActionOutputItemResource", - "WorkflowDefinition", - "AgentContainerOperationStatus", - "AgentContainerStatus", - "AgentKind", - "AgentProtocol", - "AnnotationType", - "AttackStrategy", - "AzureAISearchQueryType", - "CodeInterpreterOutputType", - "ComputerActionType", - "ComputerToolCallOutputItemOutputType", - "ConnectionType", - "CredentialType", - "DatasetType", - "DayOfWeek", - "DeploymentType", - "EvaluationRuleActionType", - "EvaluationRuleEventType", - "EvaluationTaxonomyInputType", - "EvaluatorCategory", - "EvaluatorDefinitionType", - "EvaluatorMetricDirection", - "EvaluatorMetricType", - "EvaluatorType", - "IndexType", - "InsightType", - "ItemContentType", - "ItemType", - "LocationType", - "MemoryItemKind", - "MemoryOperationKind", - "MemoryStoreKind", - "MemoryStoreUpdateStatus", - "OpenApiAuthType", - "OperationState", - "PendingUploadType", - 
"ReasoningEffort", - "ReasoningItemSummaryPartType", - "RecurrenceType", - "ResponseErrorCode", - "ResponseStreamEventType", - "ResponseTextFormatConfigurationType", - "ResponsesMessageRole", - "RiskCategory", - "SampleType", - "ScheduleProvisioningStatus", - "ScheduleTaskType", - "ServiceTier", - "ToolChoiceObjectType", - "ToolChoiceOptions", - "ToolType", - "TreatmentEffectType", - "TriggerType", - "WebSearchActionType", -] -__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore -_patch_sdk() diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_enums.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_enums.py deleted file mode 100644 index ea4ebc59efd7..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_enums.py +++ /dev/null @@ -1,767 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from enum import Enum -from azure.core import CaseInsensitiveEnumMeta - - -class AgentContainerOperationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Status of the container operation for a specific version of an agent.""" - - NOT_STARTED = "NotStarted" - """The container operation is not started.""" - IN_PROGRESS = "InProgress" - """The container operation is in progress.""" - SUCCEEDED = "Succeeded" - """The container operation has succeeded.""" - FAILED = "Failed" - """The container operation has failed.""" - - -class AgentContainerStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Status of the container of a specific version of an agent.""" - - STARTING = "Starting" - """The container is starting.""" - RUNNING = "Running" - """The container is running.""" - STOPPING = "Stopping" - """The container is stopping.""" - STOPPED = "Stopped" - """The container is stopped.""" - FAILED = "Failed" - """The container has failed.""" - DELETING = "Deleting" - """The container is deleting.""" - DELETED = "Deleted" - """The container is deleted.""" - UPDATING = "Updating" - """The container is updating.""" - - -class AgentKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of AgentKind.""" - - PROMPT = "prompt" - HOSTED = "hosted" - CONTAINER_APP = "container_app" - WORKFLOW = "workflow" - - -class AgentProtocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of AgentProtocol.""" - - ACTIVITY_PROTOCOL = "activity_protocol" - RESPONSES = "responses" - - -class AnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of AnnotationType.""" - - FILE_CITATION = "file_citation" - URL_CITATION = "url_citation" - FILE_PATH = "file_path" - CONTAINER_FILE_CITATION = "container_file_citation" - - -class AttackStrategy(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Strategies for attacks.""" - - EASY = "easy" - """Represents a default set of easy 
complexity attacks. Easy complexity attacks require less - effort, such as translation of a prompt into some encoding, and does not require any Large - Language Model to convert or orchestrate.""" - MODERATE = "moderate" - """Represents a default set of moderate complexity attacks. Moderate complexity attacks require - having access to resources such as another generative AI model.""" - DIFFICULT = "difficult" - """Represents a default set of difficult complexity attacks. Difficult complexity attacks include - attacks that require access to significant resources and effort to execute an attack such as - knowledge of search-based algorithms in addition to a generative AI model.""" - ASCII_ART = "ascii_art" - """Generates visual art using ASCII characters, often used for creative or obfuscation purposes.""" - ASCII_SMUGGLER = "ascii_smuggler" - """Conceals data within ASCII characters, making it harder to detect.""" - ATBASH = "atbash" - """Implements the Atbash cipher, a simple substitution cipher where each letter is mapped to its - reverse.""" - BASE64 = "base64" - """Encodes binary data into a text format using Base64, commonly used for data transmission.""" - BINARY = "binary" - """Converts text into binary code, representing data in a series of 0s and 1s.""" - CAESAR = "caesar" - """Applies the Caesar cipher, a substitution cipher that shifts characters by a fixed number of - positions.""" - CHARACTER_SPACE = "character_space" - """Alters text by adding spaces between characters, often used for obfuscation.""" - JAILBREAK = "jailbreak" - """Injects specially crafted prompts to bypass AI safeguards, known as User Injected Prompt - Attacks (UPIA).""" - ANSII_ATTACK = "ansii_attack" - """Utilizes ANSI escape sequences to manipulate text appearance and behavior.""" - CHARACTER_SWAP = "character_swap" - """Swaps characters within text to create variations or obfuscate the original content.""" - SUFFIX_APPEND = "suffix_append" - """Appends an adversarial suffix to 
the prompt.""" - STRING_JOIN = "string_join" - """Joins multiple strings together, often used for concatenation or obfuscation.""" - UNICODE_CONFUSABLE = "unicode_confusable" - """Uses Unicode characters that look similar to standard characters, creating visual confusion.""" - UNICODE_SUBSTITUTION = "unicode_substitution" - """Substitutes standard characters with Unicode equivalents, often for obfuscation.""" - DIACRITIC = "diacritic" - """Adds diacritical marks to characters, changing their appearance and sometimes their meaning.""" - FLIP = "flip" - """Flips characters from front to back, creating a mirrored effect.""" - LEETSPEAK = "leetspeak" - """Transforms text into Leetspeak, a form of encoding that replaces letters with similar-looking - numbers or symbols.""" - ROT13 = "rot13" - """Applies the ROT13 cipher, a simple substitution cipher that shifts characters by 13 positions.""" - MORSE = "morse" - """Encodes text into Morse code, using dots and dashes to represent characters.""" - URL = "url" - """Encodes text into URL format.""" - BASELINE = "baseline" - """Represents the baseline direct adversarial probing, which is used by attack strategies as the - attack objective.""" - - -class AzureAISearchQueryType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Available query types for Azure AI Search tool.""" - - SIMPLE = "simple" - """Query type ``simple``""" - SEMANTIC = "semantic" - """Query type ``semantic``""" - VECTOR = "vector" - """Query type ``vector``""" - VECTOR_SIMPLE_HYBRID = "vector_simple_hybrid" - """Query type ``vector_simple_hybrid``""" - VECTOR_SEMANTIC_HYBRID = "vector_semantic_hybrid" - """Query type ``vector_semantic_hybrid``""" - - -class CodeInterpreterOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of CodeInterpreterOutputType.""" - - LOGS = "logs" - IMAGE = "image" - - -class ComputerActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of ComputerActionType.""" - - SCREENSHOT = "screenshot" - CLICK 
= "click" - DOUBLE_CLICK = "double_click" - SCROLL = "scroll" - TYPE = "type" - WAIT = "wait" - KEYPRESS = "keypress" - DRAG = "drag" - MOVE = "move" - - -class ComputerToolCallOutputItemOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """A computer screenshot image used with the computer use tool.""" - - SCREENSHOT = "computer_screenshot" - - -class ConnectionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The Type (or category) of the connection.""" - - AZURE_OPEN_AI = "AzureOpenAI" - """Azure OpenAI Service""" - AZURE_BLOB_STORAGE = "AzureBlob" - """Azure Blob Storage, with specified container""" - AZURE_STORAGE_ACCOUNT = "AzureStorageAccount" - """Azure Blob Storage, with container not specified (used by Agents)""" - AZURE_AI_SEARCH = "CognitiveSearch" - """Azure AI Search""" - COSMOS_DB = "CosmosDB" - """CosmosDB""" - API_KEY = "ApiKey" - """Generic connection that uses API Key authentication""" - APPLICATION_CONFIGURATION = "AppConfig" - """Application Configuration""" - APPLICATION_INSIGHTS = "AppInsights" - """Application Insights""" - CUSTOM = "CustomKeys" - """Custom Keys""" - REMOTE_TOOL = "RemoteTool" - """Remote tool""" - - -class CredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The credential type used by the connection.""" - - API_KEY = "ApiKey" - """API Key credential""" - ENTRA_ID = "AAD" - """Entra ID credential (formerly known as AAD)""" - SAS = "SAS" - """Shared Access Signature (SAS) credential""" - CUSTOM = "CustomKeys" - """Custom credential""" - NONE = "None" - """No credential""" - AGENTIC_IDENTITY = "AgenticIdentityToken" - """Agentic identity credential""" - - -class DatasetType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Enum to determine the type of data.""" - - URI_FILE = "uri_file" - """URI file.""" - URI_FOLDER = "uri_folder" - """URI folder.""" - - -class DayOfWeek(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Days of the week for recurrence schedule.""" - - SUNDAY = "Sunday" - 
"""Sunday.""" - MONDAY = "Monday" - """Monday.""" - TUESDAY = "Tuesday" - """Tuesday.""" - WEDNESDAY = "Wednesday" - """Wednesday.""" - THURSDAY = "Thursday" - """Thursday.""" - FRIDAY = "Friday" - """Friday.""" - SATURDAY = "Saturday" - """Saturday.""" - - -class DeploymentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of DeploymentType.""" - - MODEL_DEPLOYMENT = "ModelDeployment" - """Model deployment""" - - -class EvaluationRuleActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of the evaluation action.""" - - CONTINUOUS_EVALUATION = "continuousEvaluation" - """Continuous evaluation.""" - HUMAN_EVALUATION = "humanEvaluation" - """Human evaluation.""" - - -class EvaluationRuleEventType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of the evaluation rule event.""" - - RESPONSE_COMPLETED = "response.completed" - """Response completed.""" - MANUAL = "manual" - """Manual trigger.""" - - -class EvaluationTaxonomyInputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of the evaluation taxonomy input.""" - - AGENT = "agent" - """Agent""" - POLICY = "policy" - """Policy.""" - - -class EvaluatorCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The category of the evaluator.""" - - QUALITY = "quality" - """Quality""" - SAFETY = "safety" - """Risk & Safety""" - AGENTS = "agents" - """Agents""" - - -class EvaluatorDefinitionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of evaluator definition.""" - - PROMPT = "prompt" - """Prompt-based definition""" - CODE = "code" - """Code-based definition""" - PROMPT_AND_CODE = "prompt_and_code" - """Prompt & Code Based definition""" - SERVICE = "service" - """Service-based evaluator""" - OPENAI_GRADERS = "openai_graders" - """OpenAI graders""" - - -class EvaluatorMetricDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The direction of the metric indicating whether a higher value is better, a lower value is - better, or neutral. 
- """ - - INCREASE = "increase" - """It indicates a higher value is better for this metric""" - DECREASE = "decrease" - """It indicates a lower value is better for this metric""" - NEUTRAL = "neutral" - """It indicates no preference for this metric direction""" - - -class EvaluatorMetricType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of the evaluator.""" - - ORDINAL = "ordinal" - """Ordinal metric representing categories that can be ordered or ranked.""" - CONTINUOUS = "continuous" - """Continuous metric representing values in a continuous range.""" - BOOLEAN = "boolean" - """Boolean metric representing true/false values""" - - -class EvaluatorType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of the evaluator.""" - - BUILT_IN = "builtin" - """Built-in evaluator (Microsoft provided)""" - CUSTOM = "custom" - """Custom evaluator""" - - -class IndexType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of IndexType.""" - - AZURE_SEARCH = "AzureSearch" - """Azure search""" - COSMOS_DB = "CosmosDBNoSqlVectorStore" - """CosmosDB""" - MANAGED_AZURE_SEARCH = "ManagedAzureSearch" - """Managed Azure Search""" - - -class InsightType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The request of the insights.""" - - EVALUATION_RUN_CLUSTER_INSIGHT = "EvaluationRunClusterInsight" - """Insights on an Evaluation run result.""" - AGENT_CLUSTER_INSIGHT = "AgentClusterInsight" - """Cluster Insight on an Agent.""" - EVALUATION_COMPARISON = "EvaluationComparison" - """Evaluation Comparison.""" - - -class ItemContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Multi-modal input and output contents.""" - - INPUT_TEXT = "input_text" - INPUT_AUDIO = "input_audio" - INPUT_IMAGE = "input_image" - INPUT_FILE = "input_file" - OUTPUT_TEXT = "output_text" - OUTPUT_AUDIO = "output_audio" - REFUSAL = "refusal" - - -class ItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of ItemType.""" - - MESSAGE = "message" - 
FILE_SEARCH_CALL = "file_search_call" - FUNCTION_CALL = "function_call" - FUNCTION_CALL_OUTPUT = "function_call_output" - COMPUTER_CALL = "computer_call" - COMPUTER_CALL_OUTPUT = "computer_call_output" - WEB_SEARCH_CALL = "web_search_call" - REASONING = "reasoning" - ITEM_REFERENCE = "item_reference" - IMAGE_GENERATION_CALL = "image_generation_call" - CODE_INTERPRETER_CALL = "code_interpreter_call" - LOCAL_SHELL_CALL = "local_shell_call" - LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output" - MCP_LIST_TOOLS = "mcp_list_tools" - MCP_APPROVAL_REQUEST = "mcp_approval_request" - MCP_APPROVAL_RESPONSE = "mcp_approval_response" - MCP_CALL = "mcp_call" - STRUCTURED_OUTPUTS = "structured_outputs" - WORKFLOW_ACTION = "workflow_action" - MEMORY_SEARCH_CALL = "memory_search_call" - OAUTH_CONSENT_REQUEST = "oauth_consent_request" - - -class LocationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of LocationType.""" - - APPROXIMATE = "approximate" - - -class MemoryItemKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Memory item kind.""" - - USER_PROFILE = "user_profile" - """User profile information extracted from conversations.""" - CHAT_SUMMARY = "chat_summary" - """Summary of chat conversations.""" - - -class MemoryOperationKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Memory operation kind.""" - - CREATE = "create" - """Create a new memory item.""" - UPDATE = "update" - """Update an existing memory item.""" - DELETE = "delete" - """Delete an existing memory item.""" - - -class MemoryStoreKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of memory store implementation to use.""" - - DEFAULT = "default" - """The default memory store implementation.""" - - -class MemoryStoreUpdateStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Status of a memory store update operation.""" - - QUEUED = "queued" - IN_PROGRESS = "in_progress" - COMPLETED = "completed" - FAILED = "failed" - SUPERSEDED = "superseded" - - -class 
OpenApiAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Authentication type for OpenApi endpoint. Allowed types are: - * Anonymous (no authentication required) - * Project Connection (requires project_connection_id to endpoint, as setup in AI Foundry) - * Managed_Identity (requires audience for identity based auth). - """ - - ANONYMOUS = "anonymous" - PROJECT_CONNECTION = "project_connection" - MANAGED_IDENTITY = "managed_identity" - - -class OperationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Enum describing allowed operation states.""" - - NOT_STARTED = "NotStarted" - """The operation has not started.""" - RUNNING = "Running" - """The operation is in progress.""" - SUCCEEDED = "Succeeded" - """The operation has completed successfully.""" - FAILED = "Failed" - """The operation has failed.""" - CANCELED = "Canceled" - """The operation has been canceled by the user.""" - - -class PendingUploadType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of pending upload.""" - - NONE = "None" - """No pending upload.""" - BLOB_REFERENCE = "BlobReference" - """Blob Reference is the only supported type.""" - - -class ReasoningEffort(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """**o-series models only** - Constrains effort on reasoning for - `reasoning models `_. - Currently supported values are ``low``, ``medium``, and ``high``. Reducing - reasoning effort can result in faster responses and fewer tokens used - on reasoning in a response. 
- """ - - LOW = "low" - MEDIUM = "medium" - HIGH = "high" - - -class ReasoningItemSummaryPartType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of ReasoningItemSummaryPartType.""" - - SUMMARY_TEXT = "summary_text" - - -class RecurrenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Recurrence type.""" - - HOURLY = "Hourly" - """Hourly recurrence pattern.""" - DAILY = "Daily" - """Daily recurrence pattern.""" - WEEKLY = "Weekly" - """Weekly recurrence pattern.""" - MONTHLY = "Monthly" - """Monthly recurrence pattern.""" - - -class ResponseErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The error code for the response.""" - - SERVER_ERROR = "server_error" - RATE_LIMIT_EXCEEDED = "rate_limit_exceeded" - INVALID_PROMPT = "invalid_prompt" - VECTOR_STORE_TIMEOUT = "vector_store_timeout" - INVALID_IMAGE = "invalid_image" - INVALID_IMAGE_FORMAT = "invalid_image_format" - INVALID_BASE64_IMAGE = "invalid_base64_image" - INVALID_IMAGE_URL = "invalid_image_url" - IMAGE_TOO_LARGE = "image_too_large" - IMAGE_TOO_SMALL = "image_too_small" - IMAGE_PARSE_ERROR = "image_parse_error" - IMAGE_CONTENT_POLICY_VIOLATION = "image_content_policy_violation" - INVALID_IMAGE_MODE = "invalid_image_mode" - IMAGE_FILE_TOO_LARGE = "image_file_too_large" - UNSUPPORTED_IMAGE_MEDIA_TYPE = "unsupported_image_media_type" - EMPTY_IMAGE_FILE = "empty_image_file" - FAILED_TO_DOWNLOAD_IMAGE = "failed_to_download_image" - IMAGE_FILE_NOT_FOUND = "image_file_not_found" - - -class ResponsesMessageRole(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The collection of valid roles for responses message items.""" - - SYSTEM = "system" - DEVELOPER = "developer" - USER = "user" - ASSISTANT = "assistant" - - -class ResponseStreamEventType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of ResponseStreamEventType.""" - - RESPONSE_AUDIO_DELTA = "response.audio.delta" - RESPONSE_AUDIO_DONE = "response.audio.done" - RESPONSE_AUDIO_TRANSCRIPT_DELTA = 
"response.audio_transcript.delta" - RESPONSE_AUDIO_TRANSCRIPT_DONE = "response.audio_transcript.done" - RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA = "response.code_interpreter_call_code.delta" - RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE = "response.code_interpreter_call_code.done" - RESPONSE_CODE_INTERPRETER_CALL_COMPLETED = "response.code_interpreter_call.completed" - RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS = "response.code_interpreter_call.in_progress" - RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING = "response.code_interpreter_call.interpreting" - RESPONSE_COMPLETED = "response.completed" - RESPONSE_CONTENT_PART_ADDED = "response.content_part.added" - RESPONSE_CONTENT_PART_DONE = "response.content_part.done" - RESPONSE_CREATED = "response.created" - ERROR = "error" - RESPONSE_FILE_SEARCH_CALL_COMPLETED = "response.file_search_call.completed" - RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS = "response.file_search_call.in_progress" - RESPONSE_FILE_SEARCH_CALL_SEARCHING = "response.file_search_call.searching" - RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA = "response.function_call_arguments.delta" - RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE = "response.function_call_arguments.done" - RESPONSE_IN_PROGRESS = "response.in_progress" - RESPONSE_FAILED = "response.failed" - RESPONSE_INCOMPLETE = "response.incomplete" - RESPONSE_OUTPUT_ITEM_ADDED = "response.output_item.added" - RESPONSE_OUTPUT_ITEM_DONE = "response.output_item.done" - RESPONSE_REFUSAL_DELTA = "response.refusal.delta" - RESPONSE_REFUSAL_DONE = "response.refusal.done" - RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED = "response.output_text.annotation.added" - RESPONSE_OUTPUT_TEXT_DELTA = "response.output_text.delta" - RESPONSE_OUTPUT_TEXT_DONE = "response.output_text.done" - RESPONSE_REASONING_SUMMARY_PART_ADDED = "response.reasoning_summary_part.added" - RESPONSE_REASONING_SUMMARY_PART_DONE = "response.reasoning_summary_part.done" - RESPONSE_REASONING_SUMMARY_TEXT_DELTA = "response.reasoning_summary_text.delta" - 
RESPONSE_REASONING_SUMMARY_TEXT_DONE = "response.reasoning_summary_text.done" - RESPONSE_WEB_SEARCH_CALL_COMPLETED = "response.web_search_call.completed" - RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS = "response.web_search_call.in_progress" - RESPONSE_WEB_SEARCH_CALL_SEARCHING = "response.web_search_call.searching" - RESPONSE_IMAGE_GENERATION_CALL_COMPLETED = "response.image_generation_call.completed" - RESPONSE_IMAGE_GENERATION_CALL_GENERATING = "response.image_generation_call.generating" - RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS = "response.image_generation_call.in_progress" - RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE = "response.image_generation_call.partial_image" - RESPONSE_MCP_CALL_ARGUMENTS_DELTA = "response.mcp_call.arguments_delta" - RESPONSE_MCP_CALL_ARGUMENTS_DONE = "response.mcp_call.arguments_done" - RESPONSE_MCP_CALL_COMPLETED = "response.mcp_call.completed" - RESPONSE_MCP_CALL_FAILED = "response.mcp_call.failed" - RESPONSE_MCP_CALL_IN_PROGRESS = "response.mcp_call.in_progress" - RESPONSE_MCP_LIST_TOOLS_COMPLETED = "response.mcp_list_tools.completed" - RESPONSE_MCP_LIST_TOOLS_FAILED = "response.mcp_list_tools.failed" - RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS = "response.mcp_list_tools.in_progress" - RESPONSE_QUEUED = "response.queued" - RESPONSE_REASONING_DELTA = "response.reasoning.delta" - RESPONSE_REASONING_DONE = "response.reasoning.done" - RESPONSE_REASONING_SUMMARY_DELTA = "response.reasoning_summary.delta" - RESPONSE_REASONING_SUMMARY_DONE = "response.reasoning_summary.done" - - -class ResponseTextFormatConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """An object specifying the format that the model must output. - Configuring ``{ "type": "json_schema" }`` enables Structured Outputs, - which ensures the model will match your supplied JSON schema. Learn more in the - `Structured Outputs guide `_. - The default format is ``{ "type": "text" }`` with no additional options. 
- **Not recommended for gpt-4o and newer models:** - Setting to ``{ "type": "json_object" }`` enables the older JSON mode, which - ensures the message the model generates is valid JSON. Using ``json_schema`` - is preferred for models that support it. - """ - - TEXT = "text" - JSON_SCHEMA = "json_schema" - JSON_OBJECT = "json_object" - - -class RiskCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Risk category for the attack objective.""" - - HATE_UNFAIRNESS = "HateUnfairness" - """Represents content related to hate or unfairness.""" - VIOLENCE = "Violence" - """Represents content related to violence.""" - SEXUAL = "Sexual" - """Represents content of a sexual nature.""" - SELF_HARM = "SelfHarm" - """Represents content related to self-harm.""" - - -class SampleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of sample used in the analysis.""" - - EVALUATION_RESULT_SAMPLE = "EvaluationResultSample" - """A sample from the evaluation result.""" - - -class ScheduleProvisioningStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Schedule provisioning status.""" - - CREATING = "Creating" - """Represents the creation status of the schedule.""" - UPDATING = "Updating" - """Represents the updating status of the schedule.""" - DELETING = "Deleting" - """Represents the deleting status of the schedule.""" - SUCCEEDED = "Succeeded" - """Represents the succeeded status of the schedule.""" - FAILED = "Failed" - """Represents the failed status of the schedule.""" - - -class ScheduleTaskType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of the task.""" - - EVALUATION = "Evaluation" - """Evaluation task.""" - INSIGHT = "Insight" - """Insight task.""" - - -class ServiceTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Specifies the processing type used for serving the request. - * If set to 'auto', then the request will be processed with the service tier configured in the - Project settings. 
Unless otherwise configured, the Project will use 'default'. - * If set to 'default', then the request will be processed with the standard pricing and - performance for the selected model. - * If set to '[flex](/docs/guides/flex-processing)' or 'priority', then the request will be - processed with the corresponding service tier. [Contact - sales](https://openai.com/contact-sales) to learn more about Priority processing. - * When not set, the default behavior is 'auto'. - When the ``service_tier`` parameter is set, the response body will include the - ``service_tier`` value based on the processing mode actually used to serve the request. This - response value may be different from the value set in the parameter. - """ - - AUTO = "auto" - DEFAULT = "default" - FLEX = "flex" - SCALE = "scale" - PRIORITY = "priority" - - -class ToolChoiceObjectType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Indicates that the model should use a built-in tool to generate a response. - `Learn more about built-in tools `_. - """ - - FILE_SEARCH = "file_search" - FUNCTION = "function" - COMPUTER = "computer_use_preview" - WEB_SEARCH = "web_search_preview" - IMAGE_GENERATION = "image_generation" - CODE_INTERPRETER = "code_interpreter" - MCP = "mcp" - - -class ToolChoiceOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Controls which (if any) tool is called by the model. - ``none`` means the model will not call any tool and instead generates a message. - ``auto`` means the model can pick between generating a message or calling one or - more tools. - ``required`` means the model must call one or more tools. 
- """ - - NONE = "none" - AUTO = "auto" - REQUIRED = "required" - - -class ToolType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """A tool that can be used to generate a response.""" - - FILE_SEARCH = "file_search" - FUNCTION = "function" - COMPUTER_USE_PREVIEW = "computer_use_preview" - WEB_SEARCH_PREVIEW = "web_search_preview" - MCP = "mcp" - CODE_INTERPRETER = "code_interpreter" - IMAGE_GENERATION = "image_generation" - LOCAL_SHELL = "local_shell" - BING_GROUNDING = "bing_grounding" - BROWSER_AUTOMATION_PREVIEW = "browser_automation_preview" - FABRIC_DATAAGENT_PREVIEW = "fabric_dataagent_preview" - SHAREPOINT_GROUNDING_PREVIEW = "sharepoint_grounding_preview" - AZURE_AI_SEARCH = "azure_ai_search" - OPENAPI = "openapi" - BING_CUSTOM_SEARCH_PREVIEW = "bing_custom_search_preview" - CAPTURE_STRUCTURED_OUTPUTS = "capture_structured_outputs" - A2_A_PREVIEW = "a2a_preview" - AZURE_FUNCTION = "azure_function" - MEMORY_SEARCH = "memory_search" - - -class TreatmentEffectType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Treatment Effect Type.""" - - TOO_FEW_SAMPLES = "TooFewSamples" - """Not enough samples to determine treatment effect.""" - INCONCLUSIVE = "Inconclusive" - """No significant difference between treatment and baseline.""" - CHANGED = "Changed" - """Indicates the metric changed with statistical significance, but the direction is neutral.""" - IMPROVED = "Improved" - """Indicates the treatment significantly improved the metric compared to baseline.""" - DEGRADED = "Degraded" - """Indicates the treatment significantly degraded the metric compared to baseline.""" - - -class TriggerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of the trigger.""" - - CRON = "Cron" - """Cron based trigger.""" - RECURRENCE = "Recurrence" - """Recurrence based trigger.""" - ONE_TIME = "OneTime" - """One-time trigger.""" - - -class WebSearchActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of WebSearchActionType.""" - - SEARCH = "search" - 
OPEN_PAGE = "open_page" - FIND = "find" diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_models.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_models.py deleted file mode 100644 index a810ddc805c3..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_models.py +++ /dev/null @@ -1,15049 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# pylint: disable=useless-super-delegation - -import datetime -from typing import Any, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload - -from ._utils.model_base import Model as _Model, rest_discriminator, rest_field -from ._enums import ( - AgentKind, - AnnotationType, - CodeInterpreterOutputType, - ComputerActionType, - ComputerToolCallOutputItemOutputType, - CredentialType, - DatasetType, - DeploymentType, - EvaluationRuleActionType, - EvaluationTaxonomyInputType, - EvaluatorDefinitionType, - IndexType, - InsightType, - ItemContentType, - ItemType, - LocationType, - MemoryItemKind, - MemoryStoreKind, - OpenApiAuthType, - PendingUploadType, - ReasoningItemSummaryPartType, - RecurrenceType, - ResponseStreamEventType, - ResponseTextFormatConfigurationType, - ResponsesMessageRole, - SampleType, - ScheduleTaskType, - ToolChoiceObjectType, - ToolType, - TriggerType, - WebSearchActionType, -) - -if TYPE_CHECKING: - from .. 
import _types, models as _models # type: ignore - - -class Tool(_Model): - """Tool. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - A2ATool, AzureAISearchAgentTool, AzureFunctionAgentTool, BingCustomSearchAgentTool, - BingGroundingAgentTool, BrowserAutomationAgentTool, CaptureStructuredOutputsTool, - CodeInterpreterTool, ComputerUsePreviewTool, MicrosoftFabricAgentTool, FileSearchTool, - FunctionTool, ImageGenTool, LocalShellTool, MCPTool, MemorySearchTool, OpenApiAgentTool, - SharepointAgentTool, WebSearchPreviewTool - - :ivar type: Required. Known values are: "file_search", "function", "computer_use_preview", - "web_search_preview", "mcp", "code_interpreter", "image_generation", "local_shell", - "bing_grounding", "browser_automation_preview", "fabric_dataagent_preview", - "sharepoint_grounding_preview", "azure_ai_search", "openapi", "bing_custom_search_preview", - "capture_structured_outputs", "a2a_preview", "azure_function", and "memory_search". - :vartype type: str or ~azure.ai.projects.models.ToolType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"file_search\", \"function\", \"computer_use_preview\", - \"web_search_preview\", \"mcp\", \"code_interpreter\", \"image_generation\", \"local_shell\", - \"bing_grounding\", \"browser_automation_preview\", \"fabric_dataagent_preview\", - \"sharepoint_grounding_preview\", \"azure_ai_search\", \"openapi\", - \"bing_custom_search_preview\", \"capture_structured_outputs\", \"a2a_preview\", - \"azure_function\", and \"memory_search\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class A2ATool(Tool, discriminator="a2a_preview"): - """An agent implementing the A2A protocol. - - :ivar type: The type of the tool. Always ``a2a``. Required. - :vartype type: str or ~azure.ai.projects.models.A2_A_PREVIEW - :ivar base_url: Base URL of the agent. - :vartype base_url: str - :ivar agent_card_path: The path to the agent card relative to the ``base_url``. - If not provided, defaults to ``/.well-known/agent-card.json``. - :vartype agent_card_path: str - :ivar project_connection_id: The connection ID in the project for the A2A server. - The connection stores authentication and other connection details needed to connect to the A2A - server. - :vartype project_connection_id: str - """ - - type: Literal[ToolType.A2_A_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the tool. Always ``a2a``. Required.""" - base_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Base URL of the agent.""" - agent_card_path: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The path to the agent card relative to the ``base_url``. - If not provided, defaults to ``/.well-known/agent-card.json``.""" - project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The connection ID in the project for the A2A server. - The connection stores authentication and other connection details needed to connect to the A2A - server.""" - - @overload - def __init__( - self, - *, - base_url: Optional[str] = None, - agent_card_path: Optional[str] = None, - project_connection_id: Optional[str] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.A2_A_PREVIEW # type: ignore - - -class InsightResult(_Model): - """The result of the insights. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AgentClusterInsightResult, EvalCompareReport, EvaluationRunClusterInsightResult - - :ivar type: The type of insights result. Required. Known values are: - "EvaluationRunClusterInsight", "AgentClusterInsight", and "EvaluationComparison". - :vartype type: str or ~azure.ai.projects.models.InsightType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """The type of insights result. Required. Known values are: \"EvaluationRunClusterInsight\", - \"AgentClusterInsight\", and \"EvaluationComparison\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AgentClusterInsightResult(InsightResult, discriminator="AgentClusterInsight"): - """Insights from the agent cluster analysis. - - :ivar type: The type of insights result. Required. Cluster Insight on an Agent. - :vartype type: str or ~azure.ai.projects.models.AGENT_CLUSTER_INSIGHT - :ivar cluster_insight: Required. 
- :vartype cluster_insight: ~azure.ai.projects.models.ClusterInsightResult - """ - - type: Literal[InsightType.AGENT_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of insights result. Required. Cluster Insight on an Agent.""" - cluster_insight: "_models.ClusterInsightResult" = rest_field( - name="clusterInsight", visibility=["read", "create", "update", "delete", "query"] - ) - """Required.""" - - @overload - def __init__( - self, - *, - cluster_insight: "_models.ClusterInsightResult", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = InsightType.AGENT_CLUSTER_INSIGHT # type: ignore - - -class InsightRequest(_Model): - """The request of the insights report. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AgentClusterInsightsRequest, EvaluationComparisonRequest, EvaluationRunClusterInsightsRequest - - :ivar type: The type of request. Required. Known values are: "EvaluationRunClusterInsight", - "AgentClusterInsight", and "EvaluationComparison". - :vartype type: str or ~azure.ai.projects.models.InsightType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """The type of request. Required. Known values are: \"EvaluationRunClusterInsight\", - \"AgentClusterInsight\", and \"EvaluationComparison\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AgentClusterInsightsRequest(InsightRequest, discriminator="AgentClusterInsight"): - """Insights on set of Agent Evaluation Results. - - :ivar type: The type of request. Required. Cluster Insight on an Agent. - :vartype type: str or ~azure.ai.projects.models.AGENT_CLUSTER_INSIGHT - :ivar agent_name: Identifier for the agent. Required. - :vartype agent_name: str - :ivar model_configuration: Configuration of the model used in the insight generation. - :vartype model_configuration: ~azure.ai.projects.models.InsightModelConfiguration - """ - - type: Literal[InsightType.AGENT_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of request. Required. Cluster Insight on an Agent.""" - agent_name: str = rest_field(name="agentName", visibility=["read", "create", "update", "delete", "query"]) - """Identifier for the agent. Required.""" - model_configuration: Optional["_models.InsightModelConfiguration"] = rest_field( - name="modelConfiguration", visibility=["read", "create", "update", "delete", "query"] - ) - """Configuration of the model used in the insight generation.""" - - @overload - def __init__( - self, - *, - agent_name: str, - model_configuration: Optional["_models.InsightModelConfiguration"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = InsightType.AGENT_CLUSTER_INSIGHT # type: ignore - - -class AgentContainerObject(_Model): - """The details of the container of a specific version of an agent. - - :ivar object: The object type, which is always 'agent.container'. Required. 
Default value is - "agent.container". - :vartype object: str - :ivar status: The status of the container of a specific version of an agent. Required. Known - values are: "Starting", "Running", "Stopping", "Stopped", "Failed", "Deleting", "Deleted", and - "Updating". - :vartype status: str or ~azure.ai.projects.models.AgentContainerStatus - :ivar max_replicas: The maximum number of replicas for the container. Default is 1. - :vartype max_replicas: int - :ivar min_replicas: The minimum number of replicas for the container. Default is 1. - :vartype min_replicas: int - :ivar error_message: The error message if the container failed to operate, if any. - :vartype error_message: str - :ivar created_at: The creation time of the container. Required. - :vartype created_at: ~datetime.datetime - :ivar updated_at: The last update time of the container. Required. - :vartype updated_at: ~datetime.datetime - """ - - object: Literal["agent.container"] = rest_field(visibility=["read"]) - """The object type, which is always 'agent.container'. Required. Default value is - \"agent.container\".""" - status: Union[str, "_models.AgentContainerStatus"] = rest_field(visibility=["read"]) - """The status of the container of a specific version of an agent. Required. Known values are: - \"Starting\", \"Running\", \"Stopping\", \"Stopped\", \"Failed\", \"Deleting\", \"Deleted\", - and \"Updating\".""" - max_replicas: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The maximum number of replicas for the container. Default is 1.""" - min_replicas: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The minimum number of replicas for the container. 
Default is 1.""" - error_message: Optional[str] = rest_field(visibility=["read"]) - """The error message if the container failed to operate, if any.""" - created_at: datetime.datetime = rest_field(visibility=["read"], format="rfc3339") - """The creation time of the container. Required.""" - updated_at: datetime.datetime = rest_field(visibility=["read"], format="rfc3339") - """The last update time of the container. Required.""" - - @overload - def __init__( - self, - *, - max_replicas: Optional[int] = None, - min_replicas: Optional[int] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.object: Literal["agent.container"] = "agent.container" - - -class AgentContainerOperationError(_Model): - """The error details of the container operation, if any. - - :ivar code: The error code of the container operation, if any. Required. - :vartype code: str - :ivar type: The error type of the container operation, if any. Required. - :vartype type: str - :ivar message: The error message of the container operation, if any. Required. - :vartype message: str - """ - - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error code of the container operation, if any. Required.""" - type: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error type of the container operation, if any. Required.""" - message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error message of the container operation, if any. Required.""" - - @overload - def __init__( - self, - *, - code: str, - type: str, - message: str, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AgentContainerOperationObject(_Model): - """The container operation for a specific version of an agent. - - :ivar id: The ID of the container operation. This id is unique identifier across the system. - Required. - :vartype id: str - :ivar agent_id: The ID of the agent. Required. - :vartype agent_id: str - :ivar agent_version_id: The ID of the agent version. Required. - :vartype agent_version_id: str - :ivar status: The status of the container operation. Required. Known values are: "NotStarted", - "InProgress", "Succeeded", and "Failed". - :vartype status: str or ~azure.ai.projects.models.AgentContainerOperationStatus - :ivar error: The error of the container operation, if any. - :vartype error: ~azure.ai.projects.models.AgentContainerOperationError - :ivar container: The container of the specific version of an agent. - :vartype container: ~azure.ai.projects.models.AgentContainerObject - """ - - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the container operation. This id is unique identifier across the system. Required.""" - agent_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the agent. Required.""" - agent_version_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the agent version. Required.""" - status: Union[str, "_models.AgentContainerOperationStatus"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the container operation. Required. 
Known values are: \"NotStarted\", - \"InProgress\", \"Succeeded\", and \"Failed\".""" - error: Optional["_models.AgentContainerOperationError"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The error of the container operation, if any.""" - container: Optional["_models.AgentContainerObject"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The container of the specific version of an agent.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - agent_id: str, - agent_version_id: str, - status: Union[str, "_models.AgentContainerOperationStatus"], - error: Optional["_models.AgentContainerOperationError"] = None, - container: Optional["_models.AgentContainerObject"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AgentDefinition(_Model): - """AgentDefinition. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ContainerAppAgentDefinition, HostedAgentDefinition, PromptAgentDefinition, WorkflowDefinition - - :ivar kind: Required. Known values are: "prompt", "hosted", "container_app", and "workflow". - :vartype kind: str or ~azure.ai.projects.models.AgentKind - :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. - :vartype rai_config: ~azure.ai.projects.models.RaiConfig - """ - - __mapping__: dict[str, _Model] = {} - kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) - """Required. 
Known values are: \"prompt\", \"hosted\", \"container_app\", and \"workflow\".""" - rai_config: Optional["_models.RaiConfig"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Configuration for Responsible AI (RAI) content filtering and safety features.""" - - @overload - def __init__( - self, - *, - kind: str, - rai_config: Optional["_models.RaiConfig"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class BaseCredentials(_Model): - """A base class for connection credentials. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - EntraIDCredentials, AgenticIdentityCredentials, ApiKeyCredentials, CustomCredential, - NoAuthenticationCredentials, SASCredentials - - :ivar type: The type of credential used by the connection. Required. Known values are: - "ApiKey", "AAD", "SAS", "CustomKeys", "None", and "AgenticIdentityToken". - :vartype type: str or ~azure.ai.projects.models.CredentialType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read"]) - """The type of credential used by the connection. Required. Known values are: \"ApiKey\", \"AAD\", - \"SAS\", \"CustomKeys\", \"None\", and \"AgenticIdentityToken\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AgenticIdentityCredentials(BaseCredentials, discriminator="AgenticIdentityToken"): - """Agentic identity credential definition. 
- - :ivar type: The credential type. Required. Agentic identity credential - :vartype type: str or ~azure.ai.projects.models.AGENTIC_IDENTITY - """ - - type: Literal[CredentialType.AGENTIC_IDENTITY] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. Agentic identity credential""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = CredentialType.AGENTIC_IDENTITY # type: ignore - - -class AgentId(_Model): - """AgentId. - - :ivar type: Required. Default value is "agent_id". - :vartype type: str - :ivar name: The name of the agent. Required. - :vartype name: str - :ivar version: The version identifier of the agent. Required. - :vartype version: str - """ - - type: Literal["agent_id"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required. Default value is \"agent_id\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the agent. Required.""" - version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The version identifier of the agent. Required.""" - - @overload - def __init__( - self, - *, - name: str, - version: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type: Literal["agent_id"] = "agent_id" - - -class AgentObject(_Model): - """AgentObject. - - :ivar object: The object type, which is always 'agent'. Required. Default value is "agent". 
- :vartype object: str - :ivar id: The unique identifier of the agent. Required. - :vartype id: str - :ivar name: The name of the agent. Required. - :vartype name: str - :ivar versions: The latest version of the agent. Required. - :vartype versions: ~azure.ai.projects.models.AgentObjectVersions - """ - - object: Literal["agent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type, which is always 'agent'. Required. Default value is \"agent\".""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the agent. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the agent. Required.""" - versions: "_models.AgentObjectVersions" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The latest version of the agent. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - name: str, - versions: "_models.AgentObjectVersions", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.object: Literal["agent"] = "agent" - - -class AgentObjectVersions(_Model): - """AgentObjectVersions. - - :ivar latest: Required. - :vartype latest: ~azure.ai.projects.models.AgentVersionObject - """ - - latest: "_models.AgentVersionObject" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - - @overload - def __init__( - self, - *, - latest: "_models.AgentVersionObject", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AgentReference(_Model): - """AgentReference. - - :ivar type: Required. Default value is "agent_reference". - :vartype type: str - :ivar name: The name of the agent. Required. - :vartype name: str - :ivar version: The version identifier of the agent. - :vartype version: str - """ - - type: Literal["agent_reference"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required. Default value is \"agent_reference\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the agent. Required.""" - version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The version identifier of the agent.""" - - @overload - def __init__( - self, - *, - name: str, - version: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type: Literal["agent_reference"] = "agent_reference" - - -class EvaluationTaxonomyInput(_Model): - """Input configuration for the evaluation taxonomy. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AgentTaxonomyInput - - :ivar type: Input type of the evaluation taxonomy. Required. Known values are: "agent" and - "policy". - :vartype type: str or ~azure.ai.projects.models.EvaluationTaxonomyInputType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Input type of the evaluation taxonomy. Required. 
Known values are: \"agent\" and \"policy\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AgentTaxonomyInput(EvaluationTaxonomyInput, discriminator="agent"): - """Input configuration for the evaluation taxonomy when the input type is agent. - - :ivar type: Input type of the evaluation taxonomy. Required. Agent - :vartype type: str or ~azure.ai.projects.models.AGENT - :ivar target: Target configuration for the agent. Required. - :vartype target: ~azure.ai.projects.models.AzureAIAgentTarget - :ivar risk_categories: List of risk categories to evaluate against. Required. - :vartype risk_categories: list[str or ~azure.ai.projects.models.RiskCategory] - """ - - type: Literal[EvaluationTaxonomyInputType.AGENT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Input type of the evaluation taxonomy. Required. Agent""" - target: "_models.AzureAIAgentTarget" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Target configuration for the agent. Required.""" - risk_categories: list[Union[str, "_models.RiskCategory"]] = rest_field( - name="riskCategories", visibility=["read", "create", "update", "delete", "query"] - ) - """List of risk categories to evaluate against. Required.""" - - @overload - def __init__( - self, - *, - target: "_models.AzureAIAgentTarget", - risk_categories: list[Union[str, "_models.RiskCategory"]], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = EvaluationTaxonomyInputType.AGENT # type: ignore - - -class AgentVersionObject(_Model): - """AgentVersionObject. - - :ivar metadata: Set of 16 key-value pairs that can be attached to an object. This can be - useful for storing additional information about the object in a structured - format, and querying for objects via API or the dashboard. - Keys are strings with a maximum length of 64 characters. Values are strings - with a maximum length of 512 characters. Required. - :vartype metadata: dict[str, str] - :ivar object: The object type, which is always 'agent.version'. Required. Default value is - "agent.version". - :vartype object: str - :ivar id: The unique identifier of the agent version. Required. - :vartype id: str - :ivar name: The name of the agent. Name can be used to retrieve/update/delete the agent. - Required. - :vartype name: str - :ivar version: The version identifier of the agent. Agents are immutable and every update - creates a new version while keeping the name same. Required. - :vartype version: str - :ivar description: A human-readable description of the agent. - :vartype description: str - :ivar created_at: The Unix timestamp (seconds) when the agent was created. Required. - :vartype created_at: ~datetime.datetime - :ivar definition: Required. - :vartype definition: ~azure.ai.projects.models.AgentDefinition - """ - - metadata: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Set of 16 key-value pairs that can be attached to an object. This can be - useful for storing additional information about the object in a structured - format, and querying for objects via API or the dashboard. - Keys are strings with a maximum length of 64 characters. Values are strings - with a maximum length of 512 characters. 
Required.""" - object: Literal["agent.version"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type, which is always 'agent.version'. Required. Default value is \"agent.version\".""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the agent version. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the agent. Name can be used to retrieve/update/delete the agent. Required.""" - version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The version identifier of the agent. Agents are immutable and every update creates a new - version while keeping the name same. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A human-readable description of the agent.""" - created_at: datetime.datetime = rest_field( - visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" - ) - """The Unix timestamp (seconds) when the agent was created. Required.""" - definition: "_models.AgentDefinition" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - - @overload - def __init__( - self, - *, - metadata: dict[str, str], - id: str, # pylint: disable=redefined-builtin - name: str, - version: str, - created_at: datetime.datetime, - definition: "_models.AgentDefinition", - description: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.object: Literal["agent.version"] = "agent.version" - - -class AISearchIndexResource(_Model): - """A AI Search Index resource. 
- - :ivar project_connection_id: An index connection ID in an IndexResource attached to this agent. - Required. - :vartype project_connection_id: str - :ivar index_name: The name of an index in an IndexResource attached to this agent. - :vartype index_name: str - :ivar query_type: Type of query in an AIIndexResource attached to this agent. Known values are: - "simple", "semantic", "vector", "vector_simple_hybrid", and "vector_semantic_hybrid". - :vartype query_type: str or ~azure.ai.projects.models.AzureAISearchQueryType - :ivar top_k: Number of documents to retrieve from search and present to the model. - :vartype top_k: int - :ivar filter: filter string for search resource. Learn more from here: - `https://learn.microsoft.com/azure/search/search-filters - `_. - :vartype filter: str - :ivar index_asset_id: Index asset id for search resource. - :vartype index_asset_id: str - """ - - project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An index connection ID in an IndexResource attached to this agent. Required.""" - index_name: Optional[str] = rest_field(name="indexName", visibility=["read", "create", "update", "delete", "query"]) - """The name of an index in an IndexResource attached to this agent.""" - query_type: Optional[Union[str, "_models.AzureAISearchQueryType"]] = rest_field( - name="queryType", visibility=["read", "create", "update", "delete", "query"] - ) - """Type of query in an AIIndexResource attached to this agent. Known values are: \"simple\", - \"semantic\", \"vector\", \"vector_simple_hybrid\", and \"vector_semantic_hybrid\".""" - top_k: Optional[int] = rest_field(name="topK", visibility=["read", "create", "update", "delete", "query"]) - """Number of documents to retrieve from search and present to the model.""" - filter: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """filter string for search resource. 
Learn more from here: - `https://learn.microsoft.com/azure/search/search-filters - `_.""" - index_asset_id: Optional[str] = rest_field( - name="indexAssetId", visibility=["read", "create", "update", "delete", "query"] - ) - """Index asset id for search resource.""" - - @overload - def __init__( - self, - *, - project_connection_id: str, - index_name: Optional[str] = None, - query_type: Optional[Union[str, "_models.AzureAISearchQueryType"]] = None, - top_k: Optional[int] = None, - filter: Optional[str] = None, # pylint: disable=redefined-builtin - index_asset_id: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class Annotation(_Model): - """Annotation. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AnnotationFileCitation, AnnotationFilePath, AnnotationUrlCitation - - :ivar type: Required. Known values are: "file_citation", "url_citation", "file_path", and - "container_file_citation". - :vartype type: str or ~azure.ai.projects.models.AnnotationType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"file_citation\", \"url_citation\", \"file_path\", and - \"container_file_citation\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AnnotationFileCitation(Annotation, discriminator="file_citation"): - """A citation to a file. 
- - :ivar type: The type of the file citation. Always ``file_citation``. Required. - :vartype type: str or ~azure.ai.projects.models.FILE_CITATION - :ivar file_id: The ID of the file. Required. - :vartype file_id: str - :ivar index: The index of the file in the list of files. Required. - :vartype index: int - :ivar filename: The filename of the file cited. Required. - :vartype filename: str - """ - - type: Literal[AnnotationType.FILE_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the file citation. Always ``file_citation``. Required.""" - file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the file. Required.""" - index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the file in the list of files. Required.""" - filename: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The filename of the file cited. Required.""" - - @overload - def __init__( - self, - *, - file_id: str, - index: int, - filename: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = AnnotationType.FILE_CITATION # type: ignore - - -class AnnotationFilePath(Annotation, discriminator="file_path"): - """A path to a file. - - :ivar type: The type of the file path. Always ``file_path``. Required. - :vartype type: str or ~azure.ai.projects.models.FILE_PATH - :ivar file_id: The ID of the file. Required. - :vartype file_id: str - :ivar index: The index of the file in the list of files. Required. 
- :vartype index: int - """ - - type: Literal[AnnotationType.FILE_PATH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the file path. Always ``file_path``. Required.""" - file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the file. Required.""" - index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the file in the list of files. Required.""" - - @overload - def __init__( - self, - *, - file_id: str, - index: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = AnnotationType.FILE_PATH # type: ignore - - -class AnnotationUrlCitation(Annotation, discriminator="url_citation"): - """A citation for a web resource used to generate a model response. - - :ivar type: The type of the URL citation. Always ``url_citation``. Required. - :vartype type: str or ~azure.ai.projects.models.URL_CITATION - :ivar url: The URL of the web resource. Required. - :vartype url: str - :ivar start_index: The index of the first character of the URL citation in the message. - Required. - :vartype start_index: int - :ivar end_index: The index of the last character of the URL citation in the message. Required. - :vartype end_index: int - :ivar title: The title of the web resource. Required. - :vartype title: str - """ - - type: Literal[AnnotationType.URL_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the URL citation. Always ``url_citation``. Required.""" - url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The URL of the web resource. 
Required.""" - start_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the first character of the URL citation in the message. Required.""" - end_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the last character of the URL citation in the message. Required.""" - title: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The title of the web resource. Required.""" - - @overload - def __init__( - self, - *, - url: str, - start_index: int, - end_index: int, - title: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = AnnotationType.URL_CITATION # type: ignore - - -class ApiError(_Model): - """ApiError. - - :ivar code: The error code. Required. - :vartype code: str - :ivar message: A human-readable description of the error. Required. - :vartype message: str - :ivar target: The target of the error, if applicable. - :vartype target: str - :ivar details: Additional details about the error. Required. - :vartype details: list[~azure.ai.projects.models.ApiError] - :ivar innererror: The inner error, if any. - :vartype innererror: ~azure.ai.projects.models.ApiInnerError - """ - - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error code. Required.""" - message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A human-readable description of the error. 
Required.""" - target: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The target of the error, if applicable.""" - details: list["_models.ApiError"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Additional details about the error. Required.""" - innererror: Optional["_models.ApiInnerError"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The inner error, if any.""" - - @overload - def __init__( - self, - *, - code: str, - message: str, - details: list["_models.ApiError"], - target: Optional[str] = None, - innererror: Optional["_models.ApiInnerError"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ApiErrorResponse(_Model): - """Error response for API failures. - - :ivar error: Required. - :vartype error: ~azure.ai.projects.models.ApiError - """ - - error: "_models.ApiError" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - - @overload - def __init__( - self, - *, - error: "_models.ApiError", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ApiInnerError(_Model): - """ApiInnerError. - - :ivar code: The error code. Required. - :vartype code: str - :ivar innererror: The inner error, if any. - :vartype innererror: ~azure.ai.projects.models.ApiInnerError - """ - - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error code. 
Required.""" - innererror: Optional["_models.ApiInnerError"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The inner error, if any.""" - - @overload - def __init__( - self, - *, - code: str, - innererror: Optional["_models.ApiInnerError"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ApiKeyCredentials(BaseCredentials, discriminator="ApiKey"): - """API Key Credential definition. - - :ivar type: The credential type. Required. API Key credential - :vartype type: str or ~azure.ai.projects.models.API_KEY - :ivar api_key: API Key. - :vartype api_key: str - """ - - type: Literal[CredentialType.API_KEY] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. API Key credential""" - api_key: Optional[str] = rest_field(name="key", visibility=["read"]) - """API Key.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = CredentialType.API_KEY # type: ignore - - -class Location(_Model): - """Location. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ApproximateLocation - - :ivar type: Required. "approximate" - :vartype type: str or ~azure.ai.projects.models.LocationType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. 
\"approximate\"""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ApproximateLocation(Location, discriminator="approximate"): - """ApproximateLocation. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.APPROXIMATE - :ivar country: - :vartype country: str - :ivar region: - :vartype region: str - :ivar city: - :vartype city: str - :ivar timezone: - :vartype timezone: str - """ - - type: Literal[LocationType.APPROXIMATE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - country: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - region: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - city: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - timezone: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - - @overload - def __init__( - self, - *, - country: Optional[str] = None, - region: Optional[str] = None, - city: Optional[str] = None, - timezone: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = LocationType.APPROXIMATE # type: ignore - - -class Target(_Model): - """Base class for targets with discriminator support. - - You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - AzureAIAgentTarget, AzureAIAssistantTarget, AzureAIModelTarget - - :ivar type: The type of target. Required. Default value is None. - :vartype type: str - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """The type of target. Required. Default value is None.""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AzureAIAgentTarget(Target, discriminator="azure_ai_agent"): - """Represents a target specifying an Azure AI agent. - - :ivar type: The type of target, always ``azure_ai_agent``. Required. Default value is - "azure_ai_agent". - :vartype type: str - :ivar name: The unique identifier of the Azure AI agent. Required. - :vartype name: str - :ivar version: The version of the Azure AI agent. - :vartype version: str - :ivar tool_descriptions: The parameters used to control the sampling behavior of the agent - during text generation. - :vartype tool_descriptions: list[~azure.ai.projects.models.ToolDescription] - """ - - type: Literal["azure_ai_agent"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of target, always ``azure_ai_agent``. Required. Default value is \"azure_ai_agent\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the Azure AI agent. 
Required.""" - version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The version of the Azure AI agent.""" - tool_descriptions: Optional[list["_models.ToolDescription"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The parameters used to control the sampling behavior of the agent during text generation.""" - - @overload - def __init__( - self, - *, - name: str, - version: Optional[str] = None, - tool_descriptions: Optional[list["_models.ToolDescription"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = "azure_ai_agent" # type: ignore - - -class AzureAISearchAgentTool(Tool, discriminator="azure_ai_search"): - """The input definition information for an Azure AI search tool as used to configure an agent. - - :ivar type: The object type, which is always 'azure_ai_search'. Required. - :vartype type: str or ~azure.ai.projects.models.AZURE_AI_SEARCH - :ivar azure_ai_search: The azure ai search index resource. Required. - :vartype azure_ai_search: ~azure.ai.projects.models.AzureAISearchToolResource - """ - - type: Literal[ToolType.AZURE_AI_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'azure_ai_search'. Required.""" - azure_ai_search: "_models.AzureAISearchToolResource" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The azure ai search index resource. Required.""" - - @overload - def __init__( - self, - *, - azure_ai_search: "_models.AzureAISearchToolResource", - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.AZURE_AI_SEARCH # type: ignore - - -class Index(_Model): - """Index resource Definition. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AzureAISearchIndex, CosmosDBIndex, ManagedAzureAISearchIndex - - :ivar type: Type of index. Required. Known values are: "AzureSearch", - "CosmosDBNoSqlVectorStore", and "ManagedAzureSearch". - :vartype type: str or ~azure.ai.projects.models.IndexType - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Type of index. Required. Known values are: \"AzureSearch\", \"CosmosDBNoSqlVectorStore\", and - \"ManagedAzureSearch\".""" - id: Optional[str] = rest_field(visibility=["read"]) - """Asset ID, a unique identifier for the asset.""" - name: str = rest_field(visibility=["read"]) - """The name of the resource. Required.""" - version: str = rest_field(visibility=["read"]) - """The version of the resource. Required.""" - description: Optional[str] = rest_field(visibility=["create", "update"]) - """The asset description text.""" - tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) - """Tag dictionary. 
Tags can be added, removed, and updated.""" - - @overload - def __init__( - self, - *, - type: str, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AzureAISearchIndex(Index, discriminator="AzureSearch"): - """Azure AI Search Index Definition. - - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar type: Type of index. Required. Azure search - :vartype type: str or ~azure.ai.projects.models.AZURE_SEARCH - :ivar connection_name: Name of connection to Azure AI Search. Required. - :vartype connection_name: str - :ivar index_name: Name of index in Azure AI Search resource to attach. Required. - :vartype index_name: str - :ivar field_mapping: Field mapping configuration. - :vartype field_mapping: ~azure.ai.projects.models.FieldMapping - """ - - type: Literal[IndexType.AZURE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Type of index. Required. Azure search""" - connection_name: str = rest_field(name="connectionName", visibility=["create"]) - """Name of connection to Azure AI Search. Required.""" - index_name: str = rest_field(name="indexName", visibility=["create"]) - """Name of index in Azure AI Search resource to attach. 
Required.""" - field_mapping: Optional["_models.FieldMapping"] = rest_field(name="fieldMapping", visibility=["create"]) - """Field mapping configuration.""" - - @overload - def __init__( - self, - *, - connection_name: str, - index_name: str, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - field_mapping: Optional["_models.FieldMapping"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = IndexType.AZURE_SEARCH # type: ignore - - -class AzureAISearchToolResource(_Model): - """A set of index resources used by the ``azure_ai_search`` tool. - - :ivar index_list: The indices attached to this agent. There can be a maximum of 1 index - resource attached to the agent. - :vartype index_list: list[~azure.ai.projects.models.AISearchIndexResource] - """ - - index_list: Optional[list["_models.AISearchIndexResource"]] = rest_field( - name="indexList", visibility=["read", "create", "update", "delete", "query"] - ) - """The indices attached to this agent. There can be a maximum of 1 index - resource attached to the agent.""" - - @overload - def __init__( - self, - *, - index_list: Optional[list["_models.AISearchIndexResource"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AzureFunctionAgentTool(Tool, discriminator="azure_function"): - """The input definition information for an Azure Function Tool, as used to configure an Agent. - - :ivar type: The object type, which is always 'browser_automation'. Required. 
- :vartype type: str or ~azure.ai.projects.models.AZURE_FUNCTION - :ivar azure_function: The Azure Function Tool definition. Required. - :vartype azure_function: ~azure.ai.projects.models.AzureFunctionDefinition - """ - - type: Literal[ToolType.AZURE_FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'browser_automation'. Required.""" - azure_function: "_models.AzureFunctionDefinition" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The Azure Function Tool definition. Required.""" - - @overload - def __init__( - self, - *, - azure_function: "_models.AzureFunctionDefinition", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.AZURE_FUNCTION # type: ignore - - -class AzureFunctionBinding(_Model): - """The structure for keeping storage queue name and URI. - - :ivar type: The type of binding, which is always 'storage_queue'. Required. Default value is - "storage_queue". - :vartype type: str - :ivar storage_queue: Storage queue. Required. - :vartype storage_queue: ~azure.ai.projects.models.AzureFunctionStorageQueue - """ - - type: Literal["storage_queue"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The type of binding, which is always 'storage_queue'. Required. Default value is - \"storage_queue\".""" - storage_queue: "_models.AzureFunctionStorageQueue" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Storage queue. Required.""" - - @overload - def __init__( - self, - *, - storage_queue: "_models.AzureFunctionStorageQueue", - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type: Literal["storage_queue"] = "storage_queue" - - -class AzureFunctionDefinition(_Model): - """The definition of Azure function. - - :ivar function: The definition of azure function and its parameters. Required. - :vartype function: ~azure.ai.projects.models.AzureFunctionDefinitionFunction - :ivar input_binding: Input storage queue. The queue storage trigger runs a function as messages - are added to it. Required. - :vartype input_binding: ~azure.ai.projects.models.AzureFunctionBinding - :ivar output_binding: Output storage queue. The function writes output to this queue when the - input items are processed. Required. - :vartype output_binding: ~azure.ai.projects.models.AzureFunctionBinding - """ - - function: "_models.AzureFunctionDefinitionFunction" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The definition of azure function and its parameters. Required.""" - input_binding: "_models.AzureFunctionBinding" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Input storage queue. The queue storage trigger runs a function as messages are added to it. - Required.""" - output_binding: "_models.AzureFunctionBinding" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Output storage queue. The function writes output to this queue when the input items are - processed. Required.""" - - @overload - def __init__( - self, - *, - function: "_models.AzureFunctionDefinitionFunction", - input_binding: "_models.AzureFunctionBinding", - output_binding: "_models.AzureFunctionBinding", - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AzureFunctionDefinitionFunction(_Model): - """AzureFunctionDefinitionFunction. - - :ivar name: The name of the function to be called. Required. - :vartype name: str - :ivar description: A description of what the function does, used by the model to choose when - and how to call the function. - :vartype description: str - :ivar parameters: The parameters the functions accepts, described as a JSON Schema object. - Required. - :vartype parameters: any - """ - - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to be called. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A description of what the function does, used by the model to choose when and how to call the - function.""" - parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The parameters the functions accepts, described as a JSON Schema object. Required.""" - - @overload - def __init__( - self, - *, - name: str, - parameters: Any, - description: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AzureFunctionStorageQueue(_Model): - """The structure for keeping storage queue name and URI. - - :ivar queue_service_endpoint: URI to the Azure Storage Queue service allowing you to manipulate - a queue. Required. 
- :vartype queue_service_endpoint: str - :ivar queue_name: The name of an Azure function storage queue. Required. - :vartype queue_name: str - """ - - queue_service_endpoint: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """URI to the Azure Storage Queue service allowing you to manipulate a queue. Required.""" - queue_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of an Azure function storage queue. Required.""" - - @overload - def __init__( - self, - *, - queue_service_endpoint: str, - queue_name: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class TargetConfig(_Model): - """Abstract class for target configuration. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AzureOpenAIModelConfiguration - - :ivar type: Type of the model configuration. Required. Default value is None. - :vartype type: str - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Type of the model configuration. Required. Default value is None.""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AzureOpenAIModelConfiguration(TargetConfig, discriminator="AzureOpenAIModel"): - """Azure OpenAI model configuration. The API version would be selected by the service for querying - the model. - - :ivar type: Required. 
Default value is "AzureOpenAIModel". - :vartype type: str - :ivar model_deployment_name: Deployment name for AOAI model. Example: gpt-4o if in AIServices - or connection based ``connection_name/deployment_name`` (e.g. ``my-aoai-connection/gpt-4o``). - Required. - :vartype model_deployment_name: str - """ - - type: Literal["AzureOpenAIModel"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Default value is \"AzureOpenAIModel\".""" - model_deployment_name: str = rest_field( - name="modelDeploymentName", visibility=["read", "create", "update", "delete", "query"] - ) - """Deployment name for AOAI model. Example: gpt-4o if in AIServices or connection based - ``connection_name/deployment_name`` (e.g. ``my-aoai-connection/gpt-4o``). Required.""" - - @overload - def __init__( - self, - *, - model_deployment_name: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = "AzureOpenAIModel" # type: ignore - - -class BingCustomSearchAgentTool(Tool, discriminator="bing_custom_search_preview"): - """The input definition information for a Bing custom search tool as used to configure an agent. - - :ivar type: The object type, which is always 'bing_custom_search'. Required. - :vartype type: str or ~azure.ai.projects.models.BING_CUSTOM_SEARCH_PREVIEW - :ivar bing_custom_search_preview: The bing custom search tool parameters. Required. - :vartype bing_custom_search_preview: ~azure.ai.projects.models.BingCustomSearchToolParameters - """ - - type: Literal[ToolType.BING_CUSTOM_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'bing_custom_search'. 
Required.""" - bing_custom_search_preview: "_models.BingCustomSearchToolParameters" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The bing custom search tool parameters. Required.""" - - @overload - def __init__( - self, - *, - bing_custom_search_preview: "_models.BingCustomSearchToolParameters", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.BING_CUSTOM_SEARCH_PREVIEW # type: ignore - - -class BingCustomSearchConfiguration(_Model): - """A bing custom search configuration. - - :ivar project_connection_id: Project connection id for grounding with bing search. Required. - :vartype project_connection_id: str - :ivar instance_name: Name of the custom configuration instance given to config. Required. - :vartype instance_name: str - :ivar market: The market where the results come from. - :vartype market: str - :ivar set_lang: The language to use for user interface strings when calling Bing API. - :vartype set_lang: str - :ivar count: The number of search results to return in the bing api response. - :vartype count: int - :ivar freshness: Filter search results by a specific time range. Accepted values: - `https://learn.microsoft.com/bing/search-apis/bing-web-search/reference/query-parameters - `_. - :vartype freshness: str - """ - - project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Project connection id for grounding with bing search. Required.""" - instance_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Name of the custom configuration instance given to config. 
Required.""" - market: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The market where the results come from.""" - set_lang: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The language to use for user interface strings when calling Bing API.""" - count: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of search results to return in the bing api response.""" - freshness: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Filter search results by a specific time range. Accepted values: - `https://learn.microsoft.com/bing/search-apis/bing-web-search/reference/query-parameters - `_.""" - - @overload - def __init__( - self, - *, - project_connection_id: str, - instance_name: str, - market: Optional[str] = None, - set_lang: Optional[str] = None, - count: Optional[int] = None, - freshness: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class BingCustomSearchToolParameters(_Model): - """The bing custom search tool parameters. - - :ivar search_configurations: The project connections attached to this tool. There can be a - maximum of 1 connection - resource attached to the tool. Required. - :vartype search_configurations: list[~azure.ai.projects.models.BingCustomSearchConfiguration] - """ - - search_configurations: list["_models.BingCustomSearchConfiguration"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The project connections attached to this tool. There can be a maximum of 1 connection - resource attached to the tool. 
Required.""" - - @overload - def __init__( - self, - *, - search_configurations: list["_models.BingCustomSearchConfiguration"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class BingGroundingAgentTool(Tool, discriminator="bing_grounding"): - """The input definition information for a bing grounding search tool as used to configure an - agent. - - :ivar type: The object type, which is always 'bing_grounding'. Required. - :vartype type: str or ~azure.ai.projects.models.BING_GROUNDING - :ivar bing_grounding: The bing grounding search tool parameters. Required. - :vartype bing_grounding: ~azure.ai.projects.models.BingGroundingSearchToolParameters - """ - - type: Literal[ToolType.BING_GROUNDING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'bing_grounding'. Required.""" - bing_grounding: "_models.BingGroundingSearchToolParameters" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The bing grounding search tool parameters. Required.""" - - @overload - def __init__( - self, - *, - bing_grounding: "_models.BingGroundingSearchToolParameters", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.BING_GROUNDING # type: ignore - - -class BingGroundingSearchConfiguration(_Model): - """Search configuration for Bing Grounding. - - :ivar project_connection_id: Project connection id for grounding with bing search. Required. 
- :vartype project_connection_id: str - :ivar market: The market where the results come from. - :vartype market: str - :ivar set_lang: The language to use for user interface strings when calling Bing API. - :vartype set_lang: str - :ivar count: The number of search results to return in the bing api response. - :vartype count: int - :ivar freshness: Filter search results by a specific time range. Accepted values: - `https://learn.microsoft.com/bing/search-apis/bing-web-search/reference/query-parameters - `_. - :vartype freshness: str - """ - - project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Project connection id for grounding with bing search. Required.""" - market: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The market where the results come from.""" - set_lang: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The language to use for user interface strings when calling Bing API.""" - count: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of search results to return in the bing api response.""" - freshness: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Filter search results by a specific time range. Accepted values: - `https://learn.microsoft.com/bing/search-apis/bing-web-search/reference/query-parameters - `_.""" - - @overload - def __init__( - self, - *, - project_connection_id: str, - market: Optional[str] = None, - set_lang: Optional[str] = None, - count: Optional[int] = None, - freshness: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class BingGroundingSearchToolParameters(_Model): - """The bing grounding search tool parameters. - - :ivar project_connections: The project connections attached to this tool. There can be a - maximum of 1 connection - resource attached to the tool. Required. - :vartype project_connections: ~azure.ai.projects.models.ToolProjectConnectionList - :ivar search_configurations: The search configurations attached to this tool. There can be a - maximum of 1 - search configuration resource attached to the tool. Required. - :vartype search_configurations: - list[~azure.ai.projects.models.BingGroundingSearchConfiguration] - """ - - project_connections: "_models.ToolProjectConnectionList" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The project connections attached to this tool. There can be a maximum of 1 connection - resource attached to the tool. Required.""" - search_configurations: list["_models.BingGroundingSearchConfiguration"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The search configurations attached to this tool. There can be a maximum of 1 - search configuration resource attached to the tool. Required.""" - - @overload - def __init__( - self, - *, - project_connections: "_models.ToolProjectConnectionList", - search_configurations: list["_models.BingGroundingSearchConfiguration"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class BlobReference(_Model): - """Blob reference details. - - :ivar blob_uri: Blob URI path for client to upload data. Example: - `https://blob.windows.core.net/Container/Path `_. 
- Required. - :vartype blob_uri: str - :ivar storage_account_arm_id: ARM ID of the storage account to use. Required. - :vartype storage_account_arm_id: str - :ivar credential: Credential info to access the storage account. Required. - :vartype credential: ~azure.ai.projects.models.BlobReferenceSasCredential - """ - - blob_uri: str = rest_field(name="blobUri", visibility=["read", "create", "update", "delete", "query"]) - """Blob URI path for client to upload data. Example: `https://blob.windows.core.net/Container/Path - `_. Required.""" - storage_account_arm_id: str = rest_field( - name="storageAccountArmId", visibility=["read", "create", "update", "delete", "query"] - ) - """ARM ID of the storage account to use. Required.""" - credential: "_models.BlobReferenceSasCredential" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Credential info to access the storage account. Required.""" - - @overload - def __init__( - self, - *, - blob_uri: str, - storage_account_arm_id: str, - credential: "_models.BlobReferenceSasCredential", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class BlobReferenceSasCredential(_Model): - """SAS Credential definition. - - :ivar sas_uri: SAS uri. Required. - :vartype sas_uri: str - :ivar type: Type of credential. Required. Default value is "SAS". - :vartype type: str - """ - - sas_uri: str = rest_field(name="sasUri", visibility=["read"]) - """SAS uri. Required.""" - type: Literal["SAS"] = rest_field(visibility=["read"]) - """Type of credential. Required. 
Default value is \"SAS\".""" - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type: Literal["SAS"] = "SAS" - - -class BrowserAutomationAgentTool(Tool, discriminator="browser_automation_preview"): - """The input definition information for a Browser Automation Tool, as used to configure an Agent. - - :ivar type: The object type, which is always 'browser_automation'. Required. - :vartype type: str or ~azure.ai.projects.models.BROWSER_AUTOMATION_PREVIEW - :ivar browser_automation_preview: The Browser Automation Tool parameters. Required. - :vartype browser_automation_preview: ~azure.ai.projects.models.BrowserAutomationToolParameters - """ - - type: Literal[ToolType.BROWSER_AUTOMATION_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'browser_automation'. Required.""" - browser_automation_preview: "_models.BrowserAutomationToolParameters" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The Browser Automation Tool parameters. Required.""" - - @overload - def __init__( - self, - *, - browser_automation_preview: "_models.BrowserAutomationToolParameters", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.BROWSER_AUTOMATION_PREVIEW # type: ignore - - -class BrowserAutomationToolConnectionParameters(_Model): # pylint: disable=name-too-long - """Definition of input parameters for the connection used by the Browser Automation Tool. - - :ivar id: The ID of the project connection to your Azure Playwright resource. Required. 
- :vartype id: str - """ - - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the project connection to your Azure Playwright resource. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class BrowserAutomationToolParameters(_Model): - """Definition of input parameters for the Browser Automation Tool. - - :ivar project_connection: The project connection parameters associated with the Browser - Automation Tool. Required. - :vartype project_connection: - ~azure.ai.projects.models.BrowserAutomationToolConnectionParameters - """ - - project_connection: "_models.BrowserAutomationToolConnectionParameters" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The project connection parameters associated with the Browser Automation Tool. Required.""" - - @overload - def __init__( - self, - *, - project_connection: "_models.BrowserAutomationToolConnectionParameters", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class CaptureStructuredOutputsTool(Tool, discriminator="capture_structured_outputs"): - """A tool for capturing structured outputs. - - :ivar type: The type of the tool. Always ``capture_structured_outputs``. Required. - :vartype type: str or ~azure.ai.projects.models.CAPTURE_STRUCTURED_OUTPUTS - :ivar outputs: The structured outputs to capture from the model. Required. 
- :vartype outputs: ~azure.ai.projects.models.StructuredOutputDefinition - """ - - type: Literal[ToolType.CAPTURE_STRUCTURED_OUTPUTS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the tool. Always ``capture_structured_outputs``. Required.""" - outputs: "_models.StructuredOutputDefinition" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The structured outputs to capture from the model. Required.""" - - @overload - def __init__( - self, - *, - outputs: "_models.StructuredOutputDefinition", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.CAPTURE_STRUCTURED_OUTPUTS # type: ignore - - -class ChartCoordinate(_Model): - """Coordinates for the analysis chart. - - :ivar x: X-axis coordinate. Required. - :vartype x: int - :ivar y: Y-axis coordinate. Required. - :vartype y: int - :ivar size: Size of the chart element. Required. - :vartype size: int - """ - - x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """X-axis coordinate. Required.""" - y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Y-axis coordinate. Required.""" - size: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Size of the chart element. Required.""" - - @overload - def __init__( - self, - *, - x: int, - y: int, - size: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MemoryItem(_Model): - """A single memory item stored in the memory store, containing content and metadata. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ChatSummaryMemoryItem, UserProfileMemoryItem - - :ivar memory_id: The unique ID of the memory item. Required. - :vartype memory_id: str - :ivar updated_at: The last update time of the memory item. Required. - :vartype updated_at: ~datetime.datetime - :ivar scope: The namespace that logically groups and isolates memories, such as a user ID. - Required. - :vartype scope: str - :ivar content: The content of the memory. Required. - :vartype content: str - :ivar kind: The kind of the memory item. Required. Known values are: "user_profile" and - "chat_summary". - :vartype kind: str or ~azure.ai.projects.models.MemoryItemKind - """ - - __mapping__: dict[str, _Model] = {} - memory_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the memory item. Required.""" - updated_at: datetime.datetime = rest_field( - visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" - ) - """The last update time of the memory item. Required.""" - scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The namespace that logically groups and isolates memories, such as a user ID. Required.""" - content: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content of the memory. Required.""" - kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) - """The kind of the memory item. Required. 
Known values are: \"user_profile\" and \"chat_summary\".""" - - @overload - def __init__( - self, - *, - memory_id: str, - updated_at: datetime.datetime, - scope: str, - content: str, - kind: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ChatSummaryMemoryItem(MemoryItem, discriminator="chat_summary"): - """A memory item containing a summary extracted from conversations. - - :ivar memory_id: The unique ID of the memory item. Required. - :vartype memory_id: str - :ivar updated_at: The last update time of the memory item. Required. - :vartype updated_at: ~datetime.datetime - :ivar scope: The namespace that logically groups and isolates memories, such as a user ID. - Required. - :vartype scope: str - :ivar content: The content of the memory. Required. - :vartype content: str - :ivar kind: The kind of the memory item. Required. Summary of chat conversations. - :vartype kind: str or ~azure.ai.projects.models.CHAT_SUMMARY - """ - - kind: Literal[MemoryItemKind.CHAT_SUMMARY] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The kind of the memory item. Required. Summary of chat conversations.""" - - @overload - def __init__( - self, - *, - memory_id: str, - updated_at: datetime.datetime, - scope: str, - content: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.kind = MemoryItemKind.CHAT_SUMMARY # type: ignore - - -class ClusterInsightResult(_Model): - """Insights from the cluster analysis. 
- - :ivar summary: Summary of the insights report. Required. - :vartype summary: ~azure.ai.projects.models.InsightSummary - :ivar clusters: List of clusters identified in the insights. Required. - :vartype clusters: list[~azure.ai.projects.models.InsightCluster] - :ivar coordinates: Optional mapping of IDs to 2D coordinates used by the UX for - visualization. - The map keys are string identifiers (for example, a cluster id or a sample id) - and the values are the coordinates and visual size for rendering on a 2D chart. - This property is omitted unless the client requests coordinates (for example, - by passing ``includeCoordinates=true`` as a query parameter). - Example: - { - "cluster-1": { "x": 12, "y": 34, "size": 8 }, - "sample-123": { "x": 18, "y": 22, "size": 4 } - } - Coordinates are intended only for client-side visualization and do not - modify the canonical insights results. - :vartype coordinates: dict[str, ~azure.ai.projects.models.ChartCoordinate] - """ - - summary: "_models.InsightSummary" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Summary of the insights report. Required.""" - clusters: list["_models.InsightCluster"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """List of clusters identified in the insights. Required.""" - coordinates: Optional[dict[str, "_models.ChartCoordinate"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """ Optional mapping of IDs to 2D coordinates used by the UX for visualization. - The map keys are string identifiers (for example, a cluster id or a sample id) - and the values are the coordinates and visual size for rendering on a 2D chart. - This property is omitted unless the client requests coordinates (for example, - by passing ``includeCoordinates=true`` as a query parameter). 
- Example: - { - \"cluster-1\": { \"x\": 12, \"y\": 34, \"size\": 8 }, - \"sample-123\": { \"x\": 18, \"y\": 22, \"size\": 4 } - } - Coordinates are intended only for client-side visualization and do not - modify the canonical insights results.""" - - @overload - def __init__( - self, - *, - summary: "_models.InsightSummary", - clusters: list["_models.InsightCluster"], - coordinates: Optional[dict[str, "_models.ChartCoordinate"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ClusterTokenUsage(_Model): - """Token usage for cluster analysis. - - :ivar input_token_usage: input token usage. Required. - :vartype input_token_usage: int - :ivar output_token_usage: output token usage. Required. - :vartype output_token_usage: int - :ivar total_token_usage: total token usage. Required. - :vartype total_token_usage: int - """ - - input_token_usage: int = rest_field( - name="inputTokenUsage", visibility=["read", "create", "update", "delete", "query"] - ) - """input token usage. Required.""" - output_token_usage: int = rest_field( - name="outputTokenUsage", visibility=["read", "create", "update", "delete", "query"] - ) - """output token usage. Required.""" - total_token_usage: int = rest_field( - name="totalTokenUsage", visibility=["read", "create", "update", "delete", "query"] - ) - """total token usage. Required.""" - - @overload - def __init__( - self, - *, - input_token_usage: int, - output_token_usage: int, - total_token_usage: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluatorDefinition(_Model): - """Base evaluator configuration with discriminator. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - CodeBasedEvaluatorDefinition, PromptBasedEvaluatorDefinition - - :ivar type: The type of evaluator definition. Required. Known values are: "prompt", "code", - "prompt_and_code", "service", and "openai_graders". - :vartype type: str or ~azure.ai.projects.models.EvaluatorDefinitionType - :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. - This includes parameters like type, properties, required. - :vartype init_parameters: any - :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This - includes parameters like type, properties, required. - :vartype data_schema: any - :ivar metrics: List of output metrics produced by this evaluator. - :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """The type of evaluator definition. Required. Known values are: \"prompt\", \"code\", - \"prompt_and_code\", \"service\", and \"openai_graders\".""" - init_parameters: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The JSON schema (Draft 2020-12) for the evaluator's input parameters. This includes parameters - like type, properties, required.""" - data_schema: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The JSON schema (Draft 2020-12) for the evaluator's input data. 
This includes parameters like - type, properties, required.""" - metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """List of output metrics produced by this evaluator.""" - - @overload - def __init__( - self, - *, - type: str, - init_parameters: Optional[Any] = None, - data_schema: Optional[Any] = None, - metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class CodeBasedEvaluatorDefinition(EvaluatorDefinition, discriminator="code"): - """Code-based evaluator definition using python code. - - :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. - This includes parameters like type, properties, required. - :vartype init_parameters: any - :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This - includes parameters like type, properties, required. - :vartype data_schema: any - :ivar metrics: List of output metrics produced by this evaluator. - :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] - :ivar type: Required. Code-based definition - :vartype type: str or ~azure.ai.projects.models.CODE - :ivar code_text: Inline code text for the evaluator. Required. - :vartype code_text: str - """ - - type: Literal[EvaluatorDefinitionType.CODE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Code-based definition""" - code_text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Inline code text for the evaluator. 
Required.""" - - @overload - def __init__( - self, - *, - code_text: str, - init_parameters: Optional[Any] = None, - data_schema: Optional[Any] = None, - metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = EvaluatorDefinitionType.CODE # type: ignore - - -class CodeInterpreterOutput(_Model): - """CodeInterpreterOutput. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - CodeInterpreterOutputImage, CodeInterpreterOutputLogs - - :ivar type: Required. Known values are: "logs" and "image". - :vartype type: str or ~azure.ai.projects.models.CodeInterpreterOutputType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"logs\" and \"image\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class CodeInterpreterOutputImage(CodeInterpreterOutput, discriminator="image"): - """The image output from the code interpreter. - - :ivar type: The type of the output. Always 'image'. Required. - :vartype type: str or ~azure.ai.projects.models.IMAGE - :ivar url: The URL of the image output from the code interpreter. Required. 
- :vartype url: str - """ - - type: Literal[CodeInterpreterOutputType.IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the output. Always 'image'. Required.""" - url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The URL of the image output from the code interpreter. Required.""" - - @overload - def __init__( - self, - *, - url: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = CodeInterpreterOutputType.IMAGE # type: ignore - - -class CodeInterpreterOutputLogs(CodeInterpreterOutput, discriminator="logs"): - """The logs output from the code interpreter. - - :ivar type: The type of the output. Always 'logs'. Required. - :vartype type: str or ~azure.ai.projects.models.LOGS - :ivar logs: The logs output from the code interpreter. Required. - :vartype logs: str - """ - - type: Literal[CodeInterpreterOutputType.LOGS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the output. Always 'logs'. Required.""" - logs: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The logs output from the code interpreter. Required.""" - - @overload - def __init__( - self, - *, - logs: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = CodeInterpreterOutputType.LOGS # type: ignore - - -class CodeInterpreterTool(Tool, discriminator="code_interpreter"): - """A tool that runs Python code to help generate a response to a prompt. - - :ivar type: The type of the code interpreter tool. Always ``code_interpreter``. Required. - :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER - :ivar container: The code interpreter container. Can be a container ID or an object that - specifies uploaded file IDs to make available to your code. Required. Is either a str type or a - CodeInterpreterToolAuto type. - :vartype container: str or ~azure.ai.projects.models.CodeInterpreterToolAuto - """ - - type: Literal[ToolType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the code interpreter tool. Always ``code_interpreter``. Required.""" - container: Union[str, "_models.CodeInterpreterToolAuto"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The code interpreter container. Can be a container ID or an object that - specifies uploaded file IDs to make available to your code. Required. Is either a str type or a - CodeInterpreterToolAuto type.""" - - @overload - def __init__( - self, - *, - container: Union[str, "_models.CodeInterpreterToolAuto"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.CODE_INTERPRETER # type: ignore - - -class CodeInterpreterToolAuto(_Model): - """Configuration for a code interpreter container. Optionally specify the IDs - of the files to run the code on. 
- - :ivar type: Always ``auto``. Required. Default value is "auto". - :vartype type: str - :ivar file_ids: An optional list of uploaded files to make available to your code. - :vartype file_ids: list[str] - """ - - type: Literal["auto"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Always ``auto``. Required. Default value is \"auto\".""" - file_ids: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An optional list of uploaded files to make available to your code.""" - - @overload - def __init__( - self, - *, - file_ids: Optional[list[str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type: Literal["auto"] = "auto" - - -class ItemParam(_Model): - """Content item used to generate a response. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - CodeInterpreterToolCallItemParam, ComputerToolCallItemParam, ComputerToolCallOutputItemParam, - FileSearchToolCallItemParam, FunctionToolCallItemParam, FunctionToolCallOutputItemParam, - ImageGenToolCallItemParam, ItemReferenceItemParam, LocalShellToolCallItemParam, - LocalShellToolCallOutputItemParam, MCPApprovalRequestItemParam, MCPApprovalResponseItemParam, - MCPCallItemParam, MCPListToolsItemParam, MemorySearchToolCallItemParam, - ResponsesMessageItemParam, ReasoningItemParam, WebSearchToolCallItemParam - - :ivar type: Required. 
Known values are: "message", "file_search_call", "function_call", - "function_call_output", "computer_call", "computer_call_output", "web_search_call", - "reasoning", "item_reference", "image_generation_call", "code_interpreter_call", - "local_shell_call", "local_shell_call_output", "mcp_list_tools", "mcp_approval_request", - "mcp_approval_response", "mcp_call", "structured_outputs", "workflow_action", - "memory_search_call", and "oauth_consent_request". - :vartype type: str or ~azure.ai.projects.models.ItemType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"message\", \"file_search_call\", \"function_call\", - \"function_call_output\", \"computer_call\", \"computer_call_output\", \"web_search_call\", - \"reasoning\", \"item_reference\", \"image_generation_call\", \"code_interpreter_call\", - \"local_shell_call\", \"local_shell_call_output\", \"mcp_list_tools\", - \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"structured_outputs\", - \"workflow_action\", \"memory_search_call\", and \"oauth_consent_request\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class CodeInterpreterToolCallItemParam(ItemParam, discriminator="code_interpreter_call"): - """A tool call to run code. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER_CALL - :ivar container_id: The ID of the container used to run the code. Required. - :vartype container_id: str - :ivar code: The code to run, or null if not available. Required. 
- :vartype code: str - :ivar outputs: The outputs generated by the code interpreter, such as logs or images. - Can be null if no outputs are available. Required. - :vartype outputs: list[~azure.ai.projects.models.CodeInterpreterOutput] - """ - - type: Literal[ItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the container used to run the code. Required.""" - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The code to run, or null if not available. Required.""" - outputs: list["_models.CodeInterpreterOutput"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The outputs generated by the code interpreter, such as logs or images. - Can be null if no outputs are available. Required.""" - - @overload - def __init__( - self, - *, - container_id: str, - code: str, - outputs: list["_models.CodeInterpreterOutput"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.CODE_INTERPRETER_CALL # type: ignore - - -class ItemResource(_Model): - """Content item used to generate a response. - - You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - CodeInterpreterToolCallItemResource, ComputerToolCallItemResource, - ComputerToolCallOutputItemResource, FileSearchToolCallItemResource, - FunctionToolCallItemResource, FunctionToolCallOutputItemResource, ImageGenToolCallItemResource, - LocalShellToolCallItemResource, LocalShellToolCallOutputItemResource, - MCPApprovalRequestItemResource, MCPApprovalResponseItemResource, MCPCallItemResource, - MCPListToolsItemResource, MemorySearchToolCallItemResource, ResponsesMessageItemResource, - OAuthConsentRequestItemResource, ReasoningItemResource, StructuredOutputsItemResource, - WebSearchToolCallItemResource, WorkflowActionOutputItemResource - - :ivar type: Required. Known values are: "message", "file_search_call", "function_call", - "function_call_output", "computer_call", "computer_call_output", "web_search_call", - "reasoning", "item_reference", "image_generation_call", "code_interpreter_call", - "local_shell_call", "local_shell_call_output", "mcp_list_tools", "mcp_approval_request", - "mcp_approval_response", "mcp_call", "structured_outputs", "workflow_action", - "memory_search_call", and "oauth_consent_request". - :vartype type: str or ~azure.ai.projects.models.ItemType - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. 
Known values are: \"message\", \"file_search_call\", \"function_call\", - \"function_call_output\", \"computer_call\", \"computer_call_output\", \"web_search_call\", - \"reasoning\", \"item_reference\", \"image_generation_call\", \"code_interpreter_call\", - \"local_shell_call\", \"local_shell_call_output\", \"mcp_list_tools\", - \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"structured_outputs\", - \"workflow_action\", \"memory_search_call\", and \"oauth_consent_request\".""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - created_by: Optional["_models.CreatedBy"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The information about the creator of the item.""" - - @overload - def __init__( - self, - *, - type: str, - id: str, # pylint: disable=redefined-builtin - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class CodeInterpreterToolCallItemResource(ItemResource, discriminator="code_interpreter_call"): - """A tool call to run code. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER_CALL - :ivar status: Required. Is one of the following types: Literal["in_progress"], - Literal["completed"], Literal["incomplete"], Literal["interpreting"], Literal["failed"] - :vartype status: str or str or str or str or str - :ivar container_id: The ID of the container used to run the code. Required. - :vartype container_id: str - :ivar code: The code to run, or null if not available. 
Required. - :vartype code: str - :ivar outputs: The outputs generated by the code interpreter, such as logs or images. - Can be null if no outputs are available. Required. - :vartype outputs: list[~azure.ai.projects.models.CodeInterpreterOutput] - """ - - type: Literal[ItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required. Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], - Literal[\"incomplete\"], Literal[\"interpreting\"], Literal[\"failed\"]""" - container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the container used to run the code. Required.""" - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The code to run, or null if not available. Required.""" - outputs: list["_models.CodeInterpreterOutput"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The outputs generated by the code interpreter, such as logs or images. - Can be null if no outputs are available. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"], - container_id: str, - code: str, - outputs: list["_models.CodeInterpreterOutput"], - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.CODE_INTERPRETER_CALL # type: ignore - - -class ComparisonFilter(_Model): - """A filter used to compare a specified attribute key to a given value using a defined comparison - operation. - - :ivar type: Specifies the comparison operator: ``eq``, ``ne``, ``gt``, ``gte``, ``lt``, - ``lte``. - * `eq`: equals - * `ne`: not equal - * `gt`: greater than - * `gte`: greater than or equal - * `lt`: less than - * `lte`: less than or equal. Required. Is one of the following types: Literal["eq"], - Literal["ne"], Literal["gt"], Literal["gte"], Literal["lt"], Literal["lte"] - :vartype type: str or str or str or str or str or str - :ivar key: The key to compare against the value. Required. - :vartype key: str - :ivar value: The value to compare against the attribute key; supports string, number, or - boolean types. Required. Is one of the following types: str, float, bool - :vartype value: str or float or bool - """ - - type: Literal["eq", "ne", "gt", "gte", "lt", "lte"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Specifies the comparison operator: ``eq``, ``ne``, ``gt``, ``gte``, ``lt``, ``lte``. - * `eq`: equals - * `ne`: not equal - * `gt`: greater than - * `gte`: greater than or equal - * `lt`: less than - * `lte`: less than or equal. Required. Is one of the following types: Literal[\"eq\"], - Literal[\"ne\"], Literal[\"gt\"], Literal[\"gte\"], Literal[\"lt\"], Literal[\"lte\"]""" - key: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The key to compare against the value. Required.""" - value: Union[str, float, bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The value to compare against the attribute key; supports string, number, or boolean types. - Required. 
Is one of the following types: str, float, bool""" - - @overload - def __init__( - self, - *, - type: Literal["eq", "ne", "gt", "gte", "lt", "lte"], - key: str, - value: Union[str, float, bool], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class CompoundFilter(_Model): - """Combine multiple filters using ``and`` or ``or``. - - :ivar type: Type of operation: ``and`` or ``or``. Required. Is either a Literal["and"] type or - a Literal["or"] type. - :vartype type: str or str - :ivar filters: Array of filters to combine. Items can be ``ComparisonFilter`` or - ``CompoundFilter``. Required. - :vartype filters: list[~azure.ai.projects.models.ComparisonFilter or - ~azure.ai.projects.models.CompoundFilter] - """ - - type: Literal["and", "or"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Type of operation: ``and`` or ``or``. Required. Is either a Literal[\"and\"] type or a - Literal[\"or\"] type.""" - filters: list[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Array of filters to combine. Items can be ``ComparisonFilter`` or ``CompoundFilter``. Required.""" - - @overload - def __init__( - self, - *, - type: Literal["and", "or"], - filters: list[Union["_models.ComparisonFilter", "_models.CompoundFilter"]], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ComputerAction(_Model): - """ComputerAction. 
- - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ComputerActionClick, ComputerActionDoubleClick, ComputerActionDrag, ComputerActionKeyPress, - ComputerActionMove, ComputerActionScreenshot, ComputerActionScroll, ComputerActionTypeKeys, - ComputerActionWait - - :ivar type: Required. Known values are: "screenshot", "click", "double_click", "scroll", - "type", "wait", "keypress", "drag", and "move". - :vartype type: str or ~azure.ai.projects.models.ComputerActionType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"screenshot\", \"click\", \"double_click\", \"scroll\", \"type\", - \"wait\", \"keypress\", \"drag\", and \"move\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ComputerActionClick(ComputerAction, discriminator="click"): - """A click action. - - :ivar type: Specifies the event type. For a click action, this property is - always set to ``click``. Required. - :vartype type: str or ~azure.ai.projects.models.CLICK - :ivar button: Indicates which mouse button was pressed during the click. One of ``left``, - ``right``, ``wheel``, ``back``, or ``forward``. Required. Is one of the following types: - Literal["left"], Literal["right"], Literal["wheel"], Literal["back"], Literal["forward"] - :vartype button: str or str or str or str or str - :ivar x: The x-coordinate where the click occurred. Required. - :vartype x: int - :ivar y: The y-coordinate where the click occurred. Required. 
- :vartype y: int - """ - - type: Literal[ComputerActionType.CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a click action, this property is - always set to ``click``. Required.""" - button: Literal["left", "right", "wheel", "back", "forward"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Indicates which mouse button was pressed during the click. One of ``left``, ``right``, - ``wheel``, ``back``, or ``forward``. Required. Is one of the following types: - Literal[\"left\"], Literal[\"right\"], Literal[\"wheel\"], Literal[\"back\"], - Literal[\"forward\"]""" - x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The x-coordinate where the click occurred. Required.""" - y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The y-coordinate where the click occurred. Required.""" - - @overload - def __init__( - self, - *, - button: Literal["left", "right", "wheel", "back", "forward"], - x: int, - y: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ComputerActionType.CLICK # type: ignore - - -class ComputerActionDoubleClick(ComputerAction, discriminator="double_click"): - """A double click action. - - :ivar type: Specifies the event type. For a double click action, this property is - always set to ``double_click``. Required. - :vartype type: str or ~azure.ai.projects.models.DOUBLE_CLICK - :ivar x: The x-coordinate where the double click occurred. Required. - :vartype x: int - :ivar y: The y-coordinate where the double click occurred. Required. 
- :vartype y: int - """ - - type: Literal[ComputerActionType.DOUBLE_CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a double click action, this property is - always set to ``double_click``. Required.""" - x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The x-coordinate where the double click occurred. Required.""" - y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The y-coordinate where the double click occurred. Required.""" - - @overload - def __init__( - self, - *, - x: int, - y: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ComputerActionType.DOUBLE_CLICK # type: ignore - - -class ComputerActionDrag(ComputerAction, discriminator="drag"): - """A drag action. - - :ivar type: Specifies the event type. For a drag action, this property is - always set to ``drag``. Required. - :vartype type: str or ~azure.ai.projects.models.DRAG - :ivar path: An array of coordinates representing the path of the drag action. Coordinates will - appear as an array - of objects, eg - .. code-block:: - [ - { x: 100, y: 200 }, - { x: 200, y: 300 } - ]. Required. - :vartype path: list[~azure.ai.projects.models.Coordinate] - """ - - type: Literal[ComputerActionType.DRAG] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a drag action, this property is - always set to ``drag``. Required.""" - path: list["_models.Coordinate"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An array of coordinates representing the path of the drag action. 
Coordinates will appear as an - array - of objects, eg - .. code-block:: - [ - { x: 100, y: 200 }, - { x: 200, y: 300 } - ]. Required.""" - - @overload - def __init__( - self, - *, - path: list["_models.Coordinate"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ComputerActionType.DRAG # type: ignore - - -class ComputerActionKeyPress(ComputerAction, discriminator="keypress"): - """A collection of keypresses the model would like to perform. - - :ivar type: Specifies the event type. For a keypress action, this property is - always set to ``keypress``. Required. - :vartype type: str or ~azure.ai.projects.models.KEYPRESS - :ivar keys_property: The combination of keys the model is requesting to be pressed. This is an - array of strings, each representing a key. Required. - :vartype keys_property: list[str] - """ - - type: Literal[ComputerActionType.KEYPRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a keypress action, this property is - always set to ``keypress``. Required.""" - keys_property: list[str] = rest_field(name="keys", visibility=["read", "create", "update", "delete", "query"]) - """The combination of keys the model is requesting to be pressed. This is an - array of strings, each representing a key. Required.""" - - @overload - def __init__( - self, - *, - keys_property: list[str], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ComputerActionType.KEYPRESS # type: ignore - - -class ComputerActionMove(ComputerAction, discriminator="move"): - """A mouse move action. - - :ivar type: Specifies the event type. For a move action, this property is - always set to ``move``. Required. - :vartype type: str or ~azure.ai.projects.models.MOVE - :ivar x: The x-coordinate to move to. Required. - :vartype x: int - :ivar y: The y-coordinate to move to. Required. - :vartype y: int - """ - - type: Literal[ComputerActionType.MOVE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a move action, this property is - always set to ``move``. Required.""" - x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The x-coordinate to move to. Required.""" - y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The y-coordinate to move to. Required.""" - - @overload - def __init__( - self, - *, - x: int, - y: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ComputerActionType.MOVE # type: ignore - - -class ComputerActionScreenshot(ComputerAction, discriminator="screenshot"): - """A screenshot action. - - :ivar type: Specifies the event type. For a screenshot action, this property is - always set to ``screenshot``. Required. 
- :vartype type: str or ~azure.ai.projects.models.SCREENSHOT - """ - - type: Literal[ComputerActionType.SCREENSHOT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a screenshot action, this property is - always set to ``screenshot``. Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ComputerActionType.SCREENSHOT # type: ignore - - -class ComputerActionScroll(ComputerAction, discriminator="scroll"): - """A scroll action. - - :ivar type: Specifies the event type. For a scroll action, this property is - always set to ``scroll``. Required. - :vartype type: str or ~azure.ai.projects.models.SCROLL - :ivar x: The x-coordinate where the scroll occurred. Required. - :vartype x: int - :ivar y: The y-coordinate where the scroll occurred. Required. - :vartype y: int - :ivar scroll_x: The horizontal scroll distance. Required. - :vartype scroll_x: int - :ivar scroll_y: The vertical scroll distance. Required. - :vartype scroll_y: int - """ - - type: Literal[ComputerActionType.SCROLL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a scroll action, this property is - always set to ``scroll``. Required.""" - x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The x-coordinate where the scroll occurred. Required.""" - y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The y-coordinate where the scroll occurred. 
Required.""" - scroll_x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The horizontal scroll distance. Required.""" - scroll_y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The vertical scroll distance. Required.""" - - @overload - def __init__( - self, - *, - x: int, - y: int, - scroll_x: int, - scroll_y: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ComputerActionType.SCROLL # type: ignore - - -class ComputerActionTypeKeys(ComputerAction, discriminator="type"): - """An action to type in text. - - :ivar type: Specifies the event type. For a type action, this property is - always set to ``type``. Required. - :vartype type: str or ~azure.ai.projects.models.TYPE - :ivar text: The text to type. Required. - :vartype text: str - """ - - type: Literal[ComputerActionType.TYPE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a type action, this property is - always set to ``type``. Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text to type. Required.""" - - @overload - def __init__( - self, - *, - text: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ComputerActionType.TYPE # type: ignore - - -class ComputerActionWait(ComputerAction, discriminator="wait"): - """A wait action. - - :ivar type: Specifies the event type. 
For a wait action, this property is - always set to ``wait``. Required. - :vartype type: str or ~azure.ai.projects.models.WAIT - """ - - type: Literal[ComputerActionType.WAIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a wait action, this property is - always set to ``wait``. Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ComputerActionType.WAIT # type: ignore - - -class ComputerToolCallItemParam(ItemParam, discriminator="computer_call"): - """A tool call to a computer use tool. See the - `computer use guide `_ for more information. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL - :ivar call_id: An identifier used when responding to the tool call with output. Required. - :vartype call_id: str - :ivar action: Required. - :vartype action: ~azure.ai.projects.models.ComputerAction - :ivar pending_safety_checks: The pending safety checks for the computer call. Required. - :vartype pending_safety_checks: list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] - """ - - type: Literal[ItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An identifier used when responding to the tool call with output. 
Required.""" - action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The pending safety checks for the computer call. Required.""" - - @overload - def __init__( - self, - *, - call_id: str, - action: "_models.ComputerAction", - pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.COMPUTER_CALL # type: ignore - - -class ComputerToolCallItemResource(ItemResource, discriminator="computer_call"): - """A tool call to a computer use tool. See the - `computer use guide `_ for more information. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar call_id: An identifier used when responding to the tool call with output. Required. - :vartype call_id: str - :ivar action: Required. - :vartype action: ~azure.ai.projects.models.ComputerAction - :ivar pending_safety_checks: The pending safety checks for the computer call. Required. 
- :vartype pending_safety_checks: list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] - """ - - type: Literal[ItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An identifier used when responding to the tool call with output. Required.""" - action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The pending safety checks for the computer call. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - call_id: str, - action: "_models.ComputerAction", - pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"], - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.COMPUTER_CALL # type: ignore - - -class ComputerToolCallOutputItemOutput(_Model): - """ComputerToolCallOutputItemOutput. - - You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - ComputerToolCallOutputItemOutputComputerScreenshot - - :ivar type: Required. "computer_screenshot" - :vartype type: str or ~azure.ai.projects.models.ComputerToolCallOutputItemOutputType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. \"computer_screenshot\"""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ComputerToolCallOutputItemOutputComputerScreenshot( - ComputerToolCallOutputItemOutput, discriminator="computer_screenshot" -): # pylint: disable=name-too-long - """ComputerToolCallOutputItemOutputComputerScreenshot. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.SCREENSHOT - :ivar image_url: - :vartype image_url: str - :ivar file_id: - :vartype file_id: str - """ - - type: Literal[ComputerToolCallOutputItemOutputType.SCREENSHOT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - - @overload - def __init__( - self, - *, - image_url: Optional[str] = None, - file_id: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ComputerToolCallOutputItemOutputType.SCREENSHOT # type: ignore - - -class ComputerToolCallOutputItemParam(ItemParam, discriminator="computer_call_output"): - """The output of a computer tool call. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL_OUTPUT - :ivar call_id: The ID of the computer tool call that produced the output. Required. - :vartype call_id: str - :ivar acknowledged_safety_checks: The safety checks reported by the API that have been - acknowledged by the - developer. - :vartype acknowledged_safety_checks: - list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] - :ivar output: Required. - :vartype output: ~azure.ai.projects.models.ComputerToolCallOutputItemOutput - """ - - type: Literal[ItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the computer tool call that produced the output. Required.""" - acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The safety checks reported by the API that have been acknowledged by the - developer.""" - output: "_models.ComputerToolCallOutputItemOutput" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required.""" - - @overload - def __init__( - self, - *, - call_id: str, - output: "_models.ComputerToolCallOutputItemOutput", - acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.COMPUTER_CALL_OUTPUT # type: ignore - - -class ComputerToolCallOutputItemResource(ItemResource, discriminator="computer_call_output"): - """The output of a computer tool call. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL_OUTPUT - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar call_id: The ID of the computer tool call that produced the output. Required. - :vartype call_id: str - :ivar acknowledged_safety_checks: The safety checks reported by the API that have been - acknowledged by the - developer. - :vartype acknowledged_safety_checks: - list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] - :ivar output: Required. - :vartype output: ~azure.ai.projects.models.ComputerToolCallOutputItemOutput - """ - - type: Literal[ItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. 
Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the computer tool call that produced the output. Required.""" - acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The safety checks reported by the API that have been acknowledged by the - developer.""" - output: "_models.ComputerToolCallOutputItemOutput" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - call_id: str, - output: "_models.ComputerToolCallOutputItemOutput", - created_by: Optional["_models.CreatedBy"] = None, - acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.COMPUTER_CALL_OUTPUT # type: ignore - - -class ComputerToolCallSafetyCheck(_Model): - """A pending safety check for the computer call. - - :ivar id: The ID of the pending safety check. Required. - :vartype id: str - :ivar code: The type of the pending safety check. Required. - :vartype code: str - :ivar message: Details about the pending safety check. Required. - :vartype message: str - """ - - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the pending safety check. 
Required.""" - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The type of the pending safety check. Required.""" - message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Details about the pending safety check. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - code: str, - message: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ComputerUsePreviewTool(Tool, discriminator="computer_use_preview"): - """A tool that controls a virtual computer. Learn more about the `computer tool - `_. - - :ivar type: The type of the computer use tool. Always ``computer_use_preview``. Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER_USE_PREVIEW - :ivar environment: The type of computer environment to control. Required. Is one of the - following types: Literal["windows"], Literal["mac"], Literal["linux"], Literal["ubuntu"], - Literal["browser"] - :vartype environment: str or str or str or str or str - :ivar display_width: The width of the computer display. Required. - :vartype display_width: int - :ivar display_height: The height of the computer display. Required. - :vartype display_height: int - """ - - type: Literal[ToolType.COMPUTER_USE_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the computer use tool. Always ``computer_use_preview``. Required.""" - environment: Literal["windows", "mac", "linux", "ubuntu", "browser"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The type of computer environment to control. Required. 
Is one of the following types: - Literal[\"windows\"], Literal[\"mac\"], Literal[\"linux\"], Literal[\"ubuntu\"], - Literal[\"browser\"]""" - display_width: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The width of the computer display. Required.""" - display_height: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The height of the computer display. Required.""" - - @overload - def __init__( - self, - *, - environment: Literal["windows", "mac", "linux", "ubuntu", "browser"], - display_width: int, - display_height: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.COMPUTER_USE_PREVIEW # type: ignore - - -class Connection(_Model): - """Response from the list and get connections operations. - - :ivar name: The friendly name of the connection, provided by the user. Required. - :vartype name: str - :ivar id: A unique identifier for the connection, generated by the service. Required. - :vartype id: str - :ivar type: Category of the connection. Required. Known values are: "AzureOpenAI", "AzureBlob", - "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", "AppConfig", "AppInsights", - "CustomKeys", and "RemoteTool". - :vartype type: str or ~azure.ai.projects.models.ConnectionType - :ivar target: The connection URL to be used for this service. Required. - :vartype target: str - :ivar is_default: Whether the connection is tagged as the default connection of its type. - Required. - :vartype is_default: bool - :ivar credentials: The credentials used by the connection. Required. - :vartype credentials: ~azure.ai.projects.models.BaseCredentials - :ivar metadata: Metadata of the connection. Required. 
- :vartype metadata: dict[str, str] - """ - - name: str = rest_field(visibility=["read"]) - """The friendly name of the connection, provided by the user. Required.""" - id: str = rest_field(visibility=["read"]) - """A unique identifier for the connection, generated by the service. Required.""" - type: Union[str, "_models.ConnectionType"] = rest_field(visibility=["read"]) - """Category of the connection. Required. Known values are: \"AzureOpenAI\", \"AzureBlob\", - \"AzureStorageAccount\", \"CognitiveSearch\", \"CosmosDB\", \"ApiKey\", \"AppConfig\", - \"AppInsights\", \"CustomKeys\", and \"RemoteTool\".""" - target: str = rest_field(visibility=["read"]) - """The connection URL to be used for this service. Required.""" - is_default: bool = rest_field(name="isDefault", visibility=["read"]) - """Whether the connection is tagged as the default connection of its type. Required.""" - credentials: "_models.BaseCredentials" = rest_field(visibility=["read"]) - """The credentials used by the connection. Required.""" - metadata: dict[str, str] = rest_field(visibility=["read"]) - """Metadata of the connection. Required.""" - - -class ContainerAppAgentDefinition(AgentDefinition, discriminator="container_app"): - """The container app agent definition. - - :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. - :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar kind: Required. - :vartype kind: str or ~azure.ai.projects.models.CONTAINER_APP - :ivar container_protocol_versions: The protocols that the agent supports for ingress - communication of the containers. Required. - :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] - :ivar container_app_resource_id: The resource ID of the Azure Container App that hosts this - agent. Not mutable across versions. Required. 
- :vartype container_app_resource_id: str - :ivar ingress_subdomain_suffix: The suffix to apply to the app subdomain when sending ingress - to the agent. This can be a label (e.g., '---current'), a specific revision (e.g., - '--0000001'), or empty to use the default endpoint for the container app. Required. - :vartype ingress_subdomain_suffix: str - """ - - kind: Literal[AgentKind.CONTAINER_APP] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - container_protocol_versions: list["_models.ProtocolVersionRecord"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The protocols that the agent supports for ingress communication of the containers. Required.""" - container_app_resource_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The resource ID of the Azure Container App that hosts this agent. Not mutable across versions. - Required.""" - ingress_subdomain_suffix: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The suffix to apply to the app subdomain when sending ingress to the agent. This can be a label - (e.g., '---current'), a specific revision (e.g., '--0000001'), or empty to use the default - endpoint for the container app. Required.""" - - @overload - def __init__( - self, - *, - container_protocol_versions: list["_models.ProtocolVersionRecord"], - container_app_resource_id: str, - ingress_subdomain_suffix: str, - rai_config: Optional["_models.RaiConfig"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.kind = AgentKind.CONTAINER_APP # type: ignore - - -class EvaluationRuleAction(_Model): - """Evaluation action model. 
- - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ContinuousEvaluationRuleAction, HumanEvaluationRuleAction - - :ivar type: Type of the evaluation action. Required. Known values are: "continuousEvaluation" - and "humanEvaluation". - :vartype type: str or ~azure.ai.projects.models.EvaluationRuleActionType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Type of the evaluation action. Required. Known values are: \"continuousEvaluation\" and - \"humanEvaluation\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ContinuousEvaluationRuleAction(EvaluationRuleAction, discriminator="continuousEvaluation"): - """Evaluation rule action for continuous evaluation. - - :ivar type: Required. Continuous evaluation. - :vartype type: str or ~azure.ai.projects.models.CONTINUOUS_EVALUATION - :ivar eval_id: Eval Id to add continuous evaluation runs to. Required. - :vartype eval_id: str - :ivar max_hourly_runs: Maximum number of evaluation runs allowed per hour. - :vartype max_hourly_runs: int - """ - - type: Literal[EvaluationRuleActionType.CONTINUOUS_EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Continuous evaluation.""" - eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) - """Eval Id to add continuous evaluation runs to. 
Required.""" - max_hourly_runs: Optional[int] = rest_field( - name="maxHourlyRuns", visibility=["read", "create", "update", "delete", "query"] - ) - """Maximum number of evaluation runs allowed per hour.""" - - @overload - def __init__( - self, - *, - eval_id: str, - max_hourly_runs: Optional[int] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = EvaluationRuleActionType.CONTINUOUS_EVALUATION # type: ignore - - -class Coordinate(_Model): - """An x/y coordinate pair, e.g. ``{ x: 100, y: 200 }``. - - :ivar x: The x-coordinate. Required. - :vartype x: int - :ivar y: The y-coordinate. Required. - :vartype y: int - """ - - x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The x-coordinate. Required.""" - y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The y-coordinate. Required.""" - - @overload - def __init__( - self, - *, - x: int, - y: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class CosmosDBIndex(Index, discriminator="CosmosDBNoSqlVectorStore"): - """CosmosDB Vector Store Index Definition. - - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. 
- :vartype tags: dict[str, str] - :ivar type: Type of index. Required. CosmosDB - :vartype type: str or ~azure.ai.projects.models.COSMOS_DB - :ivar connection_name: Name of connection to CosmosDB. Required. - :vartype connection_name: str - :ivar database_name: Name of the CosmosDB Database. Required. - :vartype database_name: str - :ivar container_name: Name of CosmosDB Container. Required. - :vartype container_name: str - :ivar embedding_configuration: Embedding model configuration. Required. - :vartype embedding_configuration: ~azure.ai.projects.models.EmbeddingConfiguration - :ivar field_mapping: Field mapping configuration. Required. - :vartype field_mapping: ~azure.ai.projects.models.FieldMapping - """ - - type: Literal[IndexType.COSMOS_DB] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Type of index. Required. CosmosDB""" - connection_name: str = rest_field(name="connectionName", visibility=["create"]) - """Name of connection to CosmosDB. Required.""" - database_name: str = rest_field(name="databaseName", visibility=["create"]) - """Name of the CosmosDB Database. Required.""" - container_name: str = rest_field(name="containerName", visibility=["create"]) - """Name of CosmosDB Container. Required.""" - embedding_configuration: "_models.EmbeddingConfiguration" = rest_field( - name="embeddingConfiguration", visibility=["create"] - ) - """Embedding model configuration. Required.""" - field_mapping: "_models.FieldMapping" = rest_field(name="fieldMapping", visibility=["create"]) - """Field mapping configuration. Required.""" - - @overload - def __init__( - self, - *, - connection_name: str, - database_name: str, - container_name: str, - embedding_configuration: "_models.EmbeddingConfiguration", - field_mapping: "_models.FieldMapping", - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = IndexType.COSMOS_DB # type: ignore - - -class CreatedBy(_Model): - """CreatedBy. - - :ivar agent: The agent that created the item. - :vartype agent: ~azure.ai.projects.models.AgentId - :ivar response_id: The response on which the item is created. - :vartype response_id: str - """ - - agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The agent that created the item.""" - response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The response on which the item is created.""" - - @overload - def __init__( - self, - *, - agent: Optional["_models.AgentId"] = None, - response_id: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class Trigger(_Model): - """Base model for Trigger of the schedule. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - CronTrigger, OneTimeTrigger, RecurrenceTrigger - - :ivar type: Type of the trigger. Required. Known values are: "Cron", "Recurrence", and - "OneTime". - :vartype type: str or ~azure.ai.projects.models.TriggerType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Type of the trigger. Required. Known values are: \"Cron\", \"Recurrence\", and \"OneTime\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class CronTrigger(Trigger, discriminator="Cron"): - """Cron based trigger. - - :ivar type: Required. Cron based trigger. - :vartype type: str or ~azure.ai.projects.models.CRON - :ivar expression: Cron expression that defines the schedule frequency. Required. - :vartype expression: str - :ivar time_zone: Time zone for the cron schedule. - :vartype time_zone: str - :ivar start_time: Start time for the cron schedule in ISO 8601 format. - :vartype start_time: str - :ivar end_time: End time for the cron schedule in ISO 8601 format. - :vartype end_time: str - """ - - type: Literal[TriggerType.CRON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Cron based trigger.""" - expression: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Cron expression that defines the schedule frequency. Required.""" - time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) - """Time zone for the cron schedule.""" - start_time: Optional[str] = rest_field(name="startTime", visibility=["read", "create", "update", "delete", "query"]) - """Start time for the cron schedule in ISO 8601 format.""" - end_time: Optional[str] = rest_field(name="endTime", visibility=["read", "create", "update", "delete", "query"]) - """End time for the cron schedule in ISO 8601 format.""" - - @overload - def __init__( - self, - *, - expression: str, - time_zone: Optional[str] = None, - start_time: Optional[str] = None, - end_time: Optional[str] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = TriggerType.CRON # type: ignore - - -class CustomCredential(BaseCredentials, discriminator="CustomKeys"): - """Custom credential definition. - - :ivar type: The credential type. Required. Custom credential - :vartype type: str or ~azure.ai.projects.models.CUSTOM - """ - - type: Literal[CredentialType.CUSTOM] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. Custom credential""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = CredentialType.CUSTOM # type: ignore - - -class RecurrenceSchedule(_Model): - """Recurrence schedule model. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - DailyRecurrenceSchedule, HourlyRecurrenceSchedule, MonthlyRecurrenceSchedule, - WeeklyRecurrenceSchedule - - :ivar type: Recurrence type for the recurrence schedule. Required. Known values are: "Hourly", - "Daily", "Weekly", and "Monthly". - :vartype type: str or ~azure.ai.projects.models.RecurrenceType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Recurrence type for the recurrence schedule. Required. Known values are: \"Hourly\", \"Daily\", - \"Weekly\", and \"Monthly\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class DailyRecurrenceSchedule(RecurrenceSchedule, discriminator="Daily"): - """Daily recurrence schedule. - - :ivar type: Daily recurrence type. Required. Daily recurrence pattern. - :vartype type: str or ~azure.ai.projects.models.DAILY - :ivar hours: Hours for the recurrence schedule. Required. - :vartype hours: list[int] - """ - - type: Literal[RecurrenceType.DAILY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Daily recurrence type. Required. Daily recurrence pattern.""" - hours: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Hours for the recurrence schedule. Required.""" - - @overload - def __init__( - self, - *, - hours: list[int], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = RecurrenceType.DAILY # type: ignore - - -class DatasetCredential(_Model): - """Represents a reference to a blob for consumption. - - :ivar blob_reference: Credential info to access the storage account. Required. - :vartype blob_reference: ~azure.ai.projects.models.BlobReference - """ - - blob_reference: "_models.BlobReference" = rest_field( - name="blobReference", visibility=["read", "create", "update", "delete", "query"] - ) - """Credential info to access the storage account. Required.""" - - @overload - def __init__( - self, - *, - blob_reference: "_models.BlobReference", - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class DatasetVersion(_Model): - """DatasetVersion Definition. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - FileDatasetVersion, FolderDatasetVersion - - :ivar data_uri: URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 - `_. Required. - :vartype data_uri: str - :ivar type: Dataset type. Required. Known values are: "uri_file" and "uri_folder". - :vartype type: str or ~azure.ai.projects.models.DatasetType - :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset - manages storage itself. If true, the underlying data will not be deleted when the dataset - version is deleted. - :vartype is_reference: bool - :ivar connection_name: The Azure Storage Account connection name. Required if - startPendingUploadVersion was not called before creating the Dataset. - :vartype connection_name: str - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - """ - - __mapping__: dict[str, _Model] = {} - data_uri: str = rest_field(name="dataUri", visibility=["read", "create"]) - """URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 - `_. Required.""" - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Dataset type. Required. 
Known values are: \"uri_file\" and \"uri_folder\".""" - is_reference: Optional[bool] = rest_field(name="isReference", visibility=["read"]) - """Indicates if the dataset holds a reference to the storage, or the dataset manages storage - itself. If true, the underlying data will not be deleted when the dataset version is deleted.""" - connection_name: Optional[str] = rest_field(name="connectionName", visibility=["read", "create"]) - """The Azure Storage Account connection name. Required if startPendingUploadVersion was not called - before creating the Dataset.""" - id: Optional[str] = rest_field(visibility=["read"]) - """Asset ID, a unique identifier for the asset.""" - name: str = rest_field(visibility=["read"]) - """The name of the resource. Required.""" - version: str = rest_field(visibility=["read"]) - """The version of the resource. Required.""" - description: Optional[str] = rest_field(visibility=["create", "update"]) - """The asset description text.""" - tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) - """Tag dictionary. Tags can be added, removed, and updated.""" - - @overload - def __init__( - self, - *, - data_uri: str, - type: str, - connection_name: Optional[str] = None, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class DeleteAgentResponse(_Model): - """A deleted agent Object. - - :ivar object: The object type. Always 'agent.deleted'. Required. Default value is - "agent.deleted". - :vartype object: str - :ivar name: The name of the agent. Required. - :vartype name: str - :ivar deleted: Whether the agent was successfully deleted. Required. 
- :vartype deleted: bool - """ - - object: Literal["agent.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type. Always 'agent.deleted'. Required. Default value is \"agent.deleted\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the agent. Required.""" - deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the agent was successfully deleted. Required.""" - - @overload - def __init__( - self, - *, - name: str, - deleted: bool, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.object: Literal["agent.deleted"] = "agent.deleted" - - -class DeleteAgentVersionResponse(_Model): - """A deleted agent version Object. - - :ivar object: The object type. Always 'agent.deleted'. Required. Default value is - "agent.version.deleted". - :vartype object: str - :ivar name: The name of the agent. Required. - :vartype name: str - :ivar version: The version identifier of the agent. Required. - :vartype version: str - :ivar deleted: Whether the agent was successfully deleted. Required. - :vartype deleted: bool - """ - - object: Literal["agent.version.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type. Always 'agent.deleted'. Required. Default value is \"agent.version.deleted\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the agent. Required.""" - version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The version identifier of the agent. 
Required.""" - deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the agent was successfully deleted. Required.""" - - @overload - def __init__( - self, - *, - name: str, - version: str, - deleted: bool, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.object: Literal["agent.version.deleted"] = "agent.version.deleted" - - -class DeleteMemoryStoreResponse(_Model): - """DeleteMemoryStoreResponse. - - :ivar object: The object type. Always 'memory_store.deleted'. Required. Default value is - "memory_store.deleted". - :vartype object: str - :ivar name: The name of the memory store. Required. - :vartype name: str - :ivar deleted: Whether the memory store was successfully deleted. Required. - :vartype deleted: bool - """ - - object: Literal["memory_store.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type. Always 'memory_store.deleted'. Required. Default value is - \"memory_store.deleted\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the memory store. Required.""" - deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the memory store was successfully deleted. Required.""" - - @overload - def __init__( - self, - *, - name: str, - deleted: bool, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.object: Literal["memory_store.deleted"] = "memory_store.deleted" - - -class Deployment(_Model): - """Model Deployment Definition. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ModelDeployment - - :ivar type: The type of the deployment. Required. "ModelDeployment" - :vartype type: str or ~azure.ai.projects.models.DeploymentType - :ivar name: Name of the deployment. Required. - :vartype name: str - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """The type of the deployment. Required. \"ModelDeployment\"""" - name: str = rest_field(visibility=["read"]) - """Name of the deployment. Required.""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EmbeddingConfiguration(_Model): - """Embedding configuration class. - - :ivar model_deployment_name: Deployment name of embedding model. It can point to a model - deployment either in the parent AIServices or a connection. Required. - :vartype model_deployment_name: str - :ivar embedding_field: Embedding field. Required. - :vartype embedding_field: str - """ - - model_deployment_name: str = rest_field(name="modelDeploymentName", visibility=["create"]) - """Deployment name of embedding model. It can point to a model deployment either in the parent - AIServices or a connection. Required.""" - embedding_field: str = rest_field(name="embeddingField", visibility=["create"]) - """Embedding field. 
Required.""" - - @overload - def __init__( - self, - *, - model_deployment_name: str, - embedding_field: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EntraIDCredentials(BaseCredentials, discriminator="AAD"): - """Entra ID credential definition. - - :ivar type: The credential type. Required. Entra ID credential (formerly known as AAD) - :vartype type: str or ~azure.ai.projects.models.ENTRA_ID - """ - - type: Literal[CredentialType.ENTRA_ID] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. Entra ID credential (formerly known as AAD)""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = CredentialType.ENTRA_ID # type: ignore - - -class EvalCompareReport(InsightResult, discriminator="EvaluationComparison"): - """Insights from the evaluation comparison. - - :ivar type: The type of insights result. Required. Evaluation Comparison. - :vartype type: str or ~azure.ai.projects.models.EVALUATION_COMPARISON - :ivar comparisons: Comparison results for each treatment run against the baseline. Required. - :vartype comparisons: list[~azure.ai.projects.models.EvalRunResultComparison] - :ivar method: The statistical method used for comparison. Required. - :vartype method: str - """ - - type: Literal[InsightType.EVALUATION_COMPARISON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of insights result. Required. 
Evaluation Comparison.""" - comparisons: list["_models.EvalRunResultComparison"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Comparison results for each treatment run against the baseline. Required.""" - method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The statistical method used for comparison. Required.""" - - @overload - def __init__( - self, - *, - comparisons: list["_models.EvalRunResultComparison"], - method: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = InsightType.EVALUATION_COMPARISON # type: ignore - - -class EvalResult(_Model): - """Result of the evaluation. - - :ivar name: name of the check. Required. - :vartype name: str - :ivar type: type of the check. Required. - :vartype type: str - :ivar score: score. Required. - :vartype score: float - :ivar passed: indicates if the check passed or failed. Required. - :vartype passed: bool - """ - - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """name of the check. Required.""" - type: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """type of the check. Required.""" - score: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """score. Required.""" - passed: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """indicates if the check passed or failed. Required.""" - - @overload - def __init__( - self, - *, - name: str, - type: str, - score: float, - passed: bool, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvalRunResultCompareItem(_Model): - """Metric comparison for a treatment against the baseline. - - :ivar treatment_run_id: The treatment run ID. Required. - :vartype treatment_run_id: str - :ivar treatment_run_summary: Summary statistics of the treatment run. Required. - :vartype treatment_run_summary: ~azure.ai.projects.models.EvalRunResultSummary - :ivar delta_estimate: Estimated difference between treatment and baseline. Required. - :vartype delta_estimate: float - :ivar p_value: P-value for the treatment effect. Required. - :vartype p_value: float - :ivar treatment_effect: Type of treatment effect. Required. Known values are: "TooFewSamples", - "Inconclusive", "Changed", "Improved", and "Degraded". - :vartype treatment_effect: str or ~azure.ai.projects.models.TreatmentEffectType - """ - - treatment_run_id: str = rest_field( - name="treatmentRunId", visibility=["read", "create", "update", "delete", "query"] - ) - """The treatment run ID. Required.""" - treatment_run_summary: "_models.EvalRunResultSummary" = rest_field( - name="treatmentRunSummary", visibility=["read", "create", "update", "delete", "query"] - ) - """Summary statistics of the treatment run. Required.""" - delta_estimate: float = rest_field(name="deltaEstimate", visibility=["read", "create", "update", "delete", "query"]) - """Estimated difference between treatment and baseline. Required.""" - p_value: float = rest_field(name="pValue", visibility=["read", "create", "update", "delete", "query"]) - """P-value for the treatment effect. Required.""" - treatment_effect: Union[str, "_models.TreatmentEffectType"] = rest_field( - name="treatmentEffect", visibility=["read", "create", "update", "delete", "query"] - ) - """Type of treatment effect. Required. 
Known values are: \"TooFewSamples\", \"Inconclusive\", - \"Changed\", \"Improved\", and \"Degraded\".""" - - @overload - def __init__( - self, - *, - treatment_run_id: str, - treatment_run_summary: "_models.EvalRunResultSummary", - delta_estimate: float, - p_value: float, - treatment_effect: Union[str, "_models.TreatmentEffectType"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvalRunResultComparison(_Model): - """Comparison results for treatment runs against the baseline. - - :ivar testing_criteria: Name of the testing criteria. Required. - :vartype testing_criteria: str - :ivar metric: Metric being evaluated. Required. - :vartype metric: str - :ivar evaluator: Name of the evaluator for this testing criteria. Required. - :vartype evaluator: str - :ivar baseline_run_summary: Summary statistics of the baseline run. Required. - :vartype baseline_run_summary: ~azure.ai.projects.models.EvalRunResultSummary - :ivar compare_items: List of comparison results for each treatment run. Required. - :vartype compare_items: list[~azure.ai.projects.models.EvalRunResultCompareItem] - """ - - testing_criteria: str = rest_field( - name="testingCriteria", visibility=["read", "create", "update", "delete", "query"] - ) - """Name of the testing criteria. Required.""" - metric: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Metric being evaluated. Required.""" - evaluator: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Name of the evaluator for this testing criteria. 
Required.""" - baseline_run_summary: "_models.EvalRunResultSummary" = rest_field( - name="baselineRunSummary", visibility=["read", "create", "update", "delete", "query"] - ) - """Summary statistics of the baseline run. Required.""" - compare_items: list["_models.EvalRunResultCompareItem"] = rest_field( - name="compareItems", visibility=["read", "create", "update", "delete", "query"] - ) - """List of comparison results for each treatment run. Required.""" - - @overload - def __init__( - self, - *, - testing_criteria: str, - metric: str, - evaluator: str, - baseline_run_summary: "_models.EvalRunResultSummary", - compare_items: list["_models.EvalRunResultCompareItem"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvalRunResultSummary(_Model): - """Summary statistics of a metric in an evaluation run. - - :ivar run_id: The evaluation run ID. Required. - :vartype run_id: str - :ivar sample_count: Number of samples in the evaluation run. Required. - :vartype sample_count: int - :ivar average: Average value of the metric in the evaluation run. Required. - :vartype average: float - :ivar standard_deviation: Standard deviation of the metric in the evaluation run. Required. - :vartype standard_deviation: float - """ - - run_id: str = rest_field(name="runId", visibility=["read", "create", "update", "delete", "query"]) - """The evaluation run ID. Required.""" - sample_count: int = rest_field(name="sampleCount", visibility=["read", "create", "update", "delete", "query"]) - """Number of samples in the evaluation run. Required.""" - average: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Average value of the metric in the evaluation run. 
Required.""" - standard_deviation: float = rest_field( - name="standardDeviation", visibility=["read", "create", "update", "delete", "query"] - ) - """Standard deviation of the metric in the evaluation run. Required.""" - - @overload - def __init__( - self, - *, - run_id: str, - sample_count: int, - average: float, - standard_deviation: float, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluationComparisonRequest(InsightRequest, discriminator="EvaluationComparison"): - """Evaluation Comparison Request. - - :ivar type: The type of request. Required. Evaluation Comparison. - :vartype type: str or ~azure.ai.projects.models.EVALUATION_COMPARISON - :ivar eval_id: Identifier for the evaluation. Required. - :vartype eval_id: str - :ivar baseline_run_id: The baseline run ID for comparison. Required. - :vartype baseline_run_id: str - :ivar treatment_run_ids: List of treatment run IDs for comparison. Required. - :vartype treatment_run_ids: list[str] - """ - - type: Literal[InsightType.EVALUATION_COMPARISON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of request. Required. Evaluation Comparison.""" - eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) - """Identifier for the evaluation. Required.""" - baseline_run_id: str = rest_field(name="baselineRunId", visibility=["read", "create", "update", "delete", "query"]) - """The baseline run ID for comparison. Required.""" - treatment_run_ids: list[str] = rest_field( - name="treatmentRunIds", visibility=["read", "create", "update", "delete", "query"] - ) - """List of treatment run IDs for comparison. 
Required.""" - - @overload - def __init__( - self, - *, - eval_id: str, - baseline_run_id: str, - treatment_run_ids: list[str], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = InsightType.EVALUATION_COMPARISON # type: ignore - - -class InsightSample(_Model): - """A sample from the analysis. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - EvaluationResultSample - - :ivar id: The unique identifier for the analysis sample. Required. - :vartype id: str - :ivar type: Sample type. Required. "EvaluationResultSample" - :vartype type: str or ~azure.ai.projects.models.SampleType - :ivar features: Features to help with additional filtering of data in UX. Required. - :vartype features: dict[str, any] - :ivar correlation_info: Info about the correlation for the analysis sample. Required. - :vartype correlation_info: dict[str, any] - """ - - __mapping__: dict[str, _Model] = {} - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier for the analysis sample. Required.""" - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Sample type. Required. \"EvaluationResultSample\"""" - features: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Features to help with additional filtering of data in UX. Required.""" - correlation_info: dict[str, Any] = rest_field( - name="correlationInfo", visibility=["read", "create", "update", "delete", "query"] - ) - """Info about the correlation for the analysis sample. 
Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - type: str, - features: dict[str, Any], - correlation_info: dict[str, Any], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluationResultSample(InsightSample, discriminator="EvaluationResultSample"): - """A sample from the evaluation result. - - :ivar id: The unique identifier for the analysis sample. Required. - :vartype id: str - :ivar features: Features to help with additional filtering of data in UX. Required. - :vartype features: dict[str, any] - :ivar correlation_info: Info about the correlation for the analysis sample. Required. - :vartype correlation_info: dict[str, any] - :ivar type: Evaluation Result Sample Type. Required. A sample from the evaluation result. - :vartype type: str or ~azure.ai.projects.models.EVALUATION_RESULT_SAMPLE - :ivar evaluation_result: Evaluation result for the analysis sample. Required. - :vartype evaluation_result: ~azure.ai.projects.models.EvalResult - """ - - type: Literal[SampleType.EVALUATION_RESULT_SAMPLE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Evaluation Result Sample Type. Required. A sample from the evaluation result.""" - evaluation_result: "_models.EvalResult" = rest_field( - name="evaluationResult", visibility=["read", "create", "update", "delete", "query"] - ) - """Evaluation result for the analysis sample. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - features: dict[str, Any], - correlation_info: dict[str, Any], - evaluation_result: "_models.EvalResult", - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = SampleType.EVALUATION_RESULT_SAMPLE # type: ignore - - -class EvaluationRule(_Model): - """Evaluation rule model. - - :ivar id: Unique identifier for the evaluation rule. Required. - :vartype id: str - :ivar display_name: Display Name for the evaluation rule. - :vartype display_name: str - :ivar description: Description for the evaluation rule. - :vartype description: str - :ivar action: Definition of the evaluation rule action. Required. - :vartype action: ~azure.ai.projects.models.EvaluationRuleAction - :ivar filter: Filter condition of the evaluation rule. - :vartype filter: ~azure.ai.projects.models.EvaluationRuleFilter - :ivar event_type: Event type that the evaluation rule applies to. Required. Known values are: - "response.completed" and "manual". - :vartype event_type: str or ~azure.ai.projects.models.EvaluationRuleEventType - :ivar enabled: Indicates whether the evaluation rule is enabled. Default is true. Required. - :vartype enabled: bool - :ivar system_data: System metadata for the evaluation rule. Required. - :vartype system_data: dict[str, str] - """ - - id: str = rest_field(visibility=["read"]) - """Unique identifier for the evaluation rule. Required.""" - display_name: Optional[str] = rest_field( - name="displayName", visibility=["read", "create", "update", "delete", "query"] - ) - """Display Name for the evaluation rule.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Description for the evaluation rule.""" - action: "_models.EvaluationRuleAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Definition of the evaluation rule action. 
Required.""" - filter: Optional["_models.EvaluationRuleFilter"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Filter condition of the evaluation rule.""" - event_type: Union[str, "_models.EvaluationRuleEventType"] = rest_field( - name="eventType", visibility=["read", "create", "update", "delete", "query"] - ) - """Event type that the evaluation rule applies to. Required. Known values are: - \"response.completed\" and \"manual\".""" - enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Indicates whether the evaluation rule is enabled. Default is true. Required.""" - system_data: dict[str, str] = rest_field(name="systemData", visibility=["read"]) - """System metadata for the evaluation rule. Required.""" - - @overload - def __init__( - self, - *, - action: "_models.EvaluationRuleAction", - event_type: Union[str, "_models.EvaluationRuleEventType"], - enabled: bool, - display_name: Optional[str] = None, - description: Optional[str] = None, - filter: Optional["_models.EvaluationRuleFilter"] = None, # pylint: disable=redefined-builtin - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluationRuleFilter(_Model): - """Evaluation filter model. - - :ivar agent_name: Filter by agent name. Required. - :vartype agent_name: str - """ - - agent_name: str = rest_field(name="agentName", visibility=["read", "create", "update", "delete", "query"]) - """Filter by agent name. Required.""" - - @overload - def __init__( - self, - *, - agent_name: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluationRunClusterInsightResult(InsightResult, discriminator="EvaluationRunClusterInsight"): - """Insights from the evaluation run cluster analysis. - - :ivar type: The type of insights result. Required. Insights on an Evaluation run result. - :vartype type: str or ~azure.ai.projects.models.EVALUATION_RUN_CLUSTER_INSIGHT - :ivar cluster_insight: Required. - :vartype cluster_insight: ~azure.ai.projects.models.ClusterInsightResult - """ - - type: Literal[InsightType.EVALUATION_RUN_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of insights result. Required. Insights on an Evaluation run result.""" - cluster_insight: "_models.ClusterInsightResult" = rest_field( - name="clusterInsight", visibility=["read", "create", "update", "delete", "query"] - ) - """Required.""" - - @overload - def __init__( - self, - *, - cluster_insight: "_models.ClusterInsightResult", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = InsightType.EVALUATION_RUN_CLUSTER_INSIGHT # type: ignore - - -class EvaluationRunClusterInsightsRequest(InsightRequest, discriminator="EvaluationRunClusterInsight"): - """Insights on set of Evaluation Results. - - :ivar type: The type of insights request. Required. Insights on an Evaluation run result. - :vartype type: str or ~azure.ai.projects.models.EVALUATION_RUN_CLUSTER_INSIGHT - :ivar eval_id: Evaluation Id for the insights. Required. - :vartype eval_id: str - :ivar run_ids: List of evaluation run IDs for the insights. Required. 
- :vartype run_ids: list[str] - :ivar model_configuration: Configuration of the model used in the insight generation. - :vartype model_configuration: ~azure.ai.projects.models.InsightModelConfiguration - """ - - type: Literal[InsightType.EVALUATION_RUN_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of insights request. Required. Insights on an Evaluation run result.""" - eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) - """Evaluation Id for the insights. Required.""" - run_ids: list[str] = rest_field(name="runIds", visibility=["read", "create", "update", "delete", "query"]) - """List of evaluation run IDs for the insights. Required.""" - model_configuration: Optional["_models.InsightModelConfiguration"] = rest_field( - name="modelConfiguration", visibility=["read", "create", "update", "delete", "query"] - ) - """Configuration of the model used in the insight generation.""" - - @overload - def __init__( - self, - *, - eval_id: str, - run_ids: list[str], - model_configuration: Optional["_models.InsightModelConfiguration"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = InsightType.EVALUATION_RUN_CLUSTER_INSIGHT # type: ignore - - -class ScheduleTask(_Model): - """Schedule task model. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - EvaluationScheduleTask, InsightScheduleTask - - :ivar type: Type of the task. Required. Known values are: "Evaluation" and "Insight". - :vartype type: str or ~azure.ai.projects.models.ScheduleTaskType - :ivar configuration: Configuration for the task. 
- :vartype configuration: dict[str, str] - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Type of the task. Required. Known values are: \"Evaluation\" and \"Insight\".""" - configuration: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Configuration for the task.""" - - @overload - def __init__( - self, - *, - type: str, - configuration: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluationScheduleTask(ScheduleTask, discriminator="Evaluation"): - """Evaluation task for the schedule. - - :ivar configuration: Configuration for the task. - :vartype configuration: dict[str, str] - :ivar type: Required. Evaluation task. - :vartype type: str or ~azure.ai.projects.models.EVALUATION - :ivar eval_id: Identifier of the evaluation group. Required. - :vartype eval_id: str - :ivar eval_run: The evaluation run payload. Required. - :vartype eval_run: any - """ - - type: Literal[ScheduleTaskType.EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Evaluation task.""" - eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) - """Identifier of the evaluation group. Required.""" - eval_run: Any = rest_field(name="evalRun", visibility=["read", "create", "update", "delete", "query"]) - """The evaluation run payload. Required.""" - - @overload - def __init__( - self, - *, - eval_id: str, - eval_run: Any, - configuration: Optional[dict[str, str]] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ScheduleTaskType.EVALUATION # type: ignore - - -class EvaluationTaxonomy(_Model): - """Evaluation Taxonomy Definition. - - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar taxonomy_input: Input configuration for the evaluation taxonomy. Required. - :vartype taxonomy_input: ~azure.ai.projects.models.EvaluationTaxonomyInput - :ivar taxonomy_categories: List of taxonomy categories. - :vartype taxonomy_categories: list[~azure.ai.projects.models.TaxonomyCategory] - :ivar properties: Additional properties for the evaluation taxonomy. - :vartype properties: dict[str, str] - """ - - id: Optional[str] = rest_field(visibility=["read"]) - """Asset ID, a unique identifier for the asset.""" - name: str = rest_field(visibility=["read"]) - """The name of the resource. Required.""" - version: str = rest_field(visibility=["read"]) - """The version of the resource. Required.""" - description: Optional[str] = rest_field(visibility=["create", "update"]) - """The asset description text.""" - tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) - """Tag dictionary. Tags can be added, removed, and updated.""" - taxonomy_input: "_models.EvaluationTaxonomyInput" = rest_field( - name="taxonomyInput", visibility=["read", "create", "update", "delete", "query"] - ) - """Input configuration for the evaluation taxonomy. 
Required.""" - taxonomy_categories: Optional[list["_models.TaxonomyCategory"]] = rest_field( - name="taxonomyCategories", visibility=["read", "create", "update", "delete", "query"] - ) - """List of taxonomy categories.""" - properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Additional properties for the evaluation taxonomy.""" - - @overload - def __init__( - self, - *, - taxonomy_input: "_models.EvaluationTaxonomyInput", - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - taxonomy_categories: Optional[list["_models.TaxonomyCategory"]] = None, - properties: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluatorMetric(_Model): - """Evaluator Metric. - - :ivar type: Type of the metric. Known values are: "ordinal", "continuous", and "boolean". - :vartype type: str or ~azure.ai.projects.models.EvaluatorMetricType - :ivar desirable_direction: It indicates whether a higher value is better or a lower value is - better for this metric. Known values are: "increase", "decrease", and "neutral". - :vartype desirable_direction: str or ~azure.ai.projects.models.EvaluatorMetricDirection - :ivar min_value: Minimum value for the metric. - :vartype min_value: float - :ivar max_value: Maximum value for the metric. If not specified, it is assumed to be unbounded. - :vartype max_value: float - :ivar is_primary: Indicates if this metric is primary when there are multiple metrics. - :vartype is_primary: bool - """ - - type: Optional[Union[str, "_models.EvaluatorMetricType"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Type of the metric. 
Known values are: \"ordinal\", \"continuous\", and \"boolean\".""" - desirable_direction: Optional[Union[str, "_models.EvaluatorMetricDirection"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """It indicates whether a higher value is better or a lower value is better for this metric. Known - values are: \"increase\", \"decrease\", and \"neutral\".""" - min_value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Minimum value for the metric.""" - max_value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Maximum value for the metric. If not specified, it is assumed to be unbounded.""" - is_primary: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Indicates if this metric is primary when there are multiple metrics.""" - - @overload - def __init__( - self, - *, - type: Optional[Union[str, "_models.EvaluatorMetricType"]] = None, - desirable_direction: Optional[Union[str, "_models.EvaluatorMetricDirection"]] = None, - min_value: Optional[float] = None, - max_value: Optional[float] = None, - is_primary: Optional[bool] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluatorVersion(_Model): - """Evaluator Definition. - - :ivar display_name: Display Name for evaluator. It helps to find the evaluator easily in AI - Foundry. It does not need to be unique. - :vartype display_name: str - :ivar metadata: Metadata about the evaluator. - :vartype metadata: dict[str, str] - :ivar evaluator_type: The type of the evaluator. Required. Known values are: "builtin" and - "custom". 
- :vartype evaluator_type: str or ~azure.ai.projects.models.EvaluatorType - :ivar categories: The categories of the evaluator. Required. - :vartype categories: list[str or ~azure.ai.projects.models.EvaluatorCategory] - :ivar definition: Definition of the evaluator. Required. - :vartype definition: ~azure.ai.projects.models.EvaluatorDefinition - :ivar created_by: Creator of the evaluator. Required. - :vartype created_by: str - :ivar created_at: Creation date/time of the evaluator. Required. - :vartype created_at: int - :ivar modified_at: Last modified date/time of the evaluator. Required. - :vartype modified_at: int - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - """ - - display_name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Display Name for evaluator. It helps to find the evaluator easily in AI Foundry. It does not - need to be unique.""" - metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Metadata about the evaluator.""" - evaluator_type: Union[str, "_models.EvaluatorType"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The type of the evaluator. Required. Known values are: \"builtin\" and \"custom\".""" - categories: list[Union[str, "_models.EvaluatorCategory"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The categories of the evaluator. Required.""" - definition: "_models.EvaluatorDefinition" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Definition of the evaluator. 
Required.""" - created_by: str = rest_field(visibility=["read"]) - """Creator of the evaluator. Required.""" - created_at: int = rest_field(visibility=["read"]) - """Creation date/time of the evaluator. Required.""" - modified_at: int = rest_field(visibility=["read"]) - """Last modified date/time of the evaluator. Required.""" - id: Optional[str] = rest_field(visibility=["read"]) - """Asset ID, a unique identifier for the asset.""" - name: str = rest_field(visibility=["read"]) - """The name of the resource. Required.""" - version: str = rest_field(visibility=["read"]) - """The version of the resource. Required.""" - description: Optional[str] = rest_field(visibility=["create", "update"]) - """The asset description text.""" - tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) - """Tag dictionary. Tags can be added, removed, and updated.""" - - @overload - def __init__( - self, - *, - evaluator_type: Union[str, "_models.EvaluatorType"], - categories: list[Union[str, "_models.EvaluatorCategory"]], - definition: "_models.EvaluatorDefinition", - display_name: Optional[str] = None, - metadata: Optional[dict[str, str]] = None, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class FabricDataAgentToolParameters(_Model): - """The fabric data agent tool parameters. - - :ivar project_connections: The project connections attached to this tool. There can be a - maximum of 1 connection - resource attached to the tool. 
- :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] - """ - - project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The project connections attached to this tool. There can be a maximum of 1 connection - resource attached to the tool.""" - - @overload - def __init__( - self, - *, - project_connections: Optional[list["_models.ToolProjectConnection"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class FieldMapping(_Model): - """Field mapping configuration class. - - :ivar content_fields: List of fields with text content. Required. - :vartype content_fields: list[str] - :ivar filepath_field: Path of file to be used as a source of text content. - :vartype filepath_field: str - :ivar title_field: Field containing the title of the document. - :vartype title_field: str - :ivar url_field: Field containing the url of the document. - :vartype url_field: str - :ivar vector_fields: List of fields with vector content. - :vartype vector_fields: list[str] - :ivar metadata_fields: List of fields with metadata content. - :vartype metadata_fields: list[str] - """ - - content_fields: list[str] = rest_field(name="contentFields", visibility=["create"]) - """List of fields with text content. 
Required.""" - filepath_field: Optional[str] = rest_field(name="filepathField", visibility=["create"]) - """Path of file to be used as a source of text content.""" - title_field: Optional[str] = rest_field(name="titleField", visibility=["create"]) - """Field containing the title of the document.""" - url_field: Optional[str] = rest_field(name="urlField", visibility=["create"]) - """Field containing the url of the document.""" - vector_fields: Optional[list[str]] = rest_field(name="vectorFields", visibility=["create"]) - """List of fields with vector content.""" - metadata_fields: Optional[list[str]] = rest_field(name="metadataFields", visibility=["create"]) - """List of fields with metadata content.""" - - @overload - def __init__( - self, - *, - content_fields: list[str], - filepath_field: Optional[str] = None, - title_field: Optional[str] = None, - url_field: Optional[str] = None, - vector_fields: Optional[list[str]] = None, - metadata_fields: Optional[list[str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class FileDatasetVersion(DatasetVersion, discriminator="uri_file"): - """FileDatasetVersion Definition. - - :ivar data_uri: URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 - `_. Required. - :vartype data_uri: str - :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset - manages storage itself. If true, the underlying data will not be deleted when the dataset - version is deleted. - :vartype is_reference: bool - :ivar connection_name: The Azure Storage Account connection name. Required if - startPendingUploadVersion was not called before creating the Dataset. - :vartype connection_name: str - :ivar id: Asset ID, a unique identifier for the asset. 
- :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar type: Dataset type. Required. URI file. - :vartype type: str or ~azure.ai.projects.models.URI_FILE - """ - - type: Literal[DatasetType.URI_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Dataset type. Required. URI file.""" - - @overload - def __init__( - self, - *, - data_uri: str, - connection_name: Optional[str] = None, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = DatasetType.URI_FILE # type: ignore - - -class FileSearchTool(Tool, discriminator="file_search"): - """A tool that searches for relevant content from uploaded files. Learn more about the `file - search tool `_. - - :ivar type: The type of the file search tool. Always ``file_search``. Required. - :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH - :ivar vector_store_ids: The IDs of the vector stores to search. Required. - :vartype vector_store_ids: list[str] - :ivar max_num_results: The maximum number of results to return. This number should be between 1 - and 50 inclusive. - :vartype max_num_results: int - :ivar ranking_options: Ranking options for search. - :vartype ranking_options: ~azure.ai.projects.models.RankingOptions - :ivar filters: A filter to apply. Is either a ComparisonFilter type or a CompoundFilter type. 
- :vartype filters: ~azure.ai.projects.models.ComparisonFilter or - ~azure.ai.projects.models.CompoundFilter - """ - - type: Literal[ToolType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the file search tool. Always ``file_search``. Required.""" - vector_store_ids: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The IDs of the vector stores to search. Required.""" - max_num_results: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The maximum number of results to return. This number should be between 1 and 50 inclusive.""" - ranking_options: Optional["_models.RankingOptions"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Ranking options for search.""" - filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A filter to apply. Is either a ComparisonFilter type or a CompoundFilter type.""" - - @overload - def __init__( - self, - *, - vector_store_ids: list[str], - max_num_results: Optional[int] = None, - ranking_options: Optional["_models.RankingOptions"] = None, - filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.FILE_SEARCH # type: ignore - - -class FileSearchToolCallItemParam(ItemParam, discriminator="file_search_call"): - """The results of a file search tool call. See the - `file search guide `_ for more information. - - :ivar type: Required. 
- :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH_CALL - :ivar queries: The queries used to search for files. Required. - :vartype queries: list[str] - :ivar results: The results of the file search tool call. - :vartype results: list[~azure.ai.projects.models.FileSearchToolCallItemParamResult] - """ - - type: Literal[ItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The queries used to search for files. Required.""" - results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The results of the file search tool call.""" - - @overload - def __init__( - self, - *, - queries: list[str], - results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.FILE_SEARCH_CALL # type: ignore - - -class FileSearchToolCallItemParamResult(_Model): - """FileSearchToolCallItemParamResult. - - :ivar file_id: The unique ID of the file. - :vartype file_id: str - :ivar text: The text that was retrieved from the file. - :vartype text: str - :ivar filename: The name of the file. - :vartype filename: str - :ivar attributes: - :vartype attributes: ~azure.ai.projects.models.VectorStoreFileAttributes - :ivar score: The relevance score of the file - a value between 0 and 1. 
- :vartype score: float - """ - - file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the file.""" - text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text that was retrieved from the file.""" - filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the file.""" - attributes: Optional["_models.VectorStoreFileAttributes"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - score: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The relevance score of the file - a value between 0 and 1.""" - - @overload - def __init__( - self, - *, - file_id: Optional[str] = None, - text: Optional[str] = None, - filename: Optional[str] = None, - attributes: Optional["_models.VectorStoreFileAttributes"] = None, - score: Optional[float] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class FileSearchToolCallItemResource(ItemResource, discriminator="file_search_call"): - """The results of a file search tool call. See the - `file search guide `_ for more information. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH_CALL - :ivar status: The status of the file search tool call. One of ``in_progress``, - ``searching``, ``incomplete`` or ``failed``,. Required. 
Is one of the following types: - Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"], - Literal["failed"] - :vartype status: str or str or str or str or str - :ivar queries: The queries used to search for files. Required. - :vartype queries: list[str] - :ivar results: The results of the file search tool call. - :vartype results: list[~azure.ai.projects.models.FileSearchToolCallItemParamResult] - """ - - type: Literal[ItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the file search tool call. One of ``in_progress``, - ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: - Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], - Literal[\"incomplete\"], Literal[\"failed\"]""" - queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The queries used to search for files. Required.""" - results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The results of the file search tool call.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], - queries: list[str], - created_by: Optional["_models.CreatedBy"] = None, - results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.FILE_SEARCH_CALL # type: ignore - - -class FolderDatasetVersion(DatasetVersion, discriminator="uri_folder"): - """FileDatasetVersion Definition. - - :ivar data_uri: URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 - `_. Required. - :vartype data_uri: str - :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset - manages storage itself. If true, the underlying data will not be deleted when the dataset - version is deleted. - :vartype is_reference: bool - :ivar connection_name: The Azure Storage Account connection name. Required if - startPendingUploadVersion was not called before creating the Dataset. - :vartype connection_name: str - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar type: Dataset type. Required. URI folder. - :vartype type: str or ~azure.ai.projects.models.URI_FOLDER - """ - - type: Literal[DatasetType.URI_FOLDER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Dataset type. Required. URI folder.""" - - @overload - def __init__( - self, - *, - data_uri: str, - connection_name: Optional[str] = None, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = DatasetType.URI_FOLDER # type: ignore - - -class FunctionTool(Tool, discriminator="function"): - """Defines a function in your own code the model can choose to call. Learn more about `function - calling `_. - - :ivar type: The type of the function tool. Always ``function``. Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION - :ivar name: The name of the function to call. Required. - :vartype name: str - :ivar description: A description of the function. Used by the model to determine whether or not - to call the function. - :vartype description: str - :ivar parameters: A JSON schema object describing the parameters of the function. Required. - :vartype parameters: any - :ivar strict: Whether to enforce strict parameter validation. Default ``true``. Required. - :vartype strict: bool - """ - - type: Literal[ToolType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the function tool. Always ``function``. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to call. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A description of the function. Used by the model to determine whether or not to call the - function.""" - parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON schema object describing the parameters of the function. Required.""" - strict: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to enforce strict parameter validation. Default ``true``. 
Required.""" - - @overload - def __init__( - self, - *, - name: str, - parameters: Any, - strict: bool, - description: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.FUNCTION # type: ignore - - -class FunctionToolCallItemParam(ItemParam, discriminator="function_call"): - """A tool call to run a function. See the - `function calling guide `_ for more information. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL - :ivar call_id: The unique ID of the function tool call generated by the model. Required. - :vartype call_id: str - :ivar name: The name of the function to run. Required. - :vartype name: str - :ivar arguments: A JSON string of the arguments to pass to the function. Required. - :vartype arguments: str - """ - - type: Literal[ItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the function tool call generated by the model. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the arguments to pass to the function. Required.""" - - @overload - def __init__( - self, - *, - call_id: str, - name: str, - arguments: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.FUNCTION_CALL # type: ignore - - -class FunctionToolCallItemResource(ItemResource, discriminator="function_call"): - """A tool call to run a function. See the - `function calling guide `_ for more information. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar call_id: The unique ID of the function tool call generated by the model. Required. - :vartype call_id: str - :ivar name: The name of the function to run. Required. - :vartype name: str - :ivar arguments: A JSON string of the arguments to pass to the function. Required. - :vartype arguments: str - """ - - type: Literal[ItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the function tool call generated by the model. 
Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the arguments to pass to the function. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - call_id: str, - name: str, - arguments: str, - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.FUNCTION_CALL # type: ignore - - -class FunctionToolCallOutputItemParam(ItemParam, discriminator="function_call_output"): - """The output of a function tool call. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL_OUTPUT - :ivar call_id: The unique ID of the function tool call generated by the model. Required. - :vartype call_id: str - :ivar output: A JSON string of the output of the function tool call. Required. - :vartype output: str - """ - - type: Literal[ItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the function tool call generated by the model. Required.""" - output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the output of the function tool call. Required.""" - - @overload - def __init__( - self, - *, - call_id: str, - output: str, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.FUNCTION_CALL_OUTPUT # type: ignore - - -class FunctionToolCallOutputItemResource(ItemResource, discriminator="function_call_output"): - """The output of a function tool call. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL_OUTPUT - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar call_id: The unique ID of the function tool call generated by the model. Required. - :vartype call_id: str - :ivar output: A JSON string of the output of the function tool call. Required. - :vartype output: str - """ - - type: Literal[ItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the function tool call generated by the model. 
Required.""" - output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the output of the function tool call. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - call_id: str, - output: str, - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.FUNCTION_CALL_OUTPUT # type: ignore - - -class HostedAgentDefinition(AgentDefinition, discriminator="hosted"): - """The hosted agent definition. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ImageBasedHostedAgentDefinition - - :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. - :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar kind: Required. - :vartype kind: str or ~azure.ai.projects.models.HOSTED - :ivar tools: An array of tools the hosted agent's model may call while generating a response. - You - can specify which tool to use by setting the ``tool_choice`` parameter. - :vartype tools: list[~azure.ai.projects.models.Tool] - :ivar container_protocol_versions: The protocols that the agent supports for ingress - communication of the containers. Required. - :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] - :ivar cpu: The CPU configuration for the hosted agent. Required. - :vartype cpu: str - :ivar memory: The memory configuration for the hosted agent. Required. - :vartype memory: str - :ivar environment_variables: Environment variables to set in the hosted agent container. 
- :vartype environment_variables: dict[str, str] - """ - - __mapping__: dict[str, _Model] = {} - kind: Literal[AgentKind.HOSTED] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An array of tools the hosted agent's model may call while generating a response. You - can specify which tool to use by setting the ``tool_choice`` parameter.""" - container_protocol_versions: list["_models.ProtocolVersionRecord"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The protocols that the agent supports for ingress communication of the containers. Required.""" - cpu: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The CPU configuration for the hosted agent. Required.""" - memory: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The memory configuration for the hosted agent. Required.""" - environment_variables: Optional[dict[str, str]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Environment variables to set in the hosted agent container.""" - - @overload - def __init__( - self, - *, - container_protocol_versions: list["_models.ProtocolVersionRecord"], - cpu: str, - memory: str, - rai_config: Optional["_models.RaiConfig"] = None, - tools: Optional[list["_models.Tool"]] = None, - environment_variables: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.kind = AgentKind.HOSTED # type: ignore - - -class HourlyRecurrenceSchedule(RecurrenceSchedule, discriminator="Hourly"): - """Hourly recurrence schedule. - - :ivar type: Required. Hourly recurrence pattern. - :vartype type: str or ~azure.ai.projects.models.HOURLY - """ - - type: Literal[RecurrenceType.HOURLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Hourly recurrence pattern.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = RecurrenceType.HOURLY # type: ignore - - -class HumanEvaluationRuleAction(EvaluationRuleAction, discriminator="humanEvaluation"): - """Evaluation rule action for human evaluation. - - :ivar type: Required. Human evaluation. - :vartype type: str or ~azure.ai.projects.models.HUMAN_EVALUATION - :ivar template_id: Human evaluation template Id. Required. - :vartype template_id: str - """ - - type: Literal[EvaluationRuleActionType.HUMAN_EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Human evaluation.""" - template_id: str = rest_field(name="templateId", visibility=["read", "create", "update", "delete", "query"]) - """Human evaluation template Id. Required.""" - - @overload - def __init__( - self, - *, - template_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = EvaluationRuleActionType.HUMAN_EVALUATION # type: ignore - - -class ImageBasedHostedAgentDefinition(HostedAgentDefinition, discriminator="hosted"): - """The image-based deployment definition for a hosted agent. - - :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. - :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar tools: An array of tools the hosted agent's model may call while generating a response. - You - can specify which tool to use by setting the ``tool_choice`` parameter. - :vartype tools: list[~azure.ai.projects.models.Tool] - :ivar container_protocol_versions: The protocols that the agent supports for ingress - communication of the containers. Required. - :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] - :ivar cpu: The CPU configuration for the hosted agent. Required. - :vartype cpu: str - :ivar memory: The memory configuration for the hosted agent. Required. - :vartype memory: str - :ivar environment_variables: Environment variables to set in the hosted agent container. - :vartype environment_variables: dict[str, str] - :ivar kind: Required. - :vartype kind: str or ~azure.ai.projects.models.HOSTED - :ivar image: The image for the hosted agent. Required. - :vartype image: str - """ - - image: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The image for the hosted agent. Required.""" - - @overload - def __init__( - self, - *, - container_protocol_versions: list["_models.ProtocolVersionRecord"], - cpu: str, - memory: str, - image: str, - rai_config: Optional["_models.RaiConfig"] = None, - tools: Optional[list["_models.Tool"]] = None, - environment_variables: Optional[dict[str, str]] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ImageGenTool(Tool, discriminator="image_generation"): - """A tool that generates images using a model like ``gpt-image-1``. - - :ivar type: The type of the image generation tool. Always ``image_generation``. Required. - :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION - :ivar model: The image generation model to use. Default: ``gpt-image-1``. Default value is - "gpt-image-1". - :vartype model: str - :ivar quality: The quality of the generated image. One of ``low``, ``medium``, ``high``, - or ``auto``. Default: ``auto``. Is one of the following types: Literal["low"], - Literal["medium"], Literal["high"], Literal["auto"] - :vartype quality: str or str or str or str - :ivar size: The size of the generated image. One of ``1024x1024``, ``1024x1536``, - ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types: - Literal["1024x1024"], Literal["1024x1536"], Literal["1536x1024"], Literal["auto"] - :vartype size: str or str or str or str - :ivar output_format: The output format of the generated image. One of ``png``, ``webp``, or - ``jpeg``. Default: ``png``. Is one of the following types: Literal["png"], Literal["webp"], - Literal["jpeg"] - :vartype output_format: str or str or str - :ivar output_compression: Compression level for the output image. Default: 100. - :vartype output_compression: int - :ivar moderation: Moderation level for the generated image. Default: ``auto``. Is either a - Literal["auto"] type or a Literal["low"] type. - :vartype moderation: str or str - :ivar background: Background type for the generated image. One of ``transparent``, - ``opaque``, or ``auto``. Default: ``auto``. 
Is one of the following types: - Literal["transparent"], Literal["opaque"], Literal["auto"] - :vartype background: str or str or str - :ivar input_image_mask: Optional mask for inpainting. Contains ``image_url`` - (string, optional) and ``file_id`` (string, optional). - :vartype input_image_mask: ~azure.ai.projects.models.ImageGenToolInputImageMask - :ivar partial_images: Number of partial images to generate in streaming mode, from 0 (default - value) to 3. - :vartype partial_images: int - """ - - type: Literal[ToolType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the image generation tool. Always ``image_generation``. Required.""" - model: Optional[Literal["gpt-image-1"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The image generation model to use. Default: ``gpt-image-1``. Default value is \"gpt-image-1\".""" - quality: Optional[Literal["low", "medium", "high", "auto"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The quality of the generated image. One of ``low``, ``medium``, ``high``, - or ``auto``. Default: ``auto``. Is one of the following types: Literal[\"low\"], - Literal[\"medium\"], Literal[\"high\"], Literal[\"auto\"]""" - size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The size of the generated image. One of ``1024x1024``, ``1024x1536``, - ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types: - Literal[\"1024x1024\"], Literal[\"1024x1536\"], Literal[\"1536x1024\"], Literal[\"auto\"]""" - output_format: Optional[Literal["png", "webp", "jpeg"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The output format of the generated image. One of ``png``, ``webp``, or - ``jpeg``. Default: ``png``. 
Is one of the following types: Literal[\"png\"], Literal[\"webp\"], - Literal[\"jpeg\"]""" - output_compression: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Compression level for the output image. Default: 100.""" - moderation: Optional[Literal["auto", "low"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Moderation level for the generated image. Default: ``auto``. Is either a Literal[\"auto\"] type - or a Literal[\"low\"] type.""" - background: Optional[Literal["transparent", "opaque", "auto"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Background type for the generated image. One of ``transparent``, - ``opaque``, or ``auto``. Default: ``auto``. Is one of the following types: - Literal[\"transparent\"], Literal[\"opaque\"], Literal[\"auto\"]""" - input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Optional mask for inpainting. Contains ``image_url`` - (string, optional) and ``file_id`` (string, optional).""" - partial_images: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Number of partial images to generate in streaming mode, from 0 (default value) to 3.""" - - @overload - def __init__( - self, - *, - model: Optional[Literal["gpt-image-1"]] = None, - quality: Optional[Literal["low", "medium", "high", "auto"]] = None, - size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = None, - output_format: Optional[Literal["png", "webp", "jpeg"]] = None, - output_compression: Optional[int] = None, - moderation: Optional[Literal["auto", "low"]] = None, - background: Optional[Literal["transparent", "opaque", "auto"]] = None, - input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = None, - partial_images: Optional[int] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.IMAGE_GENERATION # type: ignore - - -class ImageGenToolCallItemParam(ItemParam, discriminator="image_generation_call"): - """An image generation request made by the model. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION_CALL - :ivar result: The generated image encoded in base64. Required. - :vartype result: str - """ - - type: Literal[ItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - result: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The generated image encoded in base64. Required.""" - - @overload - def __init__( - self, - *, - result: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.IMAGE_GENERATION_CALL # type: ignore - - -class ImageGenToolCallItemResource(ItemResource, discriminator="image_generation_call"): - """An image generation request made by the model. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION_CALL - :ivar status: Required. 
Is one of the following types: Literal["in_progress"], - Literal["completed"], Literal["generating"], Literal["failed"] - :vartype status: str or str or str or str - :ivar result: The generated image encoded in base64. Required. - :vartype result: str - """ - - type: Literal[ItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "generating", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required. Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], - Literal[\"generating\"], Literal[\"failed\"]""" - result: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The generated image encoded in base64. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "generating", "failed"], - result: str, - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.IMAGE_GENERATION_CALL # type: ignore - - -class ImageGenToolInputImageMask(_Model): - """ImageGenToolInputImageMask. - - :ivar image_url: Base64-encoded mask image. - :vartype image_url: str - :ivar file_id: File ID for the mask image. 
- :vartype file_id: str - """ - - image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Base64-encoded mask image.""" - file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """File ID for the mask image.""" - - @overload - def __init__( - self, - *, - image_url: Optional[str] = None, - file_id: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class Insight(_Model): - """The response body for cluster insights. - - :ivar id: The unique identifier for the insights report. Required. - :vartype id: str - :ivar metadata: Metadata about the insights report. Required. - :vartype metadata: ~azure.ai.projects.models.InsightsMetadata - :ivar state: The current state of the insights. Required. Known values are: "NotStarted", - "Running", "Succeeded", "Failed", and "Canceled". - :vartype state: str or ~azure.ai.projects.models.OperationState - :ivar display_name: User friendly display name for the insight. Required. - :vartype display_name: str - :ivar request: Request for the insights analysis. Required. - :vartype request: ~azure.ai.projects.models.InsightRequest - :ivar result: The result of the insights report. - :vartype result: ~azure.ai.projects.models.InsightResult - """ - - id: str = rest_field(visibility=["read"]) - """The unique identifier for the insights report. Required.""" - metadata: "_models.InsightsMetadata" = rest_field(visibility=["read"]) - """Metadata about the insights report. Required.""" - state: Union[str, "_models.OperationState"] = rest_field(visibility=["read"]) - """The current state of the insights. Required. 
Known values are: \"NotStarted\", \"Running\", - \"Succeeded\", \"Failed\", and \"Canceled\".""" - display_name: str = rest_field(name="displayName", visibility=["read", "create", "update", "delete", "query"]) - """User friendly display name for the insight. Required.""" - request: "_models.InsightRequest" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Request for the insights analysis. Required.""" - result: Optional["_models.InsightResult"] = rest_field(visibility=["read"]) - """The result of the insights report.""" - - @overload - def __init__( - self, - *, - display_name: str, - request: "_models.InsightRequest", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class InsightCluster(_Model): - """A cluster of analysis samples. - - :ivar id: The id of the analysis cluster. Required. - :vartype id: str - :ivar label: Label for the cluster. Required. - :vartype label: str - :ivar suggestion: Suggestion for the cluster. Required. - :vartype suggestion: str - :ivar description: Description of the analysis cluster. Required. - :vartype description: str - :ivar weight: The weight of the analysis cluster. This indicate number of samples in the - cluster. Required. - :vartype weight: int - :ivar sub_clusters: List of subclusters within this cluster. Empty if no subclusters exist. - :vartype sub_clusters: list[~azure.ai.projects.models.InsightCluster] - :ivar samples: List of samples that belong to this cluster. Empty if samples are part of - subclusters. - :vartype samples: list[~azure.ai.projects.models.InsightSample] - """ - - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The id of the analysis cluster. 
Required.""" - label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Label for the cluster. Required.""" - suggestion: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Suggestion for the cluster. Required.""" - description: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Description of the analysis cluster. Required.""" - weight: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The weight of the analysis cluster. This indicate number of samples in the cluster. Required.""" - sub_clusters: Optional[list["_models.InsightCluster"]] = rest_field( - name="subClusters", visibility=["read", "create", "update", "delete", "query"] - ) - """List of subclusters within this cluster. Empty if no subclusters exist.""" - samples: Optional[list["_models.InsightSample"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """List of samples that belong to this cluster. Empty if samples are part of subclusters.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - label: str, - suggestion: str, - description: str, - weight: int, - sub_clusters: Optional[list["_models.InsightCluster"]] = None, - samples: Optional[list["_models.InsightSample"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class InsightModelConfiguration(_Model): - """Configuration of the model used in the insight generation. - - :ivar model_deployment_name: The model deployment to be evaluated. Accepts either the - deployment name alone or with the connection name as '{connectionName}/'. - Required. 
- :vartype model_deployment_name: str - """ - - model_deployment_name: str = rest_field( - name="modelDeploymentName", visibility=["read", "create", "update", "delete", "query"] - ) - """The model deployment to be evaluated. Accepts either the deployment name alone or with the - connection name as '{connectionName}/'. Required.""" - - @overload - def __init__( - self, - *, - model_deployment_name: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class InsightScheduleTask(ScheduleTask, discriminator="Insight"): - """Insight task for the schedule. - - :ivar configuration: Configuration for the task. - :vartype configuration: dict[str, str] - :ivar type: Required. Insight task. - :vartype type: str or ~azure.ai.projects.models.INSIGHT - :ivar insight: The insight payload. Required. - :vartype insight: ~azure.ai.projects.models.Insight - """ - - type: Literal[ScheduleTaskType.INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Insight task.""" - insight: "_models.Insight" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The insight payload. Required.""" - - @overload - def __init__( - self, - *, - insight: "_models.Insight", - configuration: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ScheduleTaskType.INSIGHT # type: ignore - - -class InsightsMetadata(_Model): - """Metadata about the insights. 
- - :ivar created_at: The timestamp when the insights were created. Required. - :vartype created_at: ~datetime.datetime - :ivar completed_at: The timestamp when the insights were completed. - :vartype completed_at: ~datetime.datetime - """ - - created_at: datetime.datetime = rest_field( - name="createdAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" - ) - """The timestamp when the insights were created. Required.""" - completed_at: Optional[datetime.datetime] = rest_field( - name="completedAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" - ) - """The timestamp when the insights were completed.""" - - @overload - def __init__( - self, - *, - created_at: datetime.datetime, - completed_at: Optional[datetime.datetime] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class InsightSummary(_Model): - """Summary of the error cluster analysis. - - :ivar sample_count: Total number of samples analyzed. Required. - :vartype sample_count: int - :ivar unique_subcluster_count: Total number of unique subcluster labels. Required. - :vartype unique_subcluster_count: int - :ivar unique_cluster_count: Total number of unique clusters. Required. - :vartype unique_cluster_count: int - :ivar method: Method used for clustering. Required. - :vartype method: str - :ivar usage: Token usage while performing clustering analysis. Required. - :vartype usage: ~azure.ai.projects.models.ClusterTokenUsage - """ - - sample_count: int = rest_field(name="sampleCount", visibility=["read", "create", "update", "delete", "query"]) - """Total number of samples analyzed. 
Required.""" - unique_subcluster_count: int = rest_field( - name="uniqueSubclusterCount", visibility=["read", "create", "update", "delete", "query"] - ) - """Total number of unique subcluster labels. Required.""" - unique_cluster_count: int = rest_field( - name="uniqueClusterCount", visibility=["read", "create", "update", "delete", "query"] - ) - """Total number of unique clusters. Required.""" - method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Method used for clustering. Required.""" - usage: "_models.ClusterTokenUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Token usage while performing clustering analysis. Required.""" - - @overload - def __init__( - self, - *, - sample_count: int, - unique_subcluster_count: int, - unique_cluster_count: int, - method: str, - usage: "_models.ClusterTokenUsage", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class WorkflowActionOutputItemResource(ItemResource, discriminator="workflow_action"): - """WorkflowActionOutputItemResource. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - InvokeAzureAgentWorkflowActionOutputItemResource - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.WORKFLOW_ACTION - :ivar kind: The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required. - Default value is None. - :vartype kind: str - :ivar action_id: Unique identifier for the action. Required. 
- :vartype action_id: str - :ivar parent_action_id: ID of the parent action if this is a nested action. - :vartype parent_action_id: str - :ivar previous_action_id: ID of the previous action if this action follows another. - :vartype previous_action_id: str - :ivar status: Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). - Required. Is one of the following types: Literal["completed"], Literal["failed"], - Literal["in_progress"], Literal["cancelled"] - :vartype status: str or str or str or str - """ - - __mapping__: dict[str, _Model] = {} - type: Literal[ItemType.WORKFLOW_ACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) - """The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required. Default value is - None.""" - action_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Unique identifier for the action. Required.""" - parent_action_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """ID of the parent action if this is a nested action.""" - previous_action_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """ID of the previous action if this action follows another.""" - status: Literal["completed", "failed", "in_progress", "cancelled"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). Required. 
Is - one of the following types: Literal[\"completed\"], Literal[\"failed\"], - Literal[\"in_progress\"], Literal[\"cancelled\"]""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - kind: str, - action_id: str, - status: Literal["completed", "failed", "in_progress", "cancelled"], - created_by: Optional["_models.CreatedBy"] = None, - parent_action_id: Optional[str] = None, - previous_action_id: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.WORKFLOW_ACTION # type: ignore - - -class InvokeAzureAgentWorkflowActionOutputItemResource( - WorkflowActionOutputItemResource, discriminator="InvokeAzureAgent" -): # pylint: disable=name-too-long - """Details about an agent invocation as part of a workflow action. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.WORKFLOW_ACTION - :ivar action_id: Unique identifier for the action. Required. - :vartype action_id: str - :ivar parent_action_id: ID of the parent action if this is a nested action. - :vartype parent_action_id: str - :ivar previous_action_id: ID of the previous action if this action follows another. - :vartype previous_action_id: str - :ivar status: Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). - Required. Is one of the following types: Literal["completed"], Literal["failed"], - Literal["in_progress"], Literal["cancelled"] - :vartype status: str or str or str or str - :ivar kind: Required. Default value is "InvokeAzureAgent". - :vartype kind: str - :ivar agent: Agent id. 
Required. - :vartype agent: ~azure.ai.projects.models.AgentId - :ivar conversation_id: ID of the conversation for the agent invocation. - :vartype conversation_id: str - :ivar response_id: The response id for the agent invocation. Required. - :vartype response_id: str - """ - - __mapping__: dict[str, _Model] = {} - kind: Literal["InvokeAzureAgent"] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Default value is \"InvokeAzureAgent\".""" - agent: "_models.AgentId" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Agent id. Required.""" - conversation_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """ID of the conversation for the agent invocation.""" - response_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The response id for the agent invocation. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - action_id: str, - status: Literal["completed", "failed", "in_progress", "cancelled"], - agent: "_models.AgentId", - response_id: str, - created_by: Optional["_models.CreatedBy"] = None, - parent_action_id: Optional[str] = None, - previous_action_id: Optional[str] = None, - conversation_id: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.kind = "InvokeAzureAgent" # type: ignore - - -class ItemContent(_Model): - """ItemContent. - - You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - ItemContentInputAudio, ItemContentInputFile, ItemContentInputImage, ItemContentInputText, - ItemContentOutputAudio, ItemContentOutputText, ItemContentRefusal - - :ivar type: Required. Known values are: "input_text", "input_audio", "input_image", - "input_file", "output_text", "output_audio", and "refusal". - :vartype type: str or ~azure.ai.projects.models.ItemContentType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"input_text\", \"input_audio\", \"input_image\", \"input_file\", - \"output_text\", \"output_audio\", and \"refusal\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ItemContentInputAudio(ItemContent, discriminator="input_audio"): - """An audio input to the model. - - :ivar type: The type of the input item. Always ``input_audio``. Required. - :vartype type: str or ~azure.ai.projects.models.INPUT_AUDIO - :ivar data: Base64-encoded audio data. Required. - :vartype data: str - :ivar format: The format of the audio data. Currently supported formats are ``mp3`` and - ``wav``. Required. Is either a Literal["mp3"] type or a Literal["wav"] type. - :vartype format: str or str - """ - - type: Literal[ItemContentType.INPUT_AUDIO] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_audio``. Required.""" - data: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Base64-encoded audio data. 
Required.""" - format: Literal["mp3", "wav"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The format of the audio data. Currently supported formats are ``mp3`` and - ``wav``. Required. Is either a Literal[\"mp3\"] type or a Literal[\"wav\"] type.""" - - @overload - def __init__( - self, - *, - data: str, - format: Literal["mp3", "wav"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemContentType.INPUT_AUDIO # type: ignore - - -class ItemContentInputFile(ItemContent, discriminator="input_file"): - """A file input to the model. - - :ivar type: The type of the input item. Always ``input_file``. Required. - :vartype type: str or ~azure.ai.projects.models.INPUT_FILE - :ivar file_id: The ID of the file to be sent to the model. - :vartype file_id: str - :ivar filename: The name of the file to be sent to the model. - :vartype filename: str - :ivar file_data: The content of the file to be sent to the model. - :vartype file_data: str - """ - - type: Literal[ItemContentType.INPUT_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_file``. 
Required.""" - file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the file to be sent to the model.""" - filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the file to be sent to the model.""" - file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content of the file to be sent to the model.""" - - @overload - def __init__( - self, - *, - file_id: Optional[str] = None, - filename: Optional[str] = None, - file_data: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemContentType.INPUT_FILE # type: ignore - - -class ItemContentInputImage(ItemContent, discriminator="input_image"): - """An image input to the model. Learn about `image inputs `_. - - :ivar type: The type of the input item. Always ``input_image``. Required. - :vartype type: str or ~azure.ai.projects.models.INPUT_IMAGE - :ivar image_url: The URL of the image to be sent to the model. A fully qualified URL or base64 - encoded image in a data URL. - :vartype image_url: str - :ivar file_id: The ID of the file to be sent to the model. - :vartype file_id: str - :ivar detail: The detail level of the image to be sent to the model. One of ``high``, ``low``, - or ``auto``. Defaults to ``auto``. Is one of the following types: Literal["low"], - Literal["high"], Literal["auto"] - :vartype detail: str or str or str - """ - - type: Literal[ItemContentType.INPUT_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_image``. 
Required.""" - image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The URL of the image to be sent to the model. A fully qualified URL or base64 encoded image in - a data URL.""" - file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the file to be sent to the model.""" - detail: Optional[Literal["low", "high", "auto"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The detail level of the image to be sent to the model. One of ``high``, ``low``, or ``auto``. - Defaults to ``auto``. Is one of the following types: Literal[\"low\"], Literal[\"high\"], - Literal[\"auto\"]""" - - @overload - def __init__( - self, - *, - image_url: Optional[str] = None, - file_id: Optional[str] = None, - detail: Optional[Literal["low", "high", "auto"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemContentType.INPUT_IMAGE # type: ignore - - -class ItemContentInputText(ItemContent, discriminator="input_text"): - """A text input to the model. - - :ivar type: The type of the input item. Always ``input_text``. Required. - :vartype type: str or ~azure.ai.projects.models.INPUT_TEXT - :ivar text: The text input to the model. Required. - :vartype text: str - """ - - type: Literal[ItemContentType.INPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_text``. Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text input to the model. Required.""" - - @overload - def __init__( - self, - *, - text: str, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemContentType.INPUT_TEXT # type: ignore - - -class ItemContentOutputAudio(ItemContent, discriminator="output_audio"): - """An audio output from the model. - - :ivar type: The type of the output audio. Always ``output_audio``. Required. - :vartype type: str or ~azure.ai.projects.models.OUTPUT_AUDIO - :ivar data: Base64-encoded audio data from the model. Required. - :vartype data: str - :ivar transcript: The transcript of the audio data from the model. Required. - :vartype transcript: str - """ - - type: Literal[ItemContentType.OUTPUT_AUDIO] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the output audio. Always ``output_audio``. Required.""" - data: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Base64-encoded audio data from the model. Required.""" - transcript: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The transcript of the audio data from the model. Required.""" - - @overload - def __init__( - self, - *, - data: str, - transcript: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemContentType.OUTPUT_AUDIO # type: ignore - - -class ItemContentOutputText(ItemContent, discriminator="output_text"): - """A text output from the model. - - :ivar type: The type of the output text. Always ``output_text``. Required. 
- :vartype type: str or ~azure.ai.projects.models.OUTPUT_TEXT - :ivar text: The text output from the model. Required. - :vartype text: str - :ivar annotations: The annotations of the text output. Required. - :vartype annotations: list[~azure.ai.projects.models.Annotation] - :ivar logprobs: - :vartype logprobs: list[~azure.ai.projects.models.LogProb] - """ - - type: Literal[ItemContentType.OUTPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the output text. Always ``output_text``. Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text output from the model. Required.""" - annotations: list["_models.Annotation"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The annotations of the text output. Required.""" - logprobs: Optional[list["_models.LogProb"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - - @overload - def __init__( - self, - *, - text: str, - annotations: list["_models.Annotation"], - logprobs: Optional[list["_models.LogProb"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemContentType.OUTPUT_TEXT # type: ignore - - -class ItemContentRefusal(ItemContent, discriminator="refusal"): - """A refusal from the model. - - :ivar type: The type of the refusal. Always ``refusal``. Required. - :vartype type: str or ~azure.ai.projects.models.REFUSAL - :ivar refusal: The refusal explanationfrom the model. Required. 
- :vartype refusal: str - """ - - type: Literal[ItemContentType.REFUSAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the refusal. Always ``refusal``. Required.""" - refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The refusal explanationfrom the model. Required.""" - - @overload - def __init__( - self, - *, - refusal: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemContentType.REFUSAL # type: ignore - - -class ItemReferenceItemParam(ItemParam, discriminator="item_reference"): - """An internal identifier for an item to reference. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.ITEM_REFERENCE - :ivar id: The service-originated ID of the previously generated response item being referenced. - Required. - :vartype id: str - """ - - type: Literal[ItemType.ITEM_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The service-originated ID of the previously generated response item being referenced. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.ITEM_REFERENCE # type: ignore - - -class LocalShellExecAction(_Model): - """Execute a shell command on the server. - - :ivar type: The type of the local shell action. Always ``exec``. Required. Default value is - "exec". - :vartype type: str - :ivar command: The command to run. Required. - :vartype command: list[str] - :ivar timeout_ms: Optional timeout in milliseconds for the command. - :vartype timeout_ms: int - :ivar working_directory: Optional working directory to run the command in. - :vartype working_directory: str - :ivar env: Environment variables to set for the command. Required. - :vartype env: dict[str, str] - :ivar user: Optional user to run the command as. - :vartype user: str - """ - - type: Literal["exec"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The type of the local shell action. Always ``exec``. Required. Default value is \"exec\".""" - command: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The command to run. Required.""" - timeout_ms: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional timeout in milliseconds for the command.""" - working_directory: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional working directory to run the command in.""" - env: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Environment variables to set for the command. 
Required.""" - user: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional user to run the command as.""" - - @overload - def __init__( - self, - *, - command: list[str], - env: dict[str, str], - timeout_ms: Optional[int] = None, - working_directory: Optional[str] = None, - user: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type: Literal["exec"] = "exec" - - -class LocalShellTool(Tool, discriminator="local_shell"): - """A tool that allows the model to execute shell commands in a local environment. - - :ivar type: The type of the local shell tool. Always ``local_shell``. Required. - :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL - """ - - type: Literal[ToolType.LOCAL_SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the local shell tool. Always ``local_shell``. Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.LOCAL_SHELL # type: ignore - - -class LocalShellToolCallItemParam(ItemParam, discriminator="local_shell_call"): - """A tool call to run a command on the local shell. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL - :ivar call_id: The unique ID of the local shell tool call generated by the model. Required. - :vartype call_id: str - :ivar action: Required. 
- :vartype action: ~azure.ai.projects.models.LocalShellExecAction - """ - - type: Literal[ItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the local shell tool call generated by the model. Required.""" - action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - - @overload - def __init__( - self, - *, - call_id: str, - action: "_models.LocalShellExecAction", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.LOCAL_SHELL_CALL # type: ignore - - -class LocalShellToolCallItemResource(ItemResource, discriminator="local_shell_call"): - """A tool call to run a command on the local shell. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL - :ivar status: Required. Is one of the following types: Literal["in_progress"], - Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar call_id: The unique ID of the local shell tool call generated by the model. Required. - :vartype call_id: str - :ivar action: Required. 
- :vartype action: ~azure.ai.projects.models.LocalShellExecAction - """ - - type: Literal[ItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required. Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], - Literal[\"incomplete\"]""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the local shell tool call generated by the model. Required.""" - action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - call_id: str, - action: "_models.LocalShellExecAction", - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.LOCAL_SHELL_CALL # type: ignore - - -class LocalShellToolCallOutputItemParam(ItemParam, discriminator="local_shell_call_output"): - """The output of a local shell tool call. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL_OUTPUT - :ivar output: A JSON string of the output of the local shell tool call. Required. 
- :vartype output: str - """ - - type: Literal[ItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the output of the local shell tool call. Required.""" - - @overload - def __init__( - self, - *, - output: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.LOCAL_SHELL_CALL_OUTPUT # type: ignore - - -class LocalShellToolCallOutputItemResource(ItemResource, discriminator="local_shell_call_output"): - """The output of a local shell tool call. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL_OUTPUT - :ivar status: Required. Is one of the following types: Literal["in_progress"], - Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar output: A JSON string of the output of the local shell tool call. Required. - :vartype output: str - """ - - type: Literal[ItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required. 
Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], - Literal[\"incomplete\"]""" - output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the output of the local shell tool call. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - output: str, - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.LOCAL_SHELL_CALL_OUTPUT # type: ignore - - -class LogProb(_Model): - """The log probability of a token. - - :ivar token: Required. - :vartype token: str - :ivar logprob: Required. - :vartype logprob: float - :ivar bytes: Required. - :vartype bytes: list[int] - :ivar top_logprobs: Required. - :vartype top_logprobs: list[~azure.ai.projects.models.TopLogProb] - """ - - token: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - logprob: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - bytes: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - top_logprobs: list["_models.TopLogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - - @overload - def __init__( - self, - *, - token: str, - logprob: float, - bytes: list[int], - top_logprobs: list["_models.TopLogProb"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ManagedAzureAISearchIndex(Index, discriminator="ManagedAzureSearch"): - """Managed Azure AI Search Index Definition. - - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar type: Type of index. Required. Managed Azure Search - :vartype type: str or ~azure.ai.projects.models.MANAGED_AZURE_SEARCH - :ivar vector_store_id: Vector store id of managed index. Required. - :vartype vector_store_id: str - """ - - type: Literal[IndexType.MANAGED_AZURE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Type of index. Required. Managed Azure Search""" - vector_store_id: str = rest_field(name="vectorStoreId", visibility=["create"]) - """Vector store id of managed index. Required.""" - - @overload - def __init__( - self, - *, - vector_store_id: str, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = IndexType.MANAGED_AZURE_SEARCH # type: ignore - - -class MCPApprovalRequestItemParam(ItemParam, discriminator="mcp_approval_request"): - """A request for human approval of a tool invocation. - - :ivar type: Required. 
- :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_REQUEST - :ivar server_label: The label of the MCP server making the request. Required. - :vartype server_label: str - :ivar name: The name of the tool to run. Required. - :vartype name: str - :ivar arguments: A JSON string of arguments for the tool. Required. - :vartype arguments: str - """ - - type: Literal[ItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server making the request. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool to run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of arguments for the tool. Required.""" - - @overload - def __init__( - self, - *, - server_label: str, - name: str, - arguments: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.MCP_APPROVAL_REQUEST # type: ignore - - -class MCPApprovalRequestItemResource(ItemResource, discriminator="mcp_approval_request"): - """A request for human approval of a tool invocation. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_REQUEST - :ivar server_label: The label of the MCP server making the request. Required. - :vartype server_label: str - :ivar name: The name of the tool to run. Required. 
- :vartype name: str - :ivar arguments: A JSON string of arguments for the tool. Required. - :vartype arguments: str - """ - - type: Literal[ItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server making the request. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool to run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of arguments for the tool. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - server_label: str, - name: str, - arguments: str, - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.MCP_APPROVAL_REQUEST # type: ignore - - -class MCPApprovalResponseItemParam(ItemParam, discriminator="mcp_approval_response"): - """A response to an MCP approval request. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_RESPONSE - :ivar approval_request_id: The ID of the approval request being answered. Required. - :vartype approval_request_id: str - :ivar approve: Whether the request was approved. Required. - :vartype approve: bool - :ivar reason: Optional reason for the decision. 
- :vartype reason: str - """ - - type: Literal[ItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the approval request being answered. Required.""" - approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the request was approved. Required.""" - reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional reason for the decision.""" - - @overload - def __init__( - self, - *, - approval_request_id: str, - approve: bool, - reason: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.MCP_APPROVAL_RESPONSE # type: ignore - - -class MCPApprovalResponseItemResource(ItemResource, discriminator="mcp_approval_response"): - """A response to an MCP approval request. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_RESPONSE - :ivar approval_request_id: The ID of the approval request being answered. Required. - :vartype approval_request_id: str - :ivar approve: Whether the request was approved. Required. - :vartype approve: bool - :ivar reason: Optional reason for the decision. 
- :vartype reason: str - """ - - type: Literal[ItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the approval request being answered. Required.""" - approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the request was approved. Required.""" - reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional reason for the decision.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - approval_request_id: str, - approve: bool, - created_by: Optional["_models.CreatedBy"] = None, - reason: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.MCP_APPROVAL_RESPONSE # type: ignore - - -class MCPCallItemParam(ItemParam, discriminator="mcp_call"): - """An invocation of a tool on an MCP server. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_CALL - :ivar server_label: The label of the MCP server running the tool. Required. - :vartype server_label: str - :ivar name: The name of the tool that was run. Required. - :vartype name: str - :ivar arguments: A JSON string of the arguments passed to the tool. Required. - :vartype arguments: str - :ivar output: The output from the tool call. - :vartype output: str - :ivar error: The error from the tool call, if any. 
- :vartype error: str - """ - - type: Literal[ItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server running the tool. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool that was run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the arguments passed to the tool. Required.""" - output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The output from the tool call.""" - error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error from the tool call, if any.""" - - @overload - def __init__( - self, - *, - server_label: str, - name: str, - arguments: str, - output: Optional[str] = None, - error: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.MCP_CALL # type: ignore - - -class MCPCallItemResource(ItemResource, discriminator="mcp_call"): - """An invocation of a tool on an MCP server. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_CALL - :ivar server_label: The label of the MCP server running the tool. Required. - :vartype server_label: str - :ivar name: The name of the tool that was run. Required. 
- :vartype name: str - :ivar arguments: A JSON string of the arguments passed to the tool. Required. - :vartype arguments: str - :ivar output: The output from the tool call. - :vartype output: str - :ivar error: The error from the tool call, if any. - :vartype error: str - """ - - type: Literal[ItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server running the tool. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool that was run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the arguments passed to the tool. Required.""" - output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The output from the tool call.""" - error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error from the tool call, if any.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - server_label: str, - name: str, - arguments: str, - created_by: Optional["_models.CreatedBy"] = None, - output: Optional[str] = None, - error: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.MCP_CALL # type: ignore - - -class MCPListToolsItemParam(ItemParam, discriminator="mcp_list_tools"): - """A list of tools available on an MCP server. - - :ivar type: Required. 
- :vartype type: str or ~azure.ai.projects.models.MCP_LIST_TOOLS - :ivar server_label: The label of the MCP server. Required. - :vartype server_label: str - :ivar tools: The tools available on the server. Required. - :vartype tools: list[~azure.ai.projects.models.MCPListToolsTool] - :ivar error: Error message if the server could not list tools. - :vartype error: str - """ - - type: Literal[ItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server. Required.""" - tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The tools available on the server. Required.""" - error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Error message if the server could not list tools.""" - - @overload - def __init__( - self, - *, - server_label: str, - tools: list["_models.MCPListToolsTool"], - error: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.MCP_LIST_TOOLS # type: ignore - - -class MCPListToolsItemResource(ItemResource, discriminator="mcp_list_tools"): - """A list of tools available on an MCP server. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_LIST_TOOLS - :ivar server_label: The label of the MCP server. Required. 
- :vartype server_label: str - :ivar tools: The tools available on the server. Required. - :vartype tools: list[~azure.ai.projects.models.MCPListToolsTool] - :ivar error: Error message if the server could not list tools. - :vartype error: str - """ - - type: Literal[ItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server. Required.""" - tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The tools available on the server. Required.""" - error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Error message if the server could not list tools.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - server_label: str, - tools: list["_models.MCPListToolsTool"], - created_by: Optional["_models.CreatedBy"] = None, - error: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.MCP_LIST_TOOLS # type: ignore - - -class MCPListToolsTool(_Model): - """A tool available on an MCP server. - - :ivar name: The name of the tool. Required. - :vartype name: str - :ivar description: The description of the tool. - :vartype description: str - :ivar input_schema: The JSON schema describing the tool's input. Required. - :vartype input_schema: any - :ivar annotations: Additional annotations about the tool. - :vartype annotations: any - """ - - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool. 
Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The description of the tool.""" - input_schema: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The JSON schema describing the tool's input. Required.""" - annotations: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Additional annotations about the tool.""" - - @overload - def __init__( - self, - *, - name: str, - input_schema: Any, - description: Optional[str] = None, - annotations: Optional[Any] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MCPTool(Tool, discriminator="mcp"): - """Give the model access to additional tools via remote Model Context Protocol - (MCP) servers. `Learn more about MCP `_. - - :ivar type: The type of the MCP tool. Always ``mcp``. Required. - :vartype type: str or ~azure.ai.projects.models.MCP - :ivar server_label: A label for this MCP server, used to identify it in tool calls. Required. - :vartype server_label: str - :ivar server_url: The URL for the MCP server. Required. - :vartype server_url: str - :ivar headers: Optional HTTP headers to send to the MCP server. Use for authentication - or other purposes. - :vartype headers: dict[str, str] - :ivar allowed_tools: List of allowed tool names or a filter object. Is either a [str] type or a - MCPToolAllowedTools1 type. - :vartype allowed_tools: list[str] or ~azure.ai.projects.models.MCPToolAllowedTools1 - :ivar require_approval: Specify which of the MCP server's tools require approval. 
Is one of the - following types: MCPToolRequireApproval1, Literal["always"], Literal["never"] - :vartype require_approval: ~azure.ai.projects.models.MCPToolRequireApproval1 or str or str - :ivar project_connection_id: The connection ID in the project for the MCP server. The - connection stores authentication and other connection details needed to connect to the MCP - server. - :vartype project_connection_id: str - """ - - type: Literal[ToolType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the MCP tool. Always ``mcp``. Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A label for this MCP server, used to identify it in tool calls. Required.""" - server_url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The URL for the MCP server. Required.""" - headers: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional HTTP headers to send to the MCP server. Use for authentication - or other purposes.""" - allowed_tools: Optional[Union[list[str], "_models.MCPToolAllowedTools1"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """List of allowed tool names or a filter object. Is either a [str] type or a MCPToolAllowedTools1 - type.""" - require_approval: Optional[Union["_models.MCPToolRequireApproval1", Literal["always"], Literal["never"]]] = ( - rest_field(visibility=["read", "create", "update", "delete", "query"]) - ) - """Specify which of the MCP server's tools require approval. Is one of the following types: - MCPToolRequireApproval1, Literal[\"always\"], Literal[\"never\"]""" - project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The connection ID in the project for the MCP server. 
The connection stores authentication and - other connection details needed to connect to the MCP server.""" - - @overload - def __init__( - self, - *, - server_label: str, - server_url: str, - headers: Optional[dict[str, str]] = None, - allowed_tools: Optional[Union[list[str], "_models.MCPToolAllowedTools1"]] = None, - require_approval: Optional[ - Union["_models.MCPToolRequireApproval1", Literal["always"], Literal["never"]] - ] = None, - project_connection_id: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.MCP # type: ignore - - -class MCPToolAllowedTools1(_Model): - """MCPToolAllowedTools1. - - :ivar tool_names: List of allowed tool names. - :vartype tool_names: list[str] - """ - - tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """List of allowed tool names.""" - - @overload - def __init__( - self, - *, - tool_names: Optional[list[str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MCPToolRequireApproval1(_Model): - """MCPToolRequireApproval1. - - :ivar always: A list of tools that always require approval. - :vartype always: ~azure.ai.projects.models.MCPToolRequireApprovalAlways - :ivar never: A list of tools that never require approval. 
- :vartype never: ~azure.ai.projects.models.MCPToolRequireApprovalNever - """ - - always: Optional["_models.MCPToolRequireApprovalAlways"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A list of tools that always require approval.""" - never: Optional["_models.MCPToolRequireApprovalNever"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A list of tools that never require approval.""" - - @overload - def __init__( - self, - *, - always: Optional["_models.MCPToolRequireApprovalAlways"] = None, - never: Optional["_models.MCPToolRequireApprovalNever"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MCPToolRequireApprovalAlways(_Model): - """MCPToolRequireApprovalAlways. - - :ivar tool_names: List of tools that require approval. - :vartype tool_names: list[str] - """ - - tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """List of tools that require approval.""" - - @overload - def __init__( - self, - *, - tool_names: Optional[list[str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MCPToolRequireApprovalNever(_Model): - """MCPToolRequireApprovalNever. - - :ivar tool_names: List of tools that do not require approval. 
- :vartype tool_names: list[str] - """ - - tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """List of tools that do not require approval.""" - - @overload - def __init__( - self, - *, - tool_names: Optional[list[str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MemoryOperation(_Model): - """Represents a single memory operation (create, update, or delete) performed on a memory item. - - :ivar kind: The type of memory operation being performed. Required. Known values are: "create", - "update", and "delete". - :vartype kind: str or ~azure.ai.projects.models.MemoryOperationKind - :ivar memory_item: The memory item to create, update, or delete. Required. - :vartype memory_item: ~azure.ai.projects.models.MemoryItem - """ - - kind: Union[str, "_models.MemoryOperationKind"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The type of memory operation being performed. Required. Known values are: \"create\", - \"update\", and \"delete\".""" - memory_item: "_models.MemoryItem" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The memory item to create, update, or delete. Required.""" - - @overload - def __init__( - self, - *, - kind: Union[str, "_models.MemoryOperationKind"], - memory_item: "_models.MemoryItem", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MemorySearchItem(_Model): - """A retrieved memory item from memory search. 
- - :ivar memory_item: Retrieved memory item. Required. - :vartype memory_item: ~azure.ai.projects.models.MemoryItem - """ - - memory_item: "_models.MemoryItem" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Retrieved memory item. Required.""" - - @overload - def __init__( - self, - *, - memory_item: "_models.MemoryItem", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MemorySearchOptions(_Model): - """Memory search options. - - :ivar max_memories: Maximum number of memory items to return. - :vartype max_memories: int - """ - - max_memories: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Maximum number of memory items to return.""" - - @overload - def __init__( - self, - *, - max_memories: Optional[int] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MemorySearchTool(Tool, discriminator="memory_search"): - """A tool for integrating memories into the agent. - - :ivar type: The type of the tool. Always ``memory_search``. Required. - :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH - :ivar memory_store_name: The name of the memory store to use. Required. - :vartype memory_store_name: str - :ivar scope: The namespace used to group and isolate memories, such as a user ID. - Limits which memories can be retrieved or updated. - Use special variable ``{{$userId}}`` to scope memories to the current signed-in user. Required. 
- :vartype scope: str - :ivar search_options: Options for searching the memory store. - :vartype search_options: ~azure.ai.projects.models.MemorySearchOptions - :ivar update_delay: The amount of time to wait after inactivity before updating memories with - messages from the call (e.g., '0s', '5m'). Defaults to '60s'. - :vartype update_delay: ~datetime.timedelta - """ - - type: Literal[ToolType.MEMORY_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the tool. Always ``memory_search``. Required.""" - memory_store_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the memory store to use. Required.""" - scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The namespace used to group and isolate memories, such as a user ID. - Limits which memories can be retrieved or updated. - Use special variable ``{{$userId}}`` to scope memories to the current signed-in user. Required.""" - search_options: Optional["_models.MemorySearchOptions"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Options for searching the memory store.""" - update_delay: Optional[datetime.timedelta] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The amount of time to wait after inactivity before updating memories with messages from the - call (e.g., '0s', '5m'). Defaults to '60s'.""" - - @overload - def __init__( - self, - *, - memory_store_name: str, - scope: str, - search_options: Optional["_models.MemorySearchOptions"] = None, - update_delay: Optional[datetime.timedelta] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.MEMORY_SEARCH # type: ignore - - -class MemorySearchToolCallItemParam(ItemParam, discriminator="memory_search_call"): - """MemorySearchToolCallItemParam. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH_CALL - :ivar results: The results returned from the memory search. - :vartype results: list[~azure.ai.projects.models.MemorySearchItem] - """ - - type: Literal[ItemType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - results: Optional[list["_models.MemorySearchItem"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The results returned from the memory search.""" - - @overload - def __init__( - self, - *, - results: Optional[list["_models.MemorySearchItem"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.MEMORY_SEARCH_CALL # type: ignore - - -class MemorySearchToolCallItemResource(ItemResource, discriminator="memory_search_call"): - """MemorySearchToolCallItemResource. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH_CALL - :ivar status: The status of the memory search tool call. One of ``in_progress``, - ``searching``, ``completed``, ``incomplete`` or ``failed``,. Required. 
Is one of the following - types: Literal["in_progress"], Literal["searching"], Literal["completed"], - Literal["incomplete"], Literal["failed"] - :vartype status: str or str or str or str or str - :ivar results: The results returned from the memory search. - :vartype results: list[~azure.ai.projects.models.MemorySearchItem] - """ - - type: Literal[ItemType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the memory search tool call. One of ``in_progress``, - ``searching``, ``completed``, ``incomplete`` or ``failed``,. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], - Literal[\"incomplete\"], Literal[\"failed\"]""" - results: Optional[list["_models.MemorySearchItem"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The results returned from the memory search.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], - created_by: Optional["_models.CreatedBy"] = None, - results: Optional[list["_models.MemorySearchItem"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.MEMORY_SEARCH_CALL # type: ignore - - -class MemoryStoreDefinition(_Model): - """Base definition for memory store configurations. - - You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - MemoryStoreDefaultDefinition - - :ivar kind: The kind of the memory store. Required. "default" - :vartype kind: str or ~azure.ai.projects.models.MemoryStoreKind - """ - - __mapping__: dict[str, _Model] = {} - kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) - """The kind of the memory store. Required. \"default\"""" - - @overload - def __init__( - self, - *, - kind: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MemoryStoreDefaultDefinition(MemoryStoreDefinition, discriminator="default"): - """Default memory store implementation. - - :ivar kind: The kind of the memory store. Required. The default memory store implementation. - :vartype kind: str or ~azure.ai.projects.models.DEFAULT - :ivar chat_model: The name or identifier of the chat completion model deployment used for - memory processing. Required. - :vartype chat_model: str - :ivar embedding_model: The name or identifier of the embedding model deployment used for memory - processing. Required. - :vartype embedding_model: str - :ivar options: Default memory store options. - :vartype options: ~azure.ai.projects.models.MemoryStoreDefaultOptions - """ - - kind: Literal[MemoryStoreKind.DEFAULT] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The kind of the memory store. Required. The default memory store implementation.""" - chat_model: str = rest_field(visibility=["read", "create"]) - """The name or identifier of the chat completion model deployment used for memory processing. 
- Required.""" - embedding_model: str = rest_field(visibility=["read", "create"]) - """The name or identifier of the embedding model deployment used for memory processing. Required.""" - options: Optional["_models.MemoryStoreDefaultOptions"] = rest_field(visibility=["read", "create"]) - """Default memory store options.""" - - @overload - def __init__( - self, - *, - chat_model: str, - embedding_model: str, - options: Optional["_models.MemoryStoreDefaultOptions"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.kind = MemoryStoreKind.DEFAULT # type: ignore - - -class MemoryStoreDefaultOptions(_Model): - """Default memory store configurations. - - :ivar user_profile_enabled: Whether to enable user profile extraction and storage. Default is - true. Required. - :vartype user_profile_enabled: bool - :ivar user_profile_details: Specific categories or types of user profile information to extract - and store. - :vartype user_profile_details: str - :ivar chat_summary_enabled: Whether to enable chat summary extraction and storage. Default is - true. Required. - :vartype chat_summary_enabled: bool - """ - - user_profile_enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to enable user profile extraction and storage. Default is true. Required.""" - user_profile_details: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Specific categories or types of user profile information to extract and store.""" - chat_summary_enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to enable chat summary extraction and storage. Default is true. 
Required.""" - - @overload - def __init__( - self, - *, - user_profile_enabled: bool, - chat_summary_enabled: bool, - user_profile_details: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MemoryStoreDeleteScopeResponse(_Model): - """Response for deleting memories from a scope. - - :ivar object: The object type. Always 'memory_store.scope.deleted'. Required. Default value is - "memory_store.scope.deleted". - :vartype object: str - :ivar name: The name of the memory store. Required. - :vartype name: str - :ivar scope: The scope from which memories were deleted. Required. - :vartype scope: str - :ivar deleted: Whether the deletion operation was successful. Required. - :vartype deleted: bool - """ - - object: Literal["memory_store.scope.deleted"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The object type. Always 'memory_store.scope.deleted'. Required. Default value is - \"memory_store.scope.deleted\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the memory store. Required.""" - scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The scope from which memories were deleted. Required.""" - deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the deletion operation was successful. Required.""" - - @overload - def __init__( - self, - *, - name: str, - scope: str, - deleted: bool, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.object: Literal["memory_store.scope.deleted"] = "memory_store.scope.deleted" - - -class MemoryStoreObject(_Model): - """A memory store that can store and retrieve user memories. - - :ivar object: The object type, which is always 'memory_store'. Required. Default value is - "memory_store". - :vartype object: str - :ivar id: The unique identifier of the memory store. Required. - :vartype id: str - :ivar created_at: The Unix timestamp (seconds) when the memory store was created. Required. - :vartype created_at: ~datetime.datetime - :ivar updated_at: The Unix timestamp (seconds) when the memory store was last updated. - Required. - :vartype updated_at: ~datetime.datetime - :ivar name: The name of the memory store. Required. - :vartype name: str - :ivar description: A human-readable description of the memory store. - :vartype description: str - :ivar metadata: Arbitrary key-value metadata to associate with the memory store. - :vartype metadata: dict[str, str] - :ivar definition: The definition of the memory store. Required. - :vartype definition: ~azure.ai.projects.models.MemoryStoreDefinition - """ - - object: Literal["memory_store"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type, which is always 'memory_store'. Required. Default value is \"memory_store\".""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the memory store. Required.""" - created_at: datetime.datetime = rest_field( - visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" - ) - """The Unix timestamp (seconds) when the memory store was created. 
Required.""" - updated_at: datetime.datetime = rest_field( - visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" - ) - """The Unix timestamp (seconds) when the memory store was last updated. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the memory store. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A human-readable description of the memory store.""" - metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Arbitrary key-value metadata to associate with the memory store.""" - definition: "_models.MemoryStoreDefinition" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The definition of the memory store. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - created_at: datetime.datetime, - updated_at: datetime.datetime, - name: str, - definition: "_models.MemoryStoreDefinition", - description: Optional[str] = None, - metadata: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.object: Literal["memory_store"] = "memory_store" - - -class MemoryStoreOperationUsage(_Model): - """Usage statistics of a memory store operation. - - :ivar embedding_tokens: The number of embedding tokens. Required. - :vartype embedding_tokens: int - :ivar input_tokens: The number of input tokens. Required. - :vartype input_tokens: int - :ivar input_tokens_details: A detailed breakdown of the input tokens. Required. 
- :vartype input_tokens_details: - ~azure.ai.projects.models.MemoryStoreOperationUsageInputTokensDetails - :ivar output_tokens: The number of output tokens. Required. - :vartype output_tokens: int - :ivar output_tokens_details: A detailed breakdown of the output tokens. Required. - :vartype output_tokens_details: - ~azure.ai.projects.models.MemoryStoreOperationUsageOutputTokensDetails - :ivar total_tokens: The total number of tokens used. Required. - :vartype total_tokens: int - """ - - embedding_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of embedding tokens. Required.""" - input_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of input tokens. Required.""" - input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A detailed breakdown of the input tokens. Required.""" - output_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of output tokens. Required.""" - output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A detailed breakdown of the output tokens. Required.""" - total_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The total number of tokens used. Required.""" - - @overload - def __init__( - self, - *, - embedding_tokens: int, - input_tokens: int, - input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails", - output_tokens: int, - output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails", - total_tokens: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MemoryStoreOperationUsageInputTokensDetails(_Model): # pylint: disable=name-too-long - """MemoryStoreOperationUsageInputTokensDetails. - - :ivar cached_tokens: The number of tokens that were retrieved from the cache. - `More on prompt caching `_. Required. - :vartype cached_tokens: int - """ - - cached_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of tokens that were retrieved from the cache. - `More on prompt caching `_. Required.""" - - @overload - def __init__( - self, - *, - cached_tokens: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MemoryStoreOperationUsageOutputTokensDetails(_Model): # pylint: disable=name-too-long - """MemoryStoreOperationUsageOutputTokensDetails. - - :ivar reasoning_tokens: The number of reasoning tokens. Required. - :vartype reasoning_tokens: int - """ - - reasoning_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of reasoning tokens. Required.""" - - @overload - def __init__( - self, - *, - reasoning_tokens: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MemoryStoreSearchResponse(_Model): - """Memory search response. - - :ivar search_id: The unique ID of this search request. Use this value as previous_search_id in - subsequent requests to perform incremental searches. Required. 
- :vartype search_id: str - :ivar memories: Related memory items found during the search operation. Required. - :vartype memories: list[~azure.ai.projects.models.MemorySearchItem] - :ivar usage: Usage statistics associated with the memory search operation. Required. - :vartype usage: ~azure.ai.projects.models.MemoryStoreOperationUsage - """ - - search_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of this search request. Use this value as previous_search_id in subsequent - requests to perform incremental searches. Required.""" - memories: list["_models.MemorySearchItem"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Related memory items found during the search operation. Required.""" - usage: "_models.MemoryStoreOperationUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Usage statistics associated with the memory search operation. Required.""" - - @overload - def __init__( - self, - *, - search_id: str, - memories: list["_models.MemorySearchItem"], - usage: "_models.MemoryStoreOperationUsage", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MemoryStoreUpdateResponse(_Model): - """Provides the status of a memory store update operation. - - :ivar update_id: The unique ID of this update request. Use this value as previous_update_id in - subsequent requests to perform incremental updates. Required. - :vartype update_id: str - :ivar status: The status of the memory update operation. One of "queued", "in_progress", - "completed", "failed", or "superseded". Required. Known values are: "queued", "in_progress", - "completed", "failed", and "superseded". 
- :vartype status: str or ~azure.ai.projects.models.MemoryStoreUpdateStatus - :ivar superseded_by: The update_id the operation was superseded by when status is "superseded". - :vartype superseded_by: str - :ivar result: The result of memory store update operation when status is "completed". - :vartype result: ~azure.ai.projects.models.MemoryStoreUpdateResult - :ivar error: Error object that describes the error when status is "failed". - :vartype error: ~azure.ai.projects.models.ApiError - """ - - update_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of this update request. Use this value as previous_update_id in subsequent - requests to perform incremental updates. Required.""" - status: Union[str, "_models.MemoryStoreUpdateStatus"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the memory update operation. One of \"queued\", \"in_progress\", \"completed\", - \"failed\", or \"superseded\". Required. Known values are: \"queued\", \"in_progress\", - \"completed\", \"failed\", and \"superseded\".""" - superseded_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The update_id the operation was superseded by when status is \"superseded\".""" - result: Optional["_models.MemoryStoreUpdateResult"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The result of memory store update operation when status is \"completed\".""" - error: Optional["_models.ApiError"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Error object that describes the error when status is \"failed\".""" - - @overload - def __init__( - self, - *, - update_id: str, - status: Union[str, "_models.MemoryStoreUpdateStatus"], - superseded_by: Optional[str] = None, - result: Optional["_models.MemoryStoreUpdateResult"] = None, - error: Optional["_models.ApiError"] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MemoryStoreUpdateResult(_Model): - """Memory update result. - - :ivar memory_operations: A list of individual memory operations that were performed during the - update. Required. - :vartype memory_operations: list[~azure.ai.projects.models.MemoryOperation] - :ivar usage: Usage statistics associated with the memory update operation. Required. - :vartype usage: ~azure.ai.projects.models.MemoryStoreOperationUsage - """ - - memory_operations: list["_models.MemoryOperation"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A list of individual memory operations that were performed during the update. Required.""" - usage: "_models.MemoryStoreOperationUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Usage statistics associated with the memory update operation. Required.""" - - @overload - def __init__( - self, - *, - memory_operations: list["_models.MemoryOperation"], - usage: "_models.MemoryStoreOperationUsage", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MicrosoftFabricAgentTool(Tool, discriminator="fabric_dataagent_preview"): - """The input definition information for a Microsoft Fabric tool as used to configure an agent. - - :ivar type: The object type, which is always 'fabric_dataagent'. Required. - :vartype type: str or ~azure.ai.projects.models.FABRIC_DATAAGENT_PREVIEW - :ivar fabric_dataagent_preview: The fabric data agent tool parameters. Required. 
- :vartype fabric_dataagent_preview: ~azure.ai.projects.models.FabricDataAgentToolParameters - """ - - type: Literal[ToolType.FABRIC_DATAAGENT_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'fabric_dataagent'. Required.""" - fabric_dataagent_preview: "_models.FabricDataAgentToolParameters" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The fabric data agent tool parameters. Required.""" - - @overload - def __init__( - self, - *, - fabric_dataagent_preview: "_models.FabricDataAgentToolParameters", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.FABRIC_DATAAGENT_PREVIEW # type: ignore - - -class ModelDeployment(Deployment, discriminator="ModelDeployment"): - """Model Deployment Definition. - - :ivar name: Name of the deployment. Required. - :vartype name: str - :ivar type: The type of the deployment. Required. Model deployment - :vartype type: str or ~azure.ai.projects.models.MODEL_DEPLOYMENT - :ivar model_name: Publisher-specific name of the deployed model. Required. - :vartype model_name: str - :ivar model_version: Publisher-specific version of the deployed model. Required. - :vartype model_version: str - :ivar model_publisher: Name of the deployed model's publisher. Required. - :vartype model_publisher: str - :ivar capabilities: Capabilities of deployed model. Required. - :vartype capabilities: dict[str, str] - :ivar sku: Sku of the model deployment. Required. - :vartype sku: ~azure.ai.projects.models.ModelDeploymentSku - :ivar connection_name: Name of the connection the deployment comes from. 
- :vartype connection_name: str - """ - - type: Literal[DeploymentType.MODEL_DEPLOYMENT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the deployment. Required. Model deployment""" - model_name: str = rest_field(name="modelName", visibility=["read"]) - """Publisher-specific name of the deployed model. Required.""" - model_version: str = rest_field(name="modelVersion", visibility=["read"]) - """Publisher-specific version of the deployed model. Required.""" - model_publisher: str = rest_field(name="modelPublisher", visibility=["read"]) - """Name of the deployed model's publisher. Required.""" - capabilities: dict[str, str] = rest_field(visibility=["read"]) - """Capabilities of deployed model. Required.""" - sku: "_models.ModelDeploymentSku" = rest_field(visibility=["read"]) - """Sku of the model deployment. Required.""" - connection_name: Optional[str] = rest_field(name="connectionName", visibility=["read"]) - """Name of the connection the deployment comes from.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = DeploymentType.MODEL_DEPLOYMENT # type: ignore - - -class ModelDeploymentSku(_Model): - """Sku information. - - :ivar capacity: Sku capacity. Required. - :vartype capacity: int - :ivar family: Sku family. Required. - :vartype family: str - :ivar name: Sku name. Required. - :vartype name: str - :ivar size: Sku size. Required. - :vartype size: str - :ivar tier: Sku tier. Required. - :vartype tier: str - """ - - capacity: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Sku capacity. 
Required.""" - family: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Sku family. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Sku name. Required.""" - size: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Sku size. Required.""" - tier: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Sku tier. Required.""" - - @overload - def __init__( - self, - *, - capacity: int, - family: str, - name: str, - size: str, - tier: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class MonthlyRecurrenceSchedule(RecurrenceSchedule, discriminator="Monthly"): - """Monthly recurrence schedule. - - :ivar type: Monthly recurrence type. Required. Monthly recurrence pattern. - :vartype type: str or ~azure.ai.projects.models.MONTHLY - :ivar days_of_month: Days of the month for the recurrence schedule. Required. - :vartype days_of_month: list[int] - """ - - type: Literal[RecurrenceType.MONTHLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Monthly recurrence type. Required. Monthly recurrence pattern.""" - days_of_month: list[int] = rest_field( - name="daysOfMonth", visibility=["read", "create", "update", "delete", "query"] - ) - """Days of the month for the recurrence schedule. Required.""" - - @overload - def __init__( - self, - *, - days_of_month: list[int], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = RecurrenceType.MONTHLY # type: ignore - - -class NoAuthenticationCredentials(BaseCredentials, discriminator="None"): - """Credentials that do not require authentication. - - :ivar type: The credential type. Required. No credential - :vartype type: str or ~azure.ai.projects.models.NONE - """ - - type: Literal[CredentialType.NONE] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. No credential""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = CredentialType.NONE # type: ignore - - -class OAuthConsentRequestItemResource(ItemResource, discriminator="oauth_consent_request"): - """Request from the service for the user to perform OAuth consent. - - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar id: Required. - :vartype id: str - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.OAUTH_CONSENT_REQUEST - :ivar consent_link: The link the user can use to perform OAuth consent. Required. - :vartype consent_link: str - :ivar server_label: The server label for the OAuth consent request. Required. - :vartype server_label: str - """ - - type: Literal[ItemType.OAUTH_CONSENT_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - consent_link: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The link the user can use to perform OAuth consent. 
Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The server label for the OAuth consent request. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - consent_link: str, - server_label: str, - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.OAUTH_CONSENT_REQUEST # type: ignore - - -class OneTimeTrigger(Trigger, discriminator="OneTime"): - """One-time trigger. - - :ivar type: Required. One-time trigger. - :vartype type: str or ~azure.ai.projects.models.ONE_TIME - :ivar trigger_at: Date and time for the one-time trigger in ISO 8601 format. Required. - :vartype trigger_at: str - :ivar time_zone: Time zone for the one-time trigger. - :vartype time_zone: str - """ - - type: Literal[TriggerType.ONE_TIME] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. One-time trigger.""" - trigger_at: str = rest_field(name="triggerAt", visibility=["read", "create", "update", "delete", "query"]) - """Date and time for the one-time trigger in ISO 8601 format. Required.""" - time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) - """Time zone for the one-time trigger.""" - - @overload - def __init__( - self, - *, - trigger_at: str, - time_zone: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = TriggerType.ONE_TIME # type: ignore - - -class OpenApiAgentTool(Tool, discriminator="openapi"): - """The input definition information for an OpenAPI tool as used to configure an agent. - - :ivar type: The object type, which is always 'openapi'. Required. - :vartype type: str or ~azure.ai.projects.models.OPENAPI - :ivar openapi: The openapi function definition. Required. - :vartype openapi: ~azure.ai.projects.models.OpenApiFunctionDefinition - """ - - type: Literal[ToolType.OPENAPI] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'openapi'. Required.""" - openapi: "_models.OpenApiFunctionDefinition" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The openapi function definition. Required.""" - - @overload - def __init__( - self, - *, - openapi: "_models.OpenApiFunctionDefinition", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.OPENAPI # type: ignore - - -class OpenApiAuthDetails(_Model): - """authentication details for OpenApiFunctionDefinition. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - OpenApiAnonymousAuthDetails, OpenApiManagedAuthDetails, OpenApiProjectConnectionAuthDetails - - :ivar type: The type of authentication, must be anonymous/project_connection/managed_identity. - Required. Known values are: "anonymous", "project_connection", and "managed_identity". 
- :vartype type: str or ~azure.ai.projects.models.OpenApiAuthType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """The type of authentication, must be anonymous/project_connection/managed_identity. Required. - Known values are: \"anonymous\", \"project_connection\", and \"managed_identity\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class OpenApiAnonymousAuthDetails(OpenApiAuthDetails, discriminator="anonymous"): - """Security details for OpenApi anonymous authentication. - - :ivar type: The object type, which is always 'anonymous'. Required. - :vartype type: str or ~azure.ai.projects.models.ANONYMOUS - """ - - type: Literal[OpenApiAuthType.ANONYMOUS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'anonymous'. Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = OpenApiAuthType.ANONYMOUS # type: ignore - - -class OpenApiFunctionDefinition(_Model): - """The input definition information for an openapi function. - - :ivar name: The name of the function to be called. Required. - :vartype name: str - :ivar description: A description of what the function does, used by the model to choose when - and how to call the function. 
- :vartype description: str - :ivar spec: The openapi function shape, described as a JSON Schema object. Required. - :vartype spec: any - :ivar auth: Open API authentication details. Required. - :vartype auth: ~azure.ai.projects.models.OpenApiAuthDetails - :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults. - :vartype default_params: list[str] - :ivar functions: List of function definitions used by OpenApi tool. - :vartype functions: list[~azure.ai.projects.models.OpenApiFunctionDefinitionFunction] - """ - - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to be called. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A description of what the function does, used by the model to choose when and how to call the - function.""" - spec: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The openapi function shape, described as a JSON Schema object. Required.""" - auth: "_models.OpenApiAuthDetails" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Open API authentication details. Required.""" - default_params: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """List of OpenAPI spec parameters that will use user-provided defaults.""" - functions: Optional[list["_models.OpenApiFunctionDefinitionFunction"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """List of function definitions used by OpenApi tool.""" - - @overload - def __init__( - self, - *, - name: str, - spec: Any, - auth: "_models.OpenApiAuthDetails", - description: Optional[str] = None, - default_params: Optional[list[str]] = None, - functions: Optional[list["_models.OpenApiFunctionDefinitionFunction"]] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class OpenApiFunctionDefinitionFunction(_Model): - """OpenApiFunctionDefinitionFunction. - - :ivar name: The name of the function to be called. Required. - :vartype name: str - :ivar description: A description of what the function does, used by the model to choose when - and how to call the function. - :vartype description: str - :ivar parameters: The parameters the functions accepts, described as a JSON Schema object. - Required. - :vartype parameters: any - """ - - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to be called. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A description of what the function does, used by the model to choose when and how to call the - function.""" - parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The parameters the functions accepts, described as a JSON Schema object. Required.""" - - @overload - def __init__( - self, - *, - name: str, - parameters: Any, - description: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class OpenApiManagedAuthDetails(OpenApiAuthDetails, discriminator="managed_identity"): - """Security details for OpenApi managed_identity authentication. - - :ivar type: The object type, which is always 'managed_identity'. Required. 
- :vartype type: str or ~azure.ai.projects.models.MANAGED_IDENTITY - :ivar security_scheme: Connection auth security details. Required. - :vartype security_scheme: ~azure.ai.projects.models.OpenApiManagedSecurityScheme - """ - - type: Literal[OpenApiAuthType.MANAGED_IDENTITY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'managed_identity'. Required.""" - security_scheme: "_models.OpenApiManagedSecurityScheme" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Connection auth security details. Required.""" - - @overload - def __init__( - self, - *, - security_scheme: "_models.OpenApiManagedSecurityScheme", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = OpenApiAuthType.MANAGED_IDENTITY # type: ignore - - -class OpenApiManagedSecurityScheme(_Model): - """Security scheme for OpenApi managed_identity authentication. - - :ivar audience: Authentication scope for managed_identity auth type. Required. - :vartype audience: str - """ - - audience: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Authentication scope for managed_identity auth type. Required.""" - - @overload - def __init__( - self, - *, - audience: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class OpenApiProjectConnectionAuthDetails(OpenApiAuthDetails, discriminator="project_connection"): - """Security details for OpenApi project connection authentication. 
- - :ivar type: The object type, which is always 'project_connection'. Required. - :vartype type: str or ~azure.ai.projects.models.PROJECT_CONNECTION - :ivar security_scheme: Project connection auth security details. Required. - :vartype security_scheme: ~azure.ai.projects.models.OpenApiProjectConnectionSecurityScheme - """ - - type: Literal[OpenApiAuthType.PROJECT_CONNECTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'project_connection'. Required.""" - security_scheme: "_models.OpenApiProjectConnectionSecurityScheme" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Project connection auth security details. Required.""" - - @overload - def __init__( - self, - *, - security_scheme: "_models.OpenApiProjectConnectionSecurityScheme", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = OpenApiAuthType.PROJECT_CONNECTION # type: ignore - - -class OpenApiProjectConnectionSecurityScheme(_Model): - """Security scheme for OpenApi managed_identity authentication. - - :ivar project_connection_id: Project connection id for Project Connection auth type. Required. - :vartype project_connection_id: str - """ - - project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Project connection id for Project Connection auth type. Required.""" - - @overload - def __init__( - self, - *, - project_connection_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class PagedScheduleRun(_Model): - """Paged collection of ScheduleRun items. - - :ivar value: The ScheduleRun items on this page. Required. - :vartype value: list[~azure.ai.projects.models.ScheduleRun] - :ivar next_link: The link to the next page of items. - :vartype next_link: str - """ - - value: list["_models.ScheduleRun"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ScheduleRun items on this page. Required.""" - next_link: Optional[str] = rest_field(name="nextLink", visibility=["read", "create", "update", "delete", "query"]) - """The link to the next page of items.""" - - @overload - def __init__( - self, - *, - value: list["_models.ScheduleRun"], - next_link: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class PendingUploadRequest(_Model): - """Represents a request for a pending upload. - - :ivar pending_upload_id: If PendingUploadId is not provided, a random GUID will be used. - :vartype pending_upload_id: str - :ivar connection_name: Azure Storage Account connection name to use for generating temporary - SAS token. - :vartype connection_name: str - :ivar pending_upload_type: BlobReference is the only supported type. Required. Blob Reference - is the only supported type. 
- :vartype pending_upload_type: str or ~azure.ai.projects.models.BLOB_REFERENCE - """ - - pending_upload_id: Optional[str] = rest_field( - name="pendingUploadId", visibility=["read", "create", "update", "delete", "query"] - ) - """If PendingUploadId is not provided, a random GUID will be used.""" - connection_name: Optional[str] = rest_field( - name="connectionName", visibility=["read", "create", "update", "delete", "query"] - ) - """Azure Storage Account connection name to use for generating temporary SAS token.""" - pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE] = rest_field( - name="pendingUploadType", visibility=["read", "create", "update", "delete", "query"] - ) - """BlobReference is the only supported type. Required. Blob Reference is the only supported type.""" - - @overload - def __init__( - self, - *, - pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE], - pending_upload_id: Optional[str] = None, - connection_name: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class PendingUploadResponse(_Model): - """Represents the response for a pending upload request. - - :ivar blob_reference: Container-level read, write, list SAS. Required. - :vartype blob_reference: ~azure.ai.projects.models.BlobReference - :ivar pending_upload_id: ID for this upload request. Required. - :vartype pending_upload_id: str - :ivar version: Version of asset to be created if user did not specify version when initially - creating upload. - :vartype version: str - :ivar pending_upload_type: BlobReference is the only supported type. Required. Blob Reference - is the only supported type. 
- :vartype pending_upload_type: str or ~azure.ai.projects.models.BLOB_REFERENCE - """ - - blob_reference: "_models.BlobReference" = rest_field( - name="blobReference", visibility=["read", "create", "update", "delete", "query"] - ) - """Container-level read, write, list SAS. Required.""" - pending_upload_id: str = rest_field( - name="pendingUploadId", visibility=["read", "create", "update", "delete", "query"] - ) - """ID for this upload request. Required.""" - version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Version of asset to be created if user did not specify version when initially creating upload.""" - pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE] = rest_field( - name="pendingUploadType", visibility=["read", "create", "update", "delete", "query"] - ) - """BlobReference is the only supported type. Required. Blob Reference is the only supported type.""" - - @overload - def __init__( - self, - *, - blob_reference: "_models.BlobReference", - pending_upload_id: str, - pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE], - version: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class Prompt(_Model): - """Reference to a prompt template and its variables. - `Learn more `_. - - :ivar id: The unique identifier of the prompt template to use. Required. - :vartype id: str - :ivar version: Optional version of the prompt template. - :vartype version: str - :ivar variables: - :vartype variables: ~azure.ai.projects.models.ResponsePromptVariables - """ - - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the prompt template to use. 
Required.""" - version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional version of the prompt template.""" - variables: Optional["_models.ResponsePromptVariables"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - version: Optional[str] = None, - variables: Optional["_models.ResponsePromptVariables"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class PromptAgentDefinition(AgentDefinition, discriminator="prompt"): - """The prompt agent definition. - - :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. - :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar kind: Required. - :vartype kind: str or ~azure.ai.projects.models.PROMPT - :ivar model: The model deployment to use for this agent. Required. - :vartype model: str - :ivar instructions: A system (or developer) message inserted into the model's context. - :vartype instructions: str - :ivar temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 - will make the output more random, while lower values like 0.2 will make it more focused and - deterministic. - We generally recommend altering this or ``top_p`` but not both. - :vartype temperature: float - :ivar top_p: An alternative to sampling with temperature, called nucleus sampling, - where the model considers the results of the tokens with top_p probability - mass. So 0.1 means only the tokens comprising the top 10% probability mass - are considered. - We generally recommend altering this or ``temperature`` but not both. 
- :vartype top_p: float - :ivar reasoning: - :vartype reasoning: ~azure.ai.projects.models.Reasoning - :ivar tools: An array of tools the model may call while generating a response. You - can specify which tool to use by setting the ``tool_choice`` parameter. - :vartype tools: list[~azure.ai.projects.models.Tool] - :ivar text: Configuration options for a text response from the model. Can be plain text or - structured JSON data. - :vartype text: ~azure.ai.projects.models.PromptAgentDefinitionText - :ivar structured_inputs: Set of structured inputs that can participate in prompt template - substitution or tool argument bindings. - :vartype structured_inputs: dict[str, ~azure.ai.projects.models.StructuredInputDefinition] - """ - - kind: Literal[AgentKind.PROMPT] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - model: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The model deployment to use for this agent. Required.""" - instructions: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A system (or developer) message inserted into the model's context.""" - temperature: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output - more random, while lower values like 0.2 will make it more focused and deterministic. - We generally recommend altering this or ``top_p`` but not both.""" - top_p: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An alternative to sampling with temperature, called nucleus sampling, - where the model considers the results of the tokens with top_p probability - mass. So 0.1 means only the tokens comprising the top 10% probability mass - are considered. 
- We generally recommend altering this or ``temperature`` but not both.""" - reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An array of tools the model may call while generating a response. You - can specify which tool to use by setting the ``tool_choice`` parameter.""" - text: Optional["_models.PromptAgentDefinitionText"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Configuration options for a text response from the model. Can be plain text or structured JSON - data.""" - structured_inputs: Optional[dict[str, "_models.StructuredInputDefinition"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Set of structured inputs that can participate in prompt template substitution or tool argument - bindings.""" - - @overload - def __init__( - self, - *, - model: str, - rai_config: Optional["_models.RaiConfig"] = None, - instructions: Optional[str] = None, - temperature: Optional[float] = None, - top_p: Optional[float] = None, - reasoning: Optional["_models.Reasoning"] = None, - tools: Optional[list["_models.Tool"]] = None, - text: Optional["_models.PromptAgentDefinitionText"] = None, - structured_inputs: Optional[dict[str, "_models.StructuredInputDefinition"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.kind = AgentKind.PROMPT # type: ignore - - -class PromptAgentDefinitionText(_Model): - """PromptAgentDefinitionText. 
- - :ivar format: - :vartype format: ~azure.ai.projects.models.ResponseTextFormatConfiguration - """ - - format: Optional["_models.ResponseTextFormatConfiguration"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - - @overload - def __init__( - self, - *, - format: Optional["_models.ResponseTextFormatConfiguration"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class PromptBasedEvaluatorDefinition(EvaluatorDefinition, discriminator="prompt"): - """Prompt-based evaluator. - - :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. - This includes parameters like type, properties, required. - :vartype init_parameters: any - :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This - includes parameters like type, properties, required. - :vartype data_schema: any - :ivar metrics: List of output metrics produced by this evaluator. - :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] - :ivar type: Required. Prompt-based definition - :vartype type: str or ~azure.ai.projects.models.PROMPT - :ivar prompt_text: The prompt text used for evaluation. Required. - :vartype prompt_text: str - """ - - type: Literal[EvaluatorDefinitionType.PROMPT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Prompt-based definition""" - prompt_text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The prompt text used for evaluation. 
Required.""" - - @overload - def __init__( - self, - *, - prompt_text: str, - init_parameters: Optional[Any] = None, - data_schema: Optional[Any] = None, - metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = EvaluatorDefinitionType.PROMPT # type: ignore - - -class ProtocolVersionRecord(_Model): - """A record mapping for a single protocol and its version. - - :ivar protocol: The protocol type. Required. Known values are: "activity_protocol" and - "responses". - :vartype protocol: str or ~azure.ai.projects.models.AgentProtocol - :ivar version: The version string for the protocol, e.g. 'v0.1.1'. Required. - :vartype version: str - """ - - protocol: Union[str, "_models.AgentProtocol"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The protocol type. Required. Known values are: \"activity_protocol\" and \"responses\".""" - version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The version string for the protocol, e.g. 'v0.1.1'. Required.""" - - @overload - def __init__( - self, - *, - protocol: Union[str, "_models.AgentProtocol"], - version: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class RaiConfig(_Model): - """Configuration for Responsible AI (RAI) content filtering and safety features. - - :ivar rai_policy_name: The name of the RAI policy to apply. Required. 
- :vartype rai_policy_name: str - """ - - rai_policy_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the RAI policy to apply. Required.""" - - @overload - def __init__( - self, - *, - rai_policy_name: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class RankingOptions(_Model): - """RankingOptions. - - :ivar ranker: The ranker to use for the file search. Is either a Literal["auto"] type or a - Literal["default-2024-11-15"] type. - :vartype ranker: str or str - :ivar score_threshold: The score threshold for the file search, a number between 0 and 1. - Numbers closer to 1 will attempt to return only the most relevant results, but may return fewer - results. - :vartype score_threshold: float - """ - - ranker: Optional[Literal["auto", "default-2024-11-15"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The ranker to use for the file search. Is either a Literal[\"auto\"] type or a - Literal[\"default-2024-11-15\"] type.""" - score_threshold: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The score threshold for the file search, a number between 0 and 1. Numbers closer to 1 will - attempt to return only the most relevant results, but may return fewer results.""" - - @overload - def __init__( - self, - *, - ranker: Optional[Literal["auto", "default-2024-11-15"]] = None, - score_threshold: Optional[float] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class Reasoning(_Model): - """**o-series models only** - Configuration options for - `reasoning models `_. - - :ivar effort: Known values are: "low", "medium", and "high". - :vartype effort: str or ~azure.ai.projects.models.ReasoningEffort - :ivar summary: A summary of the reasoning performed by the model. This can be - useful for debugging and understanding the model's reasoning process. - One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: Literal["auto"], - Literal["concise"], Literal["detailed"] - :vartype summary: str or str or str - :ivar generate_summary: **Deprecated:** use ``summary`` instead. - A summary of the reasoning performed by the model. This can be - useful for debugging and understanding the model's reasoning process. - One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: Literal["auto"], - Literal["concise"], Literal["detailed"] - :vartype generate_summary: str or str or str - """ - - effort: Optional[Union[str, "_models.ReasoningEffort"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Known values are: \"low\", \"medium\", and \"high\".""" - summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A summary of the reasoning performed by the model. This can be - useful for debugging and understanding the model's reasoning process. - One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: - Literal[\"auto\"], Literal[\"concise\"], Literal[\"detailed\"]""" - generate_summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """**Deprecated:** use ``summary`` instead. - A summary of the reasoning performed by the model. 
This can be - useful for debugging and understanding the model's reasoning process. - One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: - Literal[\"auto\"], Literal[\"concise\"], Literal[\"detailed\"]""" - - @overload - def __init__( - self, - *, - effort: Optional[Union[str, "_models.ReasoningEffort"]] = None, - summary: Optional[Literal["auto", "concise", "detailed"]] = None, - generate_summary: Optional[Literal["auto", "concise", "detailed"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ReasoningItemParam(ItemParam, discriminator="reasoning"): - """A description of the chain of thought used by a reasoning model while generating - a response. Be sure to include these items in your ``input`` to the Responses API - for subsequent turns of a conversation if you are manually - `managing context `_. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.REASONING - :ivar encrypted_content: The encrypted content of the reasoning item - populated when a - response is - generated with ``reasoning.encrypted_content`` in the ``include`` parameter. - :vartype encrypted_content: str - :ivar summary: Reasoning text contents. Required. 
- :vartype summary: list[~azure.ai.projects.models.ReasoningItemSummaryPart] - """ - - type: Literal[ItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The encrypted content of the reasoning item - populated when a response is - generated with ``reasoning.encrypted_content`` in the ``include`` parameter.""" - summary: list["_models.ReasoningItemSummaryPart"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Reasoning text contents. Required.""" - - @overload - def __init__( - self, - *, - summary: list["_models.ReasoningItemSummaryPart"], - encrypted_content: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.REASONING # type: ignore - - -class ReasoningItemResource(ItemResource, discriminator="reasoning"): - """A description of the chain of thought used by a reasoning model while generating - a response. Be sure to include these items in your ``input`` to the Responses API - for subsequent turns of a conversation if you are manually - `managing context `_. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.REASONING - :ivar encrypted_content: The encrypted content of the reasoning item - populated when a - response is - generated with ``reasoning.encrypted_content`` in the ``include`` parameter. 
- :vartype encrypted_content: str - :ivar summary: Reasoning text contents. Required. - :vartype summary: list[~azure.ai.projects.models.ReasoningItemSummaryPart] - """ - - type: Literal[ItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The encrypted content of the reasoning item - populated when a response is - generated with ``reasoning.encrypted_content`` in the ``include`` parameter.""" - summary: list["_models.ReasoningItemSummaryPart"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Reasoning text contents. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - summary: list["_models.ReasoningItemSummaryPart"], - created_by: Optional["_models.CreatedBy"] = None, - encrypted_content: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.REASONING # type: ignore - - -class ReasoningItemSummaryPart(_Model): - """ReasoningItemSummaryPart. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ReasoningItemSummaryTextPart - - :ivar type: Required. "summary_text" - :vartype type: str or ~azure.ai.projects.models.ReasoningItemSummaryPartType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. \"summary_text\"""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ReasoningItemSummaryTextPart(ReasoningItemSummaryPart, discriminator="summary_text"): - """ReasoningItemSummaryTextPart. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.SUMMARY_TEXT - :ivar text: Required. - :vartype text: str - """ - - type: Literal[ReasoningItemSummaryPartType.SUMMARY_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - - @overload - def __init__( - self, - *, - text: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ReasoningItemSummaryPartType.SUMMARY_TEXT # type: ignore - - -class RecurrenceTrigger(Trigger, discriminator="Recurrence"): - """Recurrence based trigger. - - :ivar type: Type of the trigger. Required. Recurrence based trigger. - :vartype type: str or ~azure.ai.projects.models.RECURRENCE - :ivar start_time: Start time for the recurrence schedule in ISO 8601 format. - :vartype start_time: str - :ivar end_time: End time for the recurrence schedule in ISO 8601 format. - :vartype end_time: str - :ivar time_zone: Time zone for the recurrence schedule. - :vartype time_zone: str - :ivar interval: Interval for the recurrence schedule. Required. - :vartype interval: int - :ivar schedule: Recurrence schedule for the recurrence trigger. Required. 
- :vartype schedule: ~azure.ai.projects.models.RecurrenceSchedule - """ - - type: Literal[TriggerType.RECURRENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Type of the trigger. Required. Recurrence based trigger.""" - start_time: Optional[str] = rest_field(name="startTime", visibility=["read", "create", "update", "delete", "query"]) - """Start time for the recurrence schedule in ISO 8601 format.""" - end_time: Optional[str] = rest_field(name="endTime", visibility=["read", "create", "update", "delete", "query"]) - """End time for the recurrence schedule in ISO 8601 format.""" - time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) - """Time zone for the recurrence schedule.""" - interval: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Interval for the recurrence schedule. Required.""" - schedule: "_models.RecurrenceSchedule" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Recurrence schedule for the recurrence trigger. Required.""" - - @overload - def __init__( - self, - *, - interval: int, - schedule: "_models.RecurrenceSchedule", - start_time: Optional[str] = None, - end_time: Optional[str] = None, - time_zone: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = TriggerType.RECURRENCE # type: ignore - - -class RedTeam(_Model): - """Red team details. - - :ivar name: Identifier of the red team run. Required. - :vartype name: str - :ivar display_name: Name of the red-team run. - :vartype display_name: str - :ivar num_turns: Number of simulation rounds. 
- :vartype num_turns: int - :ivar attack_strategies: List of attack strategies or nested lists of attack strategies. - :vartype attack_strategies: list[str or ~azure.ai.projects.models.AttackStrategy] - :ivar simulation_only: Simulation-only or Simulation + Evaluation. Default false, if true the - scan outputs conversation not evaluation result. - :vartype simulation_only: bool - :ivar risk_categories: List of risk categories to generate attack objectives for. - :vartype risk_categories: list[str or ~azure.ai.projects.models.RiskCategory] - :ivar application_scenario: Application scenario for the red team operation, to generate - scenario specific attacks. - :vartype application_scenario: str - :ivar tags: Red team's tags. Unlike properties, tags are fully mutable. - :vartype tags: dict[str, str] - :ivar properties: Red team's properties. Unlike tags, properties are add-only. Once added, a - property cannot be removed. - :vartype properties: dict[str, str] - :ivar status: Status of the red-team. It is set by service and is read-only. - :vartype status: str - :ivar target: Target configuration for the red-team run. Required. - :vartype target: ~azure.ai.projects.models.TargetConfig - """ - - name: str = rest_field(name="id", visibility=["read"]) - """Identifier of the red team run. 
Required.""" - display_name: Optional[str] = rest_field( - name="displayName", visibility=["read", "create", "update", "delete", "query"] - ) - """Name of the red-team run.""" - num_turns: Optional[int] = rest_field(name="numTurns", visibility=["read", "create", "update", "delete", "query"]) - """Number of simulation rounds.""" - attack_strategies: Optional[list[Union[str, "_models.AttackStrategy"]]] = rest_field( - name="attackStrategies", visibility=["read", "create", "update", "delete", "query"] - ) - """List of attack strategies or nested lists of attack strategies.""" - simulation_only: Optional[bool] = rest_field( - name="simulationOnly", visibility=["read", "create", "update", "delete", "query"] - ) - """Simulation-only or Simulation + Evaluation. Default false, if true the scan outputs - conversation not evaluation result.""" - risk_categories: Optional[list[Union[str, "_models.RiskCategory"]]] = rest_field( - name="riskCategories", visibility=["read", "create", "update", "delete", "query"] - ) - """List of risk categories to generate attack objectives for.""" - application_scenario: Optional[str] = rest_field( - name="applicationScenario", visibility=["read", "create", "update", "delete", "query"] - ) - """Application scenario for the red team operation, to generate scenario specific attacks.""" - tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Red team's tags. Unlike properties, tags are fully mutable.""" - properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Red team's properties. Unlike tags, properties are add-only. Once added, a property cannot be - removed.""" - status: Optional[str] = rest_field(visibility=["read"]) - """Status of the red-team. 
It is set by service and is read-only.""" - target: "_models.TargetConfig" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Target configuration for the red-team run. Required.""" - - @overload - def __init__( - self, - *, - target: "_models.TargetConfig", - display_name: Optional[str] = None, - num_turns: Optional[int] = None, - attack_strategies: Optional[list[Union[str, "_models.AttackStrategy"]]] = None, - simulation_only: Optional[bool] = None, - risk_categories: Optional[list[Union[str, "_models.RiskCategory"]]] = None, - application_scenario: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - properties: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class Response(_Model): - """Response. - - :ivar metadata: Set of 16 key-value pairs that can be attached to an object. This can be -useful for storing additional information about the object in a structured -format, and querying for objects via API or the dashboard. -Keys are strings with a maximum length of 64 characters. Values are strings -with a maximum length of 512 characters. Required. - :vartype metadata: dict[str, str] - :ivar temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 - will make the output more random, while lower values like 0.2 will make it more focused and - deterministic. -We generally recommend altering this or ``top_p`` but not both. Required. - :vartype temperature: float - :ivar top_p: An alternative to sampling with temperature, called nucleus sampling, -where the model considers the results of the tokens with top_p probability -mass. So 0.1 means only the tokens comprising the top 10% probability mass -are considered. 
-We generally recommend altering this or ``temperature`` but not both. Required. - :vartype top_p: float - :ivar user: A unique identifier representing your end-user, which can help OpenAI to monitor - and detect abuse. `Learn more `_. Required. - :vartype user: str - :ivar service_tier: Note: service_tier is not applicable to Azure OpenAI. Known values are: - "auto", "default", "flex", "scale", and "priority". - :vartype service_tier: str or ~azure.ai.projects.models.ServiceTier - :ivar top_logprobs: An integer between 0 and 20 specifying the number of most likely tokens to - return at each token position, each with an associated log probability. - :vartype top_logprobs: int - :ivar previous_response_id: The unique ID of the previous response to the model. Use this to -create multi-turn conversations. Learn more about -`conversation state `_. - :vartype previous_response_id: str - :ivar model: The model deployment to use for the creation of this response. - :vartype model: str - :ivar reasoning: - :vartype reasoning: ~azure.ai.projects.models.Reasoning - :ivar background: Whether to run the model response in the background. -`Learn more `_. - :vartype background: bool - :ivar max_output_tokens: An upper bound for the number of tokens that can be generated for a - response, including visible output tokens and `reasoning tokens `_. - :vartype max_output_tokens: int - :ivar max_tool_calls: The maximum number of total calls to built-in tools that can be processed - in a response. This maximum number applies across all built-in tool calls, not per individual - tool. Any further attempts to call a tool by the model will be ignored. - :vartype max_tool_calls: int - :ivar text: Configuration options for a text response from the model. Can be plain -text or structured JSON data. Learn more: - * [Text inputs and outputs](/docs/guides/text) - * [Structured Outputs](/docs/guides/structured-outputs). 
- :vartype text: ~azure.ai.projects.models.ResponseText - :ivar tools: An array of tools the model may call while generating a response. You -can specify which tool to use by setting the ``tool_choice`` parameter. -The two categories of tools you can provide the model are: - * **Built-in tools**: Tools that are provided by OpenAI that extend the -model's capabilities, like [web search](/docs/guides/tools-web-search) -or [file search](/docs/guides/tools-file-search). Learn more about -[built-in tools](/docs/guides/tools). - * **Function calls (custom tools)**: Functions that are defined by you, -enabling the model to call your own code. Learn more about -[function calling](/docs/guides/function-calling). - :vartype tools: list[~azure.ai.projects.models.Tool] - :ivar tool_choice: How the model should select which tool (or tools) to use when generating -a response. See the ``tools`` parameter to see how to specify which tools -the model can call. Is either a Union[str, "_models.ToolChoiceOptions"] type or a - ToolChoiceObject type. - :vartype tool_choice: str or ~azure.ai.projects.models.ToolChoiceOptions or - ~azure.ai.projects.models.ToolChoiceObject - :ivar prompt: - :vartype prompt: ~azure.ai.projects.models.Prompt - :ivar truncation: The truncation strategy to use for the model response. - * `auto`: If the context of this response and previous ones exceeds -the model's context window size, the model will truncate the -response to fit the context window by dropping input items in the -middle of the conversation. - * `disabled` (default): If a model response will exceed the context window -size for a model, the request will fail with a 400 error. Is either a Literal["auto"] type or a - Literal["disabled"] type. - :vartype truncation: str or str - :ivar id: Unique identifier for this Response. Required. - :vartype id: str - :ivar object: The object type of this resource - always set to ``response``. Required. Default - value is "response". 
- :vartype object: str - :ivar status: The status of the response generation. One of ``completed``, ``failed``, -``in_progress``, ``cancelled``, ``queued``, or ``incomplete``. Is one of the following types: - Literal["completed"], Literal["failed"], Literal["in_progress"], Literal["cancelled"], - Literal["queued"], Literal["incomplete"] - :vartype status: str or str or str or str or str or str - :ivar created_at: Unix timestamp (in seconds) of when this Response was created. Required. - :vartype created_at: ~datetime.datetime - :ivar error: Required. - :vartype error: ~azure.ai.projects.models.ResponseError - :ivar incomplete_details: Details about why the response is incomplete. Required. - :vartype incomplete_details: ~azure.ai.projects.models.ResponseIncompleteDetails1 - :ivar output: An array of content items generated by the model. - * The length and order of items in the `output` array is dependent -on the model's response. - * Rather than accessing the first item in the `output` array and -assuming it's an `assistant` message with the content generated by -the model, you might consider using the `output_text` property where -supported in SDKs. Required. - :vartype output: list[~azure.ai.projects.models.ItemResource] - :ivar instructions: A system (or developer) message inserted into the model's context. -When using along with ``previous_response_id``, the instructions from a previous -response will not be carried over to the next response. This makes it simple -to swap out system (or developer) messages in new responses. Required. Is either a str type or - a [ItemParam] type. - :vartype instructions: str or list[~azure.ai.projects.models.ItemParam] - :ivar output_text: SDK-only convenience property that contains the aggregated text output -from all ``output_text`` items in the ``output`` array, if any are present. -Supported in the Python and JavaScript SDKs. 
- :vartype output_text: str - :ivar usage: - :vartype usage: ~azure.ai.projects.models.ResponseUsage - :ivar parallel_tool_calls: Whether to allow the model to run tool calls in parallel. Required. - :vartype parallel_tool_calls: bool - :ivar conversation: Required. - :vartype conversation: ~azure.ai.projects.models.ResponseConversation1 - :ivar agent: The agent used for this response. - :vartype agent: ~azure.ai.projects.models.AgentId - :ivar structured_inputs: The structured inputs to the response that can participate in prompt - template substitution or tool argument bindings. - :vartype structured_inputs: dict[str, any] - """ - - metadata: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Set of 16 key-value pairs that can be attached to an object. This can be - useful for storing additional information about the object in a structured - format, and querying for objects via API or the dashboard. - Keys are strings with a maximum length of 64 characters. Values are strings - with a maximum length of 512 characters. Required.""" - temperature: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output - more random, while lower values like 0.2 will make it more focused and deterministic. - We generally recommend altering this or ``top_p`` but not both. Required.""" - top_p: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An alternative to sampling with temperature, called nucleus sampling, - where the model considers the results of the tokens with top_p probability - mass. So 0.1 means only the tokens comprising the top 10% probability mass - are considered. - We generally recommend altering this or ``temperature`` but not both. 
Required.""" - user: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A unique identifier representing your end-user, which can help OpenAI to monitor and detect - abuse. `Learn more `_. Required.""" - service_tier: Optional[Union[str, "_models.ServiceTier"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Note: service_tier is not applicable to Azure OpenAI. Known values are: \"auto\", \"default\", - \"flex\", \"scale\", and \"priority\".""" - top_logprobs: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An integer between 0 and 20 specifying the number of most likely tokens to return at each token - position, each with an associated log probability.""" - previous_response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the previous response to the model. Use this to - create multi-turn conversations. Learn more about - `conversation state `_.""" - model: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The model deployment to use for the creation of this response.""" - reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - background: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to run the model response in the background. - `Learn more `_.""" - max_output_tokens: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An upper bound for the number of tokens that can be generated for a response, including visible - output tokens and `reasoning tokens `_.""" - max_tool_calls: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The maximum number of total calls to built-in tools that can be processed in a response. 
This - maximum number applies across all built-in tool calls, not per individual tool. Any further - attempts to call a tool by the model will be ignored.""" - text: Optional["_models.ResponseText"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Configuration options for a text response from the model. Can be plain - text or structured JSON data. Learn more: - * [Text inputs and outputs](/docs/guides/text) - * [Structured Outputs](/docs/guides/structured-outputs).""" - tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An array of tools the model may call while generating a response. You - can specify which tool to use by setting the ``tool_choice`` parameter. - The two categories of tools you can provide the model are: - * **Built-in tools**: Tools that are provided by OpenAI that extend the - model's capabilities, like [web search](/docs/guides/tools-web-search) - or [file search](/docs/guides/tools-file-search). Learn more about - [built-in tools](/docs/guides/tools). - * **Function calls (custom tools)**: Functions that are defined by you, - enabling the model to call your own code. Learn more about - [function calling](/docs/guides/function-calling).""" - tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceObject"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """How the model should select which tool (or tools) to use when generating - a response. See the ``tools`` parameter to see how to specify which tools - the model can call. 
Is either a Union[str, \"_models.ToolChoiceOptions\"] type or a - ToolChoiceObject type.""" - prompt: Optional["_models.Prompt"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - truncation: Optional[Literal["auto", "disabled"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The truncation strategy to use for the model response. - * `auto`: If the context of this response and previous ones exceeds - the model's context window size, the model will truncate the - response to fit the context window by dropping input items in the - middle of the conversation. - * `disabled` (default): If a model response will exceed the context window - size for a model, the request will fail with a 400 error. Is either a Literal[\"auto\"] type or - a Literal[\"disabled\"] type.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Unique identifier for this Response. Required.""" - object: Literal["response"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type of this resource - always set to ``response``. Required. Default value is - \"response\".""" - status: Optional[Literal["completed", "failed", "in_progress", "cancelled", "queued", "incomplete"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the response generation. One of ``completed``, ``failed``, - ``in_progress``, ``cancelled``, ``queued``, or ``incomplete``. Is one of the following types: - Literal[\"completed\"], Literal[\"failed\"], Literal[\"in_progress\"], Literal[\"cancelled\"], - Literal[\"queued\"], Literal[\"incomplete\"]""" - created_at: datetime.datetime = rest_field( - visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" - ) - """Unix timestamp (in seconds) of when this Response was created. 
Required.""" - error: "_models.ResponseError" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - incomplete_details: "_models.ResponseIncompleteDetails1" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Details about why the response is incomplete. Required.""" - output: list["_models.ItemResource"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An array of content items generated by the model. - * The length and order of items in the `output` array is dependent - on the model's response. - * Rather than accessing the first item in the `output` array and - assuming it's an `assistant` message with the content generated by - the model, you might consider using the `output_text` property where - supported in SDKs. Required.""" - instructions: Union[str, list["_models.ItemParam"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A system (or developer) message inserted into the model's context. - When using along with ``previous_response_id``, the instructions from a previous - response will not be carried over to the next response. This makes it simple - to swap out system (or developer) messages in new responses. Required. Is either a str type or - a [ItemParam] type.""" - output_text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """SDK-only convenience property that contains the aggregated text output - from all ``output_text`` items in the ``output`` array, if any are present. - Supported in the Python and JavaScript SDKs.""" - usage: Optional["_models.ResponseUsage"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - parallel_tool_calls: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to allow the model to run tool calls in parallel. 
Required.""" - conversation: "_models.ResponseConversation1" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required.""" - agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The agent used for this response.""" - structured_inputs: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The structured inputs to the response that can participate in prompt template substitution or - tool argument bindings.""" - - @overload - def __init__( # pylint: disable=too-many-locals - self, - *, - metadata: dict[str, str], - temperature: float, - top_p: float, - user: str, - id: str, # pylint: disable=redefined-builtin - created_at: datetime.datetime, - error: "_models.ResponseError", - incomplete_details: "_models.ResponseIncompleteDetails1", - output: list["_models.ItemResource"], - instructions: Union[str, list["_models.ItemParam"]], - parallel_tool_calls: bool, - conversation: "_models.ResponseConversation1", - service_tier: Optional[Union[str, "_models.ServiceTier"]] = None, - top_logprobs: Optional[int] = None, - previous_response_id: Optional[str] = None, - model: Optional[str] = None, - reasoning: Optional["_models.Reasoning"] = None, - background: Optional[bool] = None, - max_output_tokens: Optional[int] = None, - max_tool_calls: Optional[int] = None, - text: Optional["_models.ResponseText"] = None, - tools: Optional[list["_models.Tool"]] = None, - tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceObject"]] = None, - prompt: Optional["_models.Prompt"] = None, - truncation: Optional[Literal["auto", "disabled"]] = None, - status: Optional[Literal["completed", "failed", "in_progress", "cancelled", "queued", "incomplete"]] = None, - output_text: Optional[str] = None, - usage: Optional["_models.ResponseUsage"] = None, - agent: Optional["_models.AgentId"] = None, - structured_inputs: 
Optional[dict[str, Any]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.object: Literal["response"] = "response" - - -class ResponseStreamEvent(_Model): - """ResponseStreamEvent. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ResponseErrorEvent, ResponseCodeInterpreterCallCompletedEvent, - ResponseCodeInterpreterCallInProgressEvent, ResponseCodeInterpreterCallInterpretingEvent, - ResponseCodeInterpreterCallCodeDeltaEvent, ResponseCodeInterpreterCallCodeDoneEvent, - ResponseCompletedEvent, ResponseContentPartAddedEvent, ResponseContentPartDoneEvent, - ResponseCreatedEvent, ResponseFailedEvent, ResponseFileSearchCallCompletedEvent, - ResponseFileSearchCallInProgressEvent, ResponseFileSearchCallSearchingEvent, - ResponseFunctionCallArgumentsDeltaEvent, ResponseFunctionCallArgumentsDoneEvent, - ResponseImageGenCallCompletedEvent, ResponseImageGenCallGeneratingEvent, - ResponseImageGenCallInProgressEvent, ResponseImageGenCallPartialImageEvent, - ResponseInProgressEvent, ResponseIncompleteEvent, ResponseMCPCallArgumentsDeltaEvent, - ResponseMCPCallArgumentsDoneEvent, ResponseMCPCallCompletedEvent, ResponseMCPCallFailedEvent, - ResponseMCPCallInProgressEvent, ResponseMCPListToolsCompletedEvent, - ResponseMCPListToolsFailedEvent, ResponseMCPListToolsInProgressEvent, - ResponseOutputItemAddedEvent, ResponseOutputItemDoneEvent, ResponseTextDeltaEvent, - ResponseTextDoneEvent, ResponseQueuedEvent, ResponseReasoningDeltaEvent, - ResponseReasoningDoneEvent, ResponseReasoningSummaryDeltaEvent, - ResponseReasoningSummaryDoneEvent, ResponseReasoningSummaryPartAddedEvent, - ResponseReasoningSummaryPartDoneEvent, ResponseReasoningSummaryTextDeltaEvent, - 
ResponseReasoningSummaryTextDoneEvent, ResponseRefusalDeltaEvent, ResponseRefusalDoneEvent, - ResponseWebSearchCallCompletedEvent, ResponseWebSearchCallInProgressEvent, - ResponseWebSearchCallSearchingEvent - - :ivar type: Required. Known values are: "response.audio.delta", "response.audio.done", - "response.audio_transcript.delta", "response.audio_transcript.done", - "response.code_interpreter_call_code.delta", "response.code_interpreter_call_code.done", - "response.code_interpreter_call.completed", "response.code_interpreter_call.in_progress", - "response.code_interpreter_call.interpreting", "response.completed", - "response.content_part.added", "response.content_part.done", "response.created", "error", - "response.file_search_call.completed", "response.file_search_call.in_progress", - "response.file_search_call.searching", "response.function_call_arguments.delta", - "response.function_call_arguments.done", "response.in_progress", "response.failed", - "response.incomplete", "response.output_item.added", "response.output_item.done", - "response.refusal.delta", "response.refusal.done", "response.output_text.annotation.added", - "response.output_text.delta", "response.output_text.done", - "response.reasoning_summary_part.added", "response.reasoning_summary_part.done", - "response.reasoning_summary_text.delta", "response.reasoning_summary_text.done", - "response.web_search_call.completed", "response.web_search_call.in_progress", - "response.web_search_call.searching", "response.image_generation_call.completed", - "response.image_generation_call.generating", "response.image_generation_call.in_progress", - "response.image_generation_call.partial_image", "response.mcp_call.arguments_delta", - "response.mcp_call.arguments_done", "response.mcp_call.completed", "response.mcp_call.failed", - "response.mcp_call.in_progress", "response.mcp_list_tools.completed", - "response.mcp_list_tools.failed", "response.mcp_list_tools.in_progress", "response.queued", - 
"response.reasoning.delta", "response.reasoning.done", "response.reasoning_summary.delta", and - "response.reasoning_summary.done". - :vartype type: str or ~azure.ai.projects.models.ResponseStreamEventType - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"response.audio.delta\", \"response.audio.done\", - \"response.audio_transcript.delta\", \"response.audio_transcript.done\", - \"response.code_interpreter_call_code.delta\", \"response.code_interpreter_call_code.done\", - \"response.code_interpreter_call.completed\", \"response.code_interpreter_call.in_progress\", - \"response.code_interpreter_call.interpreting\", \"response.completed\", - \"response.content_part.added\", \"response.content_part.done\", \"response.created\", - \"error\", \"response.file_search_call.completed\", \"response.file_search_call.in_progress\", - \"response.file_search_call.searching\", \"response.function_call_arguments.delta\", - \"response.function_call_arguments.done\", \"response.in_progress\", \"response.failed\", - \"response.incomplete\", \"response.output_item.added\", \"response.output_item.done\", - \"response.refusal.delta\", \"response.refusal.done\", - \"response.output_text.annotation.added\", \"response.output_text.delta\", - \"response.output_text.done\", \"response.reasoning_summary_part.added\", - \"response.reasoning_summary_part.done\", \"response.reasoning_summary_text.delta\", - \"response.reasoning_summary_text.done\", \"response.web_search_call.completed\", - \"response.web_search_call.in_progress\", \"response.web_search_call.searching\", - \"response.image_generation_call.completed\", \"response.image_generation_call.generating\", - \"response.image_generation_call.in_progress\", - 
\"response.image_generation_call.partial_image\", \"response.mcp_call.arguments_delta\", - \"response.mcp_call.arguments_done\", \"response.mcp_call.completed\", - \"response.mcp_call.failed\", \"response.mcp_call.in_progress\", - \"response.mcp_list_tools.completed\", \"response.mcp_list_tools.failed\", - \"response.mcp_list_tools.in_progress\", \"response.queued\", \"response.reasoning.delta\", - \"response.reasoning.done\", \"response.reasoning_summary.delta\", and - \"response.reasoning_summary.done\".""" - sequence_number: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The sequence number for this event. Required.""" - - @overload - def __init__( - self, - *, - type: str, - sequence_number: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ResponseCodeInterpreterCallCodeDeltaEvent( - ResponseStreamEvent, discriminator="response.code_interpreter_call_code.delta" -): # pylint: disable=name-too-long - """Emitted when a partial code snippet is streamed by the code interpreter. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.code_interpreter_call_code.delta``. - Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA - :ivar output_index: The index of the output item in the response for which the code is being - streamed. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the code interpreter tool call item. Required. - :vartype item_id: str - :ivar delta: The partial code snippet being streamed by the code interpreter. Required. 
- :vartype delta: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.code_interpreter_call_code.delta``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response for which the code is being streamed. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the code interpreter tool call item. Required.""" - delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The partial code snippet being streamed by the code interpreter. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - delta: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA # type: ignore - - -class ResponseCodeInterpreterCallCodeDoneEvent( - ResponseStreamEvent, discriminator="response.code_interpreter_call_code.done" -): - """Emitted when the code snippet is finalized by the code interpreter. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.code_interpreter_call_code.done``. - Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE - :ivar output_index: The index of the output item in the response for which the code is - finalized. Required. 
- :vartype output_index: int - :ivar item_id: The unique identifier of the code interpreter tool call item. Required. - :vartype item_id: str - :ivar code: The final code snippet output by the code interpreter. Required. - :vartype code: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.code_interpreter_call_code.done``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response for which the code is finalized. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the code interpreter tool call item. Required.""" - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The final code snippet output by the code interpreter. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - code: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE # type: ignore - - -class ResponseCodeInterpreterCallCompletedEvent( - ResponseStreamEvent, discriminator="response.code_interpreter_call.completed" -): # pylint: disable=name-too-long - """Emitted when the code interpreter call is completed. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.code_interpreter_call.completed``. - Required. 
- :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED - :ivar output_index: The index of the output item in the response for which the code interpreter - call is completed. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the code interpreter tool call item. Required. - :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.code_interpreter_call.completed``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response for which the code interpreter call is completed. - Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the code interpreter tool call item. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED # type: ignore - - -class ResponseCodeInterpreterCallInProgressEvent( - ResponseStreamEvent, discriminator="response.code_interpreter_call.in_progress" -): # pylint: disable=name-too-long - """Emitted when a code interpreter call is in progress. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.code_interpreter_call.in_progress``. - Required. 
- :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS - :ivar output_index: The index of the output item in the response for which the code interpreter - call is in progress. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the code interpreter tool call item. Required. - :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.code_interpreter_call.in_progress``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response for which the code interpreter call is in - progress. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the code interpreter tool call item. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS # type: ignore - - -class ResponseCodeInterpreterCallInterpretingEvent( - ResponseStreamEvent, discriminator="response.code_interpreter_call.interpreting" -): # pylint: disable=name-too-long - """Emitted when the code interpreter is actively interpreting the code snippet. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.code_interpreter_call.interpreting``. - Required. 
- :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING - :ivar output_index: The index of the output item in the response for which the code interpreter - is interpreting code. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the code interpreter tool call item. Required. - :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.code_interpreter_call.interpreting``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response for which the code interpreter is interpreting - code. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the code interpreter tool call item. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING # type: ignore - - -class ResponseCompletedEvent(ResponseStreamEvent, discriminator="response.completed"): - """Emitted when the model response is complete. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.completed``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_COMPLETED - :ivar response: Properties of the completed response. Required. 
- :vartype response: ~azure.ai.projects.models.Response - """ - - type: Literal[ResponseStreamEventType.RESPONSE_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.completed``. Required.""" - response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Properties of the completed response. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - response: "_models.Response", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_COMPLETED # type: ignore - - -class ResponseContentPartAddedEvent(ResponseStreamEvent, discriminator="response.content_part.added"): - """Emitted when a new content part is added. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.content_part.added``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_CONTENT_PART_ADDED - :ivar item_id: The ID of the output item that the content part was added to. Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the content part was added to. Required. - :vartype output_index: int - :ivar content_index: The index of the content part that was added. Required. - :vartype content_index: int - :ivar part: The content part that was added. Required. 
- :vartype part: ~azure.ai.projects.models.ItemContent - """ - - type: Literal[ResponseStreamEventType.RESPONSE_CONTENT_PART_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.content_part.added``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the content part was added to. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the content part was added to. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the content part that was added. Required.""" - part: "_models.ItemContent" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content part that was added. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - part: "_models.ItemContent", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CONTENT_PART_ADDED # type: ignore - - -class ResponseContentPartDoneEvent(ResponseStreamEvent, discriminator="response.content_part.done"): - """Emitted when a content part is done. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.content_part.done``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_CONTENT_PART_DONE - :ivar item_id: The ID of the output item that the content part was added to. 
Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the content part was added to. Required. - :vartype output_index: int - :ivar content_index: The index of the content part that is done. Required. - :vartype content_index: int - :ivar part: The content part that is done. Required. - :vartype part: ~azure.ai.projects.models.ItemContent - """ - - type: Literal[ResponseStreamEventType.RESPONSE_CONTENT_PART_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.content_part.done``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the content part was added to. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the content part was added to. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the content part that is done. Required.""" - part: "_models.ItemContent" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content part that is done. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - part: "_models.ItemContent", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CONTENT_PART_DONE # type: ignore - - -class ResponseConversation1(_Model): - """ResponseConversation1. - - :ivar id: Required. 
- :vartype id: str - """ - - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ResponseCreatedEvent(ResponseStreamEvent, discriminator="response.created"): - """An event that is emitted when a response is created. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.created``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_CREATED - :ivar response: The response that was created. Required. - :vartype response: ~azure.ai.projects.models.Response - """ - - type: Literal[ResponseStreamEventType.RESPONSE_CREATED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.created``. Required.""" - response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The response that was created. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - response: "_models.Response", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CREATED # type: ignore - - -class ResponseError(_Model): - """An error object returned when the model fails to generate a Response. - - :ivar code: Required. 
Known values are: "server_error", "rate_limit_exceeded", - "invalid_prompt", "vector_store_timeout", "invalid_image", "invalid_image_format", - "invalid_base64_image", "invalid_image_url", "image_too_large", "image_too_small", - "image_parse_error", "image_content_policy_violation", "invalid_image_mode", - "image_file_too_large", "unsupported_image_media_type", "empty_image_file", - "failed_to_download_image", and "image_file_not_found". - :vartype code: str or ~azure.ai.projects.models.ResponseErrorCode - :ivar message: A human-readable description of the error. Required. - :vartype message: str - """ - - code: Union[str, "_models.ResponseErrorCode"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required. Known values are: \"server_error\", \"rate_limit_exceeded\", \"invalid_prompt\", - \"vector_store_timeout\", \"invalid_image\", \"invalid_image_format\", - \"invalid_base64_image\", \"invalid_image_url\", \"image_too_large\", \"image_too_small\", - \"image_parse_error\", \"image_content_policy_violation\", \"invalid_image_mode\", - \"image_file_too_large\", \"unsupported_image_media_type\", \"empty_image_file\", - \"failed_to_download_image\", and \"image_file_not_found\".""" - message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A human-readable description of the error. Required.""" - - @overload - def __init__( - self, - *, - code: Union[str, "_models.ResponseErrorCode"], - message: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ResponseErrorEvent(ResponseStreamEvent, discriminator="error"): - """Emitted when an error occurs. - - :ivar sequence_number: The sequence number for this event. Required. 
- :vartype sequence_number: int - :ivar type: The type of the event. Always ``error``. Required. - :vartype type: str or ~azure.ai.projects.models.ERROR - :ivar code: The error code. Required. - :vartype code: str - :ivar message: The error message. Required. - :vartype message: str - :ivar param: The error parameter. Required. - :vartype param: str - """ - - type: Literal[ResponseStreamEventType.ERROR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``error``. Required.""" - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error code. Required.""" - message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error message. Required.""" - param: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error parameter. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - code: str, - message: str, - param: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.ERROR # type: ignore - - -class ResponseFailedEvent(ResponseStreamEvent, discriminator="response.failed"): - """An event that is emitted when a response fails. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.failed``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_FAILED - :ivar response: The response that failed. Required. 
- :vartype response: ~azure.ai.projects.models.Response - """ - - type: Literal[ResponseStreamEventType.RESPONSE_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.failed``. Required.""" - response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The response that failed. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - response: "_models.Response", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_FAILED # type: ignore - - -class ResponseFileSearchCallCompletedEvent(ResponseStreamEvent, discriminator="response.file_search_call.completed"): - """Emitted when a file search call is completed (results found). - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.file_search_call.completed``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_FILE_SEARCH_CALL_COMPLETED - :ivar output_index: The index of the output item that the file search call is initiated. - Required. - :vartype output_index: int - :ivar item_id: The ID of the output item that the file search call is initiated. Required. - :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.file_search_call.completed``. 
Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the file search call is initiated. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the file search call is initiated. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_COMPLETED # type: ignore - - -class ResponseFileSearchCallInProgressEvent(ResponseStreamEvent, discriminator="response.file_search_call.in_progress"): - """Emitted when a file search call is initiated. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.file_search_call.in_progress``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS - :ivar output_index: The index of the output item that the file search call is initiated. - Required. - :vartype output_index: int - :ivar item_id: The ID of the output item that the file search call is initiated. Required. - :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.file_search_call.in_progress``. 
Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the file search call is initiated. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the file search call is initiated. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS # type: ignore - - -class ResponseFileSearchCallSearchingEvent(ResponseStreamEvent, discriminator="response.file_search_call.searching"): - """Emitted when a file search is currently searching. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.file_search_call.searching``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_FILE_SEARCH_CALL_SEARCHING - :ivar output_index: The index of the output item that the file search call is searching. - Required. - :vartype output_index: int - :ivar item_id: The ID of the output item that the file search call is initiated. Required. - :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.file_search_call.searching``. 
Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the file search call is searching. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the file search call is initiated. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING # type: ignore - - -class ResponseFormatJsonSchemaSchema(_Model): - """The schema for the response format, described as a JSON Schema object. - Learn how to build JSON schemas `here `_. - - """ - - -class ResponseFunctionCallArgumentsDeltaEvent( - ResponseStreamEvent, discriminator="response.function_call_arguments.delta" -): - """Emitted when there is a partial function-call arguments delta. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.function_call_arguments.delta``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA - :ivar item_id: The ID of the output item that the function-call arguments delta is added to. - Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the function-call arguments delta is - added to. Required. - :vartype output_index: int - :ivar delta: The function-call arguments delta that is added. Required. 
- :vartype delta: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.function_call_arguments.delta``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the function-call arguments delta is added to. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the function-call arguments delta is added to. Required.""" - delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The function-call arguments delta that is added. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - delta: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA # type: ignore - - -class ResponseFunctionCallArgumentsDoneEvent( - ResponseStreamEvent, discriminator="response.function_call_arguments.done" -): - """Emitted when function-call arguments are finalized. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE - :ivar item_id: The ID of the item. Required. - :vartype item_id: str - :ivar output_index: The index of the output item. Required. - :vartype output_index: int - :ivar arguments: The function-call arguments. Required. 
- :vartype arguments: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the item. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The function-call arguments. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - arguments: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE # type: ignore - - -class ResponseImageGenCallCompletedEvent(ResponseStreamEvent, discriminator="response.image_generation_call.completed"): - """Emitted when an image generation tool call has completed and the final image is available. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.image_generation_call.completed'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the image generation item being processed. Required. 
- :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.image_generation_call.completed'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the image generation item being processed. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED # type: ignore - - -class ResponseImageGenCallGeneratingEvent( - ResponseStreamEvent, discriminator="response.image_generation_call.generating" -): - """Emitted when an image generation tool call is actively generating an image (intermediate - state). - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.image_generation_call.generating'. - Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_GENERATING - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the image generation item being processed. Required. 
- :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_GENERATING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.image_generation_call.generating'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the image generation item being processed. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_GENERATING # type: ignore - - -class ResponseImageGenCallInProgressEvent( - ResponseStreamEvent, discriminator="response.image_generation_call.in_progress" -): - """Emitted when an image generation tool call is in progress. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.image_generation_call.in_progress'. - Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the image generation item being processed. Required. 
- :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.image_generation_call.in_progress'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the image generation item being processed. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS # type: ignore - - -class ResponseImageGenCallPartialImageEvent( - ResponseStreamEvent, discriminator="response.image_generation_call.partial_image" -): - """Emitted when a partial image is available during image generation streaming. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.image_generation_call.partial_image'. - Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the image generation item being processed. Required. 
- :vartype item_id: str - :ivar partial_image_index: 0-based index for the partial image (backend is 1-based, but this is - 0-based for the user). Required. - :vartype partial_image_index: int - :ivar partial_image_b64: Base64-encoded partial image data, suitable for rendering as an image. - Required. - :vartype partial_image_b64: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.image_generation_call.partial_image'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the image generation item being processed. Required.""" - partial_image_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """0-based index for the partial image (backend is 1-based, but this is 0-based for the user). - Required.""" - partial_image_b64: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Base64-encoded partial image data, suitable for rendering as an image. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - partial_image_index: int, - partial_image_b64: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE # type: ignore - - -class ResponseIncompleteDetails1(_Model): - """ResponseIncompleteDetails1. 
- - :ivar reason: The reason why the response is incomplete. Is either a - Literal["max_output_tokens"] type or a Literal["content_filter"] type. - :vartype reason: str or str - """ - - reason: Optional[Literal["max_output_tokens", "content_filter"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The reason why the response is incomplete. Is either a Literal[\"max_output_tokens\"] type or a - Literal[\"content_filter\"] type.""" - - @overload - def __init__( - self, - *, - reason: Optional[Literal["max_output_tokens", "content_filter"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ResponseIncompleteEvent(ResponseStreamEvent, discriminator="response.incomplete"): - """An event that is emitted when a response finishes as incomplete. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.incomplete``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_INCOMPLETE - :ivar response: The response that was incomplete. Required. - :vartype response: ~azure.ai.projects.models.Response - """ - - type: Literal[ResponseStreamEventType.RESPONSE_INCOMPLETE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.incomplete``. Required.""" - response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The response that was incomplete. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - response: "_models.Response", - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_INCOMPLETE # type: ignore - - -class ResponseInProgressEvent(ResponseStreamEvent, discriminator="response.in_progress"): - """Emitted when the response is in progress. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.in_progress``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_IN_PROGRESS - :ivar response: The response that is in progress. Required. - :vartype response: ~azure.ai.projects.models.Response - """ - - type: Literal[ResponseStreamEventType.RESPONSE_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.in_progress``. Required.""" - response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The response that is in progress. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - response: "_models.Response", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_IN_PROGRESS # type: ignore - - -class ResponseMCPCallArgumentsDeltaEvent(ResponseStreamEvent, discriminator="response.mcp_call.arguments_delta"): - """Emitted when there is a delta (partial update) to the arguments of an MCP tool call. - - :ivar sequence_number: The sequence number for this event. 
Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_call.arguments_delta'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_ARGUMENTS_DELTA - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the MCP tool call item being processed. Required. - :vartype item_id: str - :ivar delta: The partial update to the arguments for the MCP tool call. Required. - :vartype delta: any - """ - - type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_call.arguments_delta'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the MCP tool call item being processed. Required.""" - delta: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The partial update to the arguments for the MCP tool call. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - delta: Any, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DELTA # type: ignore - - -class ResponseMCPCallArgumentsDoneEvent(ResponseStreamEvent, discriminator="response.mcp_call.arguments_done"): - """Emitted when the arguments for an MCP tool call are finalized. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_call.arguments_done'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_ARGUMENTS_DONE - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the MCP tool call item being processed. Required. - :vartype item_id: str - :ivar arguments: The finalized arguments for the MCP tool call. Required. - :vartype arguments: any - """ - - type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_call.arguments_done'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the MCP tool call item being processed. Required.""" - arguments: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The finalized arguments for the MCP tool call. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - arguments: Any, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DONE # type: ignore - - -class ResponseMCPCallCompletedEvent(ResponseStreamEvent, discriminator="response.mcp_call.completed"): - """Emitted when an MCP tool call has completed successfully. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_call.completed'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_COMPLETED - """ - - type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_call.completed'. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_COMPLETED # type: ignore - - -class ResponseMCPCallFailedEvent(ResponseStreamEvent, discriminator="response.mcp_call.failed"): - """Emitted when an MCP tool call has failed. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_call.failed'. Required. 
- :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_FAILED - """ - - type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_call.failed'. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_FAILED # type: ignore - - -class ResponseMCPCallInProgressEvent(ResponseStreamEvent, discriminator="response.mcp_call.in_progress"): - """Emitted when an MCP tool call is in progress. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_call.in_progress'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_IN_PROGRESS - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the MCP tool call item being processed. Required. - :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_call.in_progress'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. 
Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the MCP tool call item being processed. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_IN_PROGRESS # type: ignore - - -class ResponseMCPListToolsCompletedEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.completed"): - """Emitted when the list of available MCP tools has been successfully retrieved. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_list_tools.completed'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_LIST_TOOLS_COMPLETED - """ - - type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_list_tools.completed'. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_COMPLETED # type: ignore - - -class ResponseMCPListToolsFailedEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.failed"): - """Emitted when the attempt to list available MCP tools has failed. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_list_tools.failed'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_LIST_TOOLS_FAILED - """ - - type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_list_tools.failed'. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_FAILED # type: ignore - - -class ResponseMCPListToolsInProgressEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.in_progress"): - """Emitted when the system is in the process of retrieving the list of available MCP tools. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_list_tools.in_progress'. Required. 
- :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS - """ - - type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_list_tools.in_progress'. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS # type: ignore - - -class ResponseOutputItemAddedEvent(ResponseStreamEvent, discriminator="response.output_item.added"): - """Emitted when a new output item is added. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.output_item.added``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_ITEM_ADDED - :ivar output_index: The index of the output item that was added. Required. - :vartype output_index: int - :ivar item: The output item that was added. Required. - :vartype item: ~azure.ai.projects.models.ItemResource - """ - - type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.output_item.added``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that was added. 
Required.""" - item: "_models.ItemResource" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The output item that was added. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item: "_models.ItemResource", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED # type: ignore - - -class ResponseOutputItemDoneEvent(ResponseStreamEvent, discriminator="response.output_item.done"): - """Emitted when an output item is marked done. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.output_item.done``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_ITEM_DONE - :ivar output_index: The index of the output item that was marked done. Required. - :vartype output_index: int - :ivar item: The output item that was marked done. Required. - :vartype item: ~azure.ai.projects.models.ItemResource - """ - - type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.output_item.done``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that was marked done. Required.""" - item: "_models.ItemResource" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The output item that was marked done. 
Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item: "_models.ItemResource", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE # type: ignore - - -class ResponsePromptVariables(_Model): - """Optional map of values to substitute in for variables in your - prompt. The substitution values can either be strings, or other - Response input types like images or files. - - """ - - -class ResponseQueuedEvent(ResponseStreamEvent, discriminator="response.queued"): - """Emitted when a response is queued and waiting to be processed. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.queued'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_QUEUED - :ivar response: The full response object that is queued. Required. - :vartype response: ~azure.ai.projects.models.Response - """ - - type: Literal[ResponseStreamEventType.RESPONSE_QUEUED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.queued'. Required.""" - response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The full response object that is queued. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - response: "_models.Response", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_QUEUED # type: ignore - - -class ResponseReasoningDeltaEvent(ResponseStreamEvent, discriminator="response.reasoning.delta"): - """Emitted when there is a delta (partial update) to the reasoning content. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.reasoning.delta'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_DELTA - :ivar item_id: The unique identifier of the item for which reasoning is being updated. - Required. - :vartype item_id: str - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar content_index: The index of the reasoning content part within the output item. Required. - :vartype content_index: int - :ivar delta: The partial update to the reasoning content. Required. - :vartype delta: any - """ - - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.reasoning.delta'. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the item for which reasoning is being updated. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the reasoning content part within the output item. 
Required.""" - delta: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The partial update to the reasoning content. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - delta: Any, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_DELTA # type: ignore - - -class ResponseReasoningDoneEvent(ResponseStreamEvent, discriminator="response.reasoning.done"): - """Emitted when the reasoning content is finalized for an item. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.reasoning.done'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_DONE - :ivar item_id: The unique identifier of the item for which reasoning is finalized. Required. - :vartype item_id: str - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar content_index: The index of the reasoning content part within the output item. Required. - :vartype content_index: int - :ivar text: The finalized reasoning text. Required. - :vartype text: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.reasoning.done'. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the item for which reasoning is finalized. 
Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the reasoning content part within the output item. Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The finalized reasoning text. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - text: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_DONE # type: ignore - - -class ResponseReasoningSummaryDeltaEvent(ResponseStreamEvent, discriminator="response.reasoning_summary.delta"): - """Emitted when there is a delta (partial update) to the reasoning summary content. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.reasoning_summary.delta'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_DELTA - :ivar item_id: The unique identifier of the item for which the reasoning summary is being - updated. Required. - :vartype item_id: str - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar summary_index: The index of the summary part within the output item. Required. - :vartype summary_index: int - :ivar delta: The partial update to the reasoning summary content. Required. 
- :vartype delta: any - """ - - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.reasoning_summary.delta'. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the item for which the reasoning summary is being updated. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the summary part within the output item. Required.""" - delta: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The partial update to the reasoning summary content. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - summary_index: int, - delta: Any, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DELTA # type: ignore - - -class ResponseReasoningSummaryDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary.done"): - """Emitted when the reasoning summary content is finalized for an item. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.reasoning_summary.done'. Required. 
- :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_DONE - :ivar item_id: The unique identifier of the item for which the reasoning summary is finalized. - Required. - :vartype item_id: str - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar summary_index: The index of the summary part within the output item. Required. - :vartype summary_index: int - :ivar text: The finalized reasoning summary text. Required. - :vartype text: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.reasoning_summary.done'. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the item for which the reasoning summary is finalized. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the summary part within the output item. Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The finalized reasoning summary text. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - summary_index: int, - text: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DONE # type: ignore - - -class ResponseReasoningSummaryPartAddedEvent( - ResponseStreamEvent, discriminator="response.reasoning_summary_part.added" -): - """Emitted when a new reasoning summary part is added. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.reasoning_summary_part.added``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_PART_ADDED - :ivar item_id: The ID of the item this summary part is associated with. Required. - :vartype item_id: str - :ivar output_index: The index of the output item this summary part is associated with. - Required. - :vartype output_index: int - :ivar summary_index: The index of the summary part within the reasoning summary. Required. - :vartype summary_index: int - :ivar part: The summary part that was added. Required. - :vartype part: ~azure.ai.projects.models.ReasoningItemSummaryPart - """ - - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.reasoning_summary_part.added``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the item this summary part is associated with. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item this summary part is associated with. Required.""" - summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the summary part within the reasoning summary. 
Required.""" - part: "_models.ReasoningItemSummaryPart" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The summary part that was added. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - summary_index: int, - part: "_models.ReasoningItemSummaryPart", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_ADDED # type: ignore - - -class ResponseReasoningSummaryPartDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary_part.done"): - """Emitted when a reasoning summary part is completed. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.reasoning_summary_part.done``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_PART_DONE - :ivar item_id: The ID of the item this summary part is associated with. Required. - :vartype item_id: str - :ivar output_index: The index of the output item this summary part is associated with. - Required. - :vartype output_index: int - :ivar summary_index: The index of the summary part within the reasoning summary. Required. - :vartype summary_index: int - :ivar part: The completed summary part. Required. - :vartype part: ~azure.ai.projects.models.ReasoningItemSummaryPart - """ - - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.reasoning_summary_part.done``. 
Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the item this summary part is associated with. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item this summary part is associated with. Required.""" - summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the summary part within the reasoning summary. Required.""" - part: "_models.ReasoningItemSummaryPart" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The completed summary part. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - summary_index: int, - part: "_models.ReasoningItemSummaryPart", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_DONE # type: ignore - - -class ResponseReasoningSummaryTextDeltaEvent( - ResponseStreamEvent, discriminator="response.reasoning_summary_text.delta" -): - """Emitted when a delta is added to a reasoning summary text. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.reasoning_summary_text.delta``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_TEXT_DELTA - :ivar item_id: The ID of the item this summary text delta is associated with. Required. - :vartype item_id: str - :ivar output_index: The index of the output item this summary text delta is associated with. - Required. 
- :vartype output_index: int - :ivar summary_index: The index of the summary part within the reasoning summary. Required. - :vartype summary_index: int - :ivar delta: The text delta that was added to the summary. Required. - :vartype delta: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.reasoning_summary_text.delta``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the item this summary text delta is associated with. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item this summary text delta is associated with. Required.""" - summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the summary part within the reasoning summary. Required.""" - delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text delta that was added to the summary. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - summary_index: int, - delta: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DELTA # type: ignore - - -class ResponseReasoningSummaryTextDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary_text.done"): - """Emitted when a reasoning summary text is completed. - - :ivar sequence_number: The sequence number for this event. Required. 
- :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.reasoning_summary_text.done``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_TEXT_DONE - :ivar item_id: The ID of the item this summary text is associated with. Required. - :vartype item_id: str - :ivar output_index: The index of the output item this summary text is associated with. - Required. - :vartype output_index: int - :ivar summary_index: The index of the summary part within the reasoning summary. Required. - :vartype summary_index: int - :ivar text: The full text of the completed reasoning summary. Required. - :vartype text: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.reasoning_summary_text.done``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the item this summary text is associated with. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item this summary text is associated with. Required.""" - summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the summary part within the reasoning summary. Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The full text of the completed reasoning summary. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - summary_index: int, - text: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DONE # type: ignore - - -class ResponseRefusalDeltaEvent(ResponseStreamEvent, discriminator="response.refusal.delta"): - """Emitted when there is a partial refusal text. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.refusal.delta``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REFUSAL_DELTA - :ivar item_id: The ID of the output item that the refusal text is added to. Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the refusal text is added to. Required. - :vartype output_index: int - :ivar content_index: The index of the content part that the refusal text is added to. Required. - :vartype content_index: int - :ivar delta: The refusal text that is added. Required. - :vartype delta: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_REFUSAL_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.refusal.delta``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the refusal text is added to. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the refusal text is added to. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the content part that the refusal text is added to. Required.""" - delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The refusal text that is added. 
Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - delta: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REFUSAL_DELTA # type: ignore - - -class ResponseRefusalDoneEvent(ResponseStreamEvent, discriminator="response.refusal.done"): - """Emitted when refusal text is finalized. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.refusal.done``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REFUSAL_DONE - :ivar item_id: The ID of the output item that the refusal text is finalized. Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the refusal text is finalized. Required. - :vartype output_index: int - :ivar content_index: The index of the content part that the refusal text is finalized. - Required. - :vartype content_index: int - :ivar refusal: The refusal text that is finalized. Required. - :vartype refusal: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_REFUSAL_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.refusal.done``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the refusal text is finalized. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the refusal text is finalized. 
Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the content part that the refusal text is finalized. Required.""" - refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The refusal text that is finalized. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - refusal: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REFUSAL_DONE # type: ignore - - -class ResponsesMessageItemParam(ItemParam, discriminator="message"): - """A response message item, representing a role and content, as provided as client request - parameters. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ResponsesAssistantMessageItemParam, ResponsesDeveloperMessageItemParam, - ResponsesSystemMessageItemParam, ResponsesUserMessageItemParam - - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar role: The role associated with the message. Required. Known values are: "system", - "developer", "user", and "assistant". - :vartype role: str or ~azure.ai.projects.models.ResponsesMessageRole - """ - - __mapping__: dict[str, _Model] = {} - type: Literal[ItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the responses item, which is always 'message'. 
Required.""" - role: str = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) - """The role associated with the message. Required. Known values are: \"system\", \"developer\", - \"user\", and \"assistant\".""" - - @overload - def __init__( - self, - *, - role: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.MESSAGE # type: ignore - - -class ResponsesAssistantMessageItemParam(ResponsesMessageItemParam, discriminator="assistant"): - """A message parameter item with the ``assistant`` role. - - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar role: The role of the message, which is always ``assistant``. Required. - :vartype role: str or ~azure.ai.projects.models.ASSISTANT - :ivar content: The content associated with the message. Required. Is either a str type or a - [ItemContent] type. - :vartype content: str or list[~azure.ai.projects.models.ItemContent] - """ - - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.ASSISTANT] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``assistant``. Required.""" - content: Union["str", list["_models.ItemContent"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The content associated with the message. Required. Is either a str type or a [ItemContent] - type.""" - - @overload - def __init__( - self, - *, - content: Union[str, list["_models.ItemContent"]], - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.ASSISTANT # type: ignore - - -class ResponsesMessageItemResource(ItemResource, discriminator="message"): - """A response message resource item, representing a role and content, as provided on service - responses. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ResponsesAssistantMessageItemResource, ResponsesDeveloperMessageItemResource, - ResponsesSystemMessageItemResource, ResponsesUserMessageItemResource - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar role: The role associated with the message. Required. Known values are: "system", - "developer", "user", and "assistant". - :vartype role: str or ~azure.ai.projects.models.ResponsesMessageRole - """ - - __mapping__: dict[str, _Model] = {} - type: Literal[ItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the responses item, which is always 'message'. Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the item. 
One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - role: str = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) - """The role associated with the message. Required. Known values are: \"system\", \"developer\", - \"user\", and \"assistant\".""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - role: str, - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.MESSAGE # type: ignore - - -class ResponsesAssistantMessageItemResource(ResponsesMessageItemResource, discriminator="assistant"): - """A message resource item with the ``assistant`` role. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar role: The role of the message, which is always ``assistant``. Required. - :vartype role: str or ~azure.ai.projects.models.ASSISTANT - :ivar content: The content associated with the message. Required. 
- :vartype content: list[~azure.ai.projects.models.ItemContent] - """ - - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.ASSISTANT] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``assistant``. Required.""" - content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content associated with the message. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - content: list["_models.ItemContent"], - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.ASSISTANT # type: ignore - - -class ResponsesDeveloperMessageItemParam(ResponsesMessageItemParam, discriminator="developer"): - """A message parameter item with the ``developer`` role. - - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar role: The role of the message, which is always ``developer``. Required. - :vartype role: str or ~azure.ai.projects.models.DEVELOPER - :ivar content: The content associated with the message. Required. Is either a str type or a - [ItemContent] type. - :vartype content: str or list[~azure.ai.projects.models.ItemContent] - """ - - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.DEVELOPER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``developer``. 
Required.""" - content: Union["str", list["_models.ItemContent"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The content associated with the message. Required. Is either a str type or a [ItemContent] - type.""" - - @overload - def __init__( - self, - *, - content: Union[str, list["_models.ItemContent"]], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.DEVELOPER # type: ignore - - -class ResponsesDeveloperMessageItemResource(ResponsesMessageItemResource, discriminator="developer"): - """A message resource item with the ``developer`` role. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar role: The role of the message, which is always ``developer``. Required. - :vartype role: str or ~azure.ai.projects.models.DEVELOPER - :ivar content: The content associated with the message. Required. - :vartype content: list[~azure.ai.projects.models.ItemContent] - """ - - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.DEVELOPER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``developer``. 
Required.""" - content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content associated with the message. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - content: list["_models.ItemContent"], - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.DEVELOPER # type: ignore - - -class ResponsesSystemMessageItemParam(ResponsesMessageItemParam, discriminator="system"): - """A message parameter item with the ``system`` role. - - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar role: The role of the message, which is always ``system``. Required. - :vartype role: str or ~azure.ai.projects.models.SYSTEM - :ivar content: The content associated with the message. Required. Is either a str type or a - [ItemContent] type. - :vartype content: str or list[~azure.ai.projects.models.ItemContent] - """ - - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.SYSTEM] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``system``. Required.""" - content: Union["str", list["_models.ItemContent"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The content associated with the message. Required. 
Is either a str type or a [ItemContent] - type.""" - - @overload - def __init__( - self, - *, - content: Union[str, list["_models.ItemContent"]], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.SYSTEM # type: ignore - - -class ResponsesSystemMessageItemResource(ResponsesMessageItemResource, discriminator="system"): - """A message resource item with the ``system`` role. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar role: The role of the message, which is always ``system``. Required. - :vartype role: str or ~azure.ai.projects.models.SYSTEM - :ivar content: The content associated with the message. Required. - :vartype content: list[~azure.ai.projects.models.ItemContent] - """ - - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.SYSTEM] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``system``. Required.""" - content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content associated with the message. 
Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - content: list["_models.ItemContent"], - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.SYSTEM # type: ignore - - -class ResponsesUserMessageItemParam(ResponsesMessageItemParam, discriminator="user"): - """A message parameter item with the ``user`` role. - - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar role: The role of the message, which is always ``user``. Required. - :vartype role: str or ~azure.ai.projects.models.USER - :ivar content: The content associated with the message. Required. Is either a str type or a - [ItemContent] type. - :vartype content: str or list[~azure.ai.projects.models.ItemContent] - """ - - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.USER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``user``. Required.""" - content: Union["str", list["_models.ItemContent"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The content associated with the message. Required. Is either a str type or a [ItemContent] - type.""" - - @overload - def __init__( - self, - *, - content: Union[str, list["_models.ItemContent"]], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.USER # type: ignore - - -class ResponsesUserMessageItemResource(ResponsesMessageItemResource, discriminator="user"): - """A message resource item with the ``user`` role. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar role: The role of the message, which is always ``user``. Required. - :vartype role: str or ~azure.ai.projects.models.USER - :ivar content: The content associated with the message. Required. - :vartype content: list[~azure.ai.projects.models.ItemContent] - """ - - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.USER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``user``. Required.""" - content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content associated with the message. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - content: list["_models.ItemContent"], - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.USER # type: ignore - - -class ResponseText(_Model): - """ResponseText. - - :ivar format: - :vartype format: ~azure.ai.projects.models.ResponseTextFormatConfiguration - """ - - format: Optional["_models.ResponseTextFormatConfiguration"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - - @overload - def __init__( - self, - *, - format: Optional["_models.ResponseTextFormatConfiguration"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ResponseTextDeltaEvent(ResponseStreamEvent, discriminator="response.output_text.delta"): - """Emitted when there is an additional text delta. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.output_text.delta``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_TEXT_DELTA - :ivar item_id: The ID of the output item that the text delta was added to. Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the text delta was added to. Required. - :vartype output_index: int - :ivar content_index: The index of the content part that the text delta was added to. Required. - :vartype content_index: int - :ivar delta: The text delta that was added. Required. 
- :vartype delta: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.output_text.delta``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the text delta was added to. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the text delta was added to. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the content part that the text delta was added to. Required.""" - delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text delta that was added. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - delta: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DELTA # type: ignore - - -class ResponseTextDoneEvent(ResponseStreamEvent, discriminator="response.output_text.done"): - """Emitted when text content is finalized. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.output_text.done``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_TEXT_DONE - :ivar item_id: The ID of the output item that the text content is finalized. Required. 
- :vartype item_id: str - :ivar output_index: The index of the output item that the text content is finalized. Required. - :vartype output_index: int - :ivar content_index: The index of the content part that the text content is finalized. - Required. - :vartype content_index: int - :ivar text: The text content that is finalized. Required. - :vartype text: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.output_text.done``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the text content is finalized. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the text content is finalized. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the content part that the text content is finalized. Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text content that is finalized. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - text: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DONE # type: ignore - - -class ResponseTextFormatConfiguration(_Model): - """ResponseTextFormatConfiguration. - - You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - ResponseTextFormatConfigurationJsonObject, ResponseTextFormatConfigurationJsonSchema, - ResponseTextFormatConfigurationText - - :ivar type: Required. Known values are: "text", "json_schema", and "json_object". - :vartype type: str or ~azure.ai.projects.models.ResponseTextFormatConfigurationType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"text\", \"json_schema\", and \"json_object\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ResponseTextFormatConfigurationJsonObject( - ResponseTextFormatConfiguration, discriminator="json_object" -): # pylint: disable=name-too-long - """ResponseTextFormatConfigurationJsonObject. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.JSON_OBJECT - """ - - type: Literal[ResponseTextFormatConfigurationType.JSON_OBJECT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseTextFormatConfigurationType.JSON_OBJECT # type: ignore - - -class ResponseTextFormatConfigurationJsonSchema( - ResponseTextFormatConfiguration, discriminator="json_schema" -): # pylint: disable=name-too-long - """JSON Schema response format. 
Used to generate structured JSON responses. - Learn more about `Structured Outputs `_. - - :ivar type: The type of response format being defined. Always ``json_schema``. Required. - :vartype type: str or ~azure.ai.projects.models.JSON_SCHEMA - :ivar description: A description of what the response format is for, used by the model to - determine how to respond in the format. - :vartype description: str - :ivar name: The name of the response format. Must be a-z, A-Z, 0-9, or contain - underscores and dashes, with a maximum length of 64. Required. - :vartype name: str - :ivar schema: Required. - :vartype schema: ~azure.ai.projects.models.ResponseFormatJsonSchemaSchema - :ivar strict: Whether to enable strict schema adherence when generating the output. - If set to true, the model will always follow the exact schema defined - in the ``schema`` field. Only a subset of JSON Schema is supported when - ``strict`` is ``true``. To learn more, read the `Structured Outputs - guide `_. - :vartype strict: bool - """ - - type: Literal[ResponseTextFormatConfigurationType.JSON_SCHEMA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of response format being defined. Always ``json_schema``. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A description of what the response format is for, used by the model to - determine how to respond in the format.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the response format. Must be a-z, A-Z, 0-9, or contain - underscores and dashes, with a maximum length of 64. 
Required.""" - schema: "_models.ResponseFormatJsonSchemaSchema" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required.""" - strict: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to enable strict schema adherence when generating the output. - If set to true, the model will always follow the exact schema defined - in the ``schema`` field. Only a subset of JSON Schema is supported when - ``strict`` is ``true``. To learn more, read the `Structured Outputs - guide `_.""" - - @overload - def __init__( - self, - *, - name: str, - schema: "_models.ResponseFormatJsonSchemaSchema", - description: Optional[str] = None, - strict: Optional[bool] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseTextFormatConfigurationType.JSON_SCHEMA # type: ignore - - -class ResponseTextFormatConfigurationText(ResponseTextFormatConfiguration, discriminator="text"): - """ResponseTextFormatConfigurationText. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.TEXT - """ - - type: Literal[ResponseTextFormatConfigurationType.TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseTextFormatConfigurationType.TEXT # type: ignore - - -class ResponseUsage(_Model): - """Represents token usage details including input tokens, output tokens, - a breakdown of output tokens, and the total tokens used. - - :ivar input_tokens: The number of input tokens. Required. - :vartype input_tokens: int - :ivar input_tokens_details: A detailed breakdown of the input tokens. Required. - :vartype input_tokens_details: - ~azure.ai.projects.models.MemoryStoreOperationUsageInputTokensDetails - :ivar output_tokens: The number of output tokens. Required. - :vartype output_tokens: int - :ivar output_tokens_details: A detailed breakdown of the output tokens. Required. - :vartype output_tokens_details: - ~azure.ai.projects.models.MemoryStoreOperationUsageOutputTokensDetails - :ivar total_tokens: The total number of tokens used. Required. - :vartype total_tokens: int - """ - - input_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of input tokens. Required.""" - input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A detailed breakdown of the input tokens. Required.""" - output_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of output tokens. Required.""" - output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A detailed breakdown of the output tokens. Required.""" - total_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The total number of tokens used. 
Required.""" - - @overload - def __init__( - self, - *, - input_tokens: int, - input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails", - output_tokens: int, - output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails", - total_tokens: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ResponseWebSearchCallCompletedEvent(ResponseStreamEvent, discriminator="response.web_search_call.completed"): - """Note: web_search is not yet available via Azure OpenAI. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.web_search_call.completed``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_WEB_SEARCH_CALL_COMPLETED - :ivar output_index: The index of the output item that the web search call is associated with. - Required. - :vartype output_index: int - :ivar item_id: Unique ID for the output item associated with the web search call. Required. - :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.web_search_call.completed``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the web search call is associated with. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Unique ID for the output item associated with the web search call. 
Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_COMPLETED # type: ignore - - -class ResponseWebSearchCallInProgressEvent(ResponseStreamEvent, discriminator="response.web_search_call.in_progress"): - """Note: web_search is not yet available via Azure OpenAI. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.web_search_call.in_progress``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS - :ivar output_index: The index of the output item that the web search call is associated with. - Required. - :vartype output_index: int - :ivar item_id: Unique ID for the output item associated with the web search call. Required. - :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.web_search_call.in_progress``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the web search call is associated with. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Unique ID for the output item associated with the web search call. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS # type: ignore - - -class ResponseWebSearchCallSearchingEvent(ResponseStreamEvent, discriminator="response.web_search_call.searching"): - """Note: web_search is not yet available via Azure OpenAI. - - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.web_search_call.searching``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_WEB_SEARCH_CALL_SEARCHING - :ivar output_index: The index of the output item that the web search call is associated with. - Required. - :vartype output_index: int - :ivar item_id: Unique ID for the output item associated with the web search call. Required. - :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_SEARCHING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.web_search_call.searching``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the web search call is associated with. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Unique ID for the output item associated with the web search call. Required.""" - - @overload - def __init__( - self, - *, - sequence_number: int, - output_index: int, - item_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_SEARCHING # type: ignore - - -class SASCredentials(BaseCredentials, discriminator="SAS"): - """Shared Access Signature (SAS) credential definition. - - :ivar type: The credential type. Required. Shared Access Signature (SAS) credential - :vartype type: str or ~azure.ai.projects.models.SAS - :ivar sas_token: SAS token. - :vartype sas_token: str - """ - - type: Literal[CredentialType.SAS] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. Shared Access Signature (SAS) credential""" - sas_token: Optional[str] = rest_field(name="SAS", visibility=["read"]) - """SAS token.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = CredentialType.SAS # type: ignore - - -class Schedule(_Model): - """Schedule model. - - :ivar id: Identifier of the schedule. Required. - :vartype id: str - :ivar display_name: Name of the schedule. - :vartype display_name: str - :ivar description: Description of the schedule. - :vartype description: str - :ivar enabled: Enabled status of the schedule. Required. - :vartype enabled: bool - :ivar provisioning_status: Provisioning status of the schedule. Known values are: "Creating", - "Updating", "Deleting", "Succeeded", and "Failed". - :vartype provisioning_status: str or ~azure.ai.projects.models.ScheduleProvisioningStatus - :ivar trigger: Trigger for the schedule. Required. - :vartype trigger: ~azure.ai.projects.models.Trigger - :ivar task: Task for the schedule. Required. 
- :vartype task: ~azure.ai.projects.models.ScheduleTask - :ivar tags: Schedule's tags. Unlike properties, tags are fully mutable. - :vartype tags: dict[str, str] - :ivar properties: Schedule's properties. Unlike tags, properties are add-only. Once added, a - property cannot be removed. - :vartype properties: dict[str, str] - :ivar system_data: System metadata for the resource. Required. - :vartype system_data: dict[str, str] - """ - - id: str = rest_field(visibility=["read"]) - """Identifier of the schedule. Required.""" - display_name: Optional[str] = rest_field( - name="displayName", visibility=["read", "create", "update", "delete", "query"] - ) - """Name of the schedule.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Description of the schedule.""" - enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Enabled status of the schedule. Required.""" - provisioning_status: Optional[Union[str, "_models.ScheduleProvisioningStatus"]] = rest_field( - name="provisioningStatus", visibility=["read"] - ) - """Provisioning status of the schedule. Known values are: \"Creating\", \"Updating\", - \"Deleting\", \"Succeeded\", and \"Failed\".""" - trigger: "_models.Trigger" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Trigger for the schedule. Required.""" - task: "_models.ScheduleTask" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Task for the schedule. Required.""" - tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Schedule's tags. Unlike properties, tags are fully mutable.""" - properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Schedule's properties. Unlike tags, properties are add-only. 
Once added, a property cannot be - removed.""" - system_data: dict[str, str] = rest_field(name="systemData", visibility=["read"]) - """System metadata for the resource. Required.""" - - @overload - def __init__( - self, - *, - enabled: bool, - trigger: "_models.Trigger", - task: "_models.ScheduleTask", - display_name: Optional[str] = None, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - properties: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ScheduleRun(_Model): - """Schedule run model. - - :ivar id: Identifier of the schedule run. Required. - :vartype id: str - :ivar schedule_id: Identifier of the schedule. Required. - :vartype schedule_id: str - :ivar success: Trigger success status of the schedule run. Required. - :vartype success: bool - :ivar trigger_time: Trigger time of the schedule run. - :vartype trigger_time: str - :ivar error: Error information for the schedule run. - :vartype error: str - :ivar properties: Properties of the schedule run. Required. - :vartype properties: dict[str, str] - """ - - id: str = rest_field(visibility=["read"]) - """Identifier of the schedule run. Required.""" - schedule_id: str = rest_field(name="scheduleId", visibility=["read", "create", "update", "delete", "query"]) - """Identifier of the schedule. Required.""" - success: bool = rest_field(visibility=["read"]) - """Trigger success status of the schedule run. 
Required.""" - trigger_time: Optional[str] = rest_field( - name="triggerTime", visibility=["read", "create", "update", "delete", "query"] - ) - """Trigger time of the schedule run.""" - error: Optional[str] = rest_field(visibility=["read"]) - """Error information for the schedule run.""" - properties: dict[str, str] = rest_field(visibility=["read"]) - """Properties of the schedule run. Required.""" - - @overload - def __init__( - self, - *, - schedule_id: str, - trigger_time: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class SharepointAgentTool(Tool, discriminator="sharepoint_grounding_preview"): - """The input definition information for a sharepoint tool as used to configure an agent. - - :ivar type: The object type, which is always 'sharepoint_grounding'. Required. - :vartype type: str or ~azure.ai.projects.models.SHAREPOINT_GROUNDING_PREVIEW - :ivar sharepoint_grounding_preview: The sharepoint grounding tool parameters. Required. - :vartype sharepoint_grounding_preview: - ~azure.ai.projects.models.SharepointGroundingToolParameters - """ - - type: Literal[ToolType.SHAREPOINT_GROUNDING_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'sharepoint_grounding'. Required.""" - sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The sharepoint grounding tool parameters. Required.""" - - @overload - def __init__( - self, - *, - sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters", - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.SHAREPOINT_GROUNDING_PREVIEW # type: ignore - - -class SharepointGroundingToolParameters(_Model): - """The sharepoint grounding tool parameters. - - :ivar project_connections: The project connections attached to this tool. There can be a - maximum of 1 connection - resource attached to the tool. - :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] - """ - - project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The project connections attached to this tool. There can be a maximum of 1 connection - resource attached to the tool.""" - - @overload - def __init__( - self, - *, - project_connections: Optional[list["_models.ToolProjectConnection"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class StructuredInputDefinition(_Model): - """An structured input that can participate in prompt template substitutions and tool argument - binding. - - :ivar description: A human-readable description of the input. - :vartype description: str - :ivar default_value: The default value for the input if no run-time value is provided. - :vartype default_value: any - :ivar tool_argument_bindings: When provided, the input value is bound to the specified tool - arguments. - :vartype tool_argument_bindings: list[~azure.ai.projects.models.ToolArgumentBinding] - :ivar schema: The JSON schema for the structured input (optional). 
- :vartype schema: any - :ivar required: Whether the input property is required when the agent is invoked. - :vartype required: bool - """ - - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A human-readable description of the input.""" - default_value: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The default value for the input if no run-time value is provided.""" - tool_argument_bindings: Optional[list["_models.ToolArgumentBinding"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """When provided, the input value is bound to the specified tool arguments.""" - schema: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The JSON schema for the structured input (optional).""" - required: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the input property is required when the agent is invoked.""" - - @overload - def __init__( - self, - *, - description: Optional[str] = None, - default_value: Optional[Any] = None, - tool_argument_bindings: Optional[list["_models.ToolArgumentBinding"]] = None, - schema: Optional[Any] = None, - required: Optional[bool] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class StructuredOutputDefinition(_Model): - """A structured output that can be produced by the agent. - - :ivar name: The name of the structured output. Required. - :vartype name: str - :ivar description: A description of the output to emit. Used by the model to determine when to - emit the output. Required. - :vartype description: str - :ivar schema: The JSON schema for the structured output. Required. 
- :vartype schema: dict[str, any] - :ivar strict: Whether to enforce strict validation. Default ``true``. Required. - :vartype strict: bool - """ - - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the structured output. Required.""" - description: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A description of the output to emit. Used by the model to determine when to emit the output. - Required.""" - schema: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The JSON schema for the structured output. Required.""" - strict: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to enforce strict validation. Default ``true``. Required.""" - - @overload - def __init__( - self, - *, - name: str, - description: str, - schema: dict[str, Any], - strict: bool, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class StructuredOutputsItemResource(ItemResource, discriminator="structured_outputs"): - """StructuredOutputsItemResource. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.STRUCTURED_OUTPUTS - :ivar output: The structured output captured during the response. Required. 
- :vartype output: any - """ - - type: Literal[ItemType.STRUCTURED_OUTPUTS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - output: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The structured output captured during the response. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - output: Any, - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.STRUCTURED_OUTPUTS # type: ignore - - -class TaxonomyCategory(_Model): - """Taxonomy category definition. - - :ivar id: Unique identifier of the taxonomy category. Required. - :vartype id: str - :ivar name: Name of the taxonomy category. Required. - :vartype name: str - :ivar description: Description of the taxonomy category. - :vartype description: str - :ivar risk_category: Risk category associated with this taxonomy category. Required. Known - values are: "HateUnfairness", "Violence", "Sexual", and "SelfHarm". - :vartype risk_category: str or ~azure.ai.projects.models.RiskCategory - :ivar sub_categories: List of taxonomy sub categories. Required. - :vartype sub_categories: list[~azure.ai.projects.models.TaxonomySubCategory] - :ivar properties: Additional properties for the taxonomy category. - :vartype properties: dict[str, str] - """ - - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Unique identifier of the taxonomy category. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Name of the taxonomy category. 
Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Description of the taxonomy category.""" - risk_category: Union[str, "_models.RiskCategory"] = rest_field( - name="riskCategory", visibility=["read", "create", "update", "delete", "query"] - ) - """Risk category associated with this taxonomy category. Required. Known values are: - \"HateUnfairness\", \"Violence\", \"Sexual\", and \"SelfHarm\".""" - sub_categories: list["_models.TaxonomySubCategory"] = rest_field( - name="subCategories", visibility=["read", "create", "update", "delete", "query"] - ) - """List of taxonomy sub categories. Required.""" - properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Additional properties for the taxonomy category.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - name: str, - risk_category: Union[str, "_models.RiskCategory"], - sub_categories: list["_models.TaxonomySubCategory"], - description: Optional[str] = None, - properties: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class TaxonomySubCategory(_Model): - """Taxonomy sub-category definition. - - :ivar id: Unique identifier of the taxonomy sub-category. Required. - :vartype id: str - :ivar name: Name of the taxonomy sub-category. Required. - :vartype name: str - :ivar description: Description of the taxonomy sub-category. - :vartype description: str - :ivar enabled: List of taxonomy items under this sub-category. Required. - :vartype enabled: bool - :ivar properties: Additional properties for the taxonomy sub-category. 
- :vartype properties: dict[str, str] - """ - - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Unique identifier of the taxonomy sub-category. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Name of the taxonomy sub-category. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Description of the taxonomy sub-category.""" - enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """List of taxonomy items under this sub-category. Required.""" - properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Additional properties for the taxonomy sub-category.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - name: str, - enabled: bool, - description: Optional[str] = None, - properties: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ToolArgumentBinding(_Model): - """ToolArgumentBinding. - - :ivar tool_name: The name of the tool to participate in the argument binding. If not provided, - then all tools with matching arguments will participate in binding. - :vartype tool_name: str - :ivar argument_name: The name of the argument within the tool. Required. - :vartype argument_name: str - """ - - tool_name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool to participate in the argument binding. 
If not provided, then all tools - with matching arguments will participate in binding.""" - argument_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the argument within the tool. Required.""" - - @overload - def __init__( - self, - *, - argument_name: str, - tool_name: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ToolChoiceObject(_Model): - """ToolChoiceObject. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ToolChoiceObjectCodeInterpreter, ToolChoiceObjectComputer, ToolChoiceObjectFileSearch, - ToolChoiceObjectFunction, ToolChoiceObjectImageGen, ToolChoiceObjectMCP, - ToolChoiceObjectWebSearch - - :ivar type: Required. Known values are: "file_search", "function", "computer_use_preview", - "web_search_preview", "image_generation", "code_interpreter", and "mcp". - :vartype type: str or ~azure.ai.projects.models.ToolChoiceObjectType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"file_search\", \"function\", \"computer_use_preview\", - \"web_search_preview\", \"image_generation\", \"code_interpreter\", and \"mcp\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ToolChoiceObjectCodeInterpreter(ToolChoiceObject, discriminator="code_interpreter"): - """ToolChoiceObjectCodeInterpreter. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER - """ - - type: Literal[ToolChoiceObjectType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.CODE_INTERPRETER # type: ignore - - -class ToolChoiceObjectComputer(ToolChoiceObject, discriminator="computer_use_preview"): - """ToolChoiceObjectComputer. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER - """ - - type: Literal[ToolChoiceObjectType.COMPUTER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.COMPUTER # type: ignore - - -class ToolChoiceObjectFileSearch(ToolChoiceObject, discriminator="file_search"): - """ToolChoiceObjectFileSearch. - - :ivar type: Required. 
- :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH - """ - - type: Literal[ToolChoiceObjectType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.FILE_SEARCH # type: ignore - - -class ToolChoiceObjectFunction(ToolChoiceObject, discriminator="function"): - """Use this option to force the model to call a specific function. - - :ivar type: For function calling, the type is always ``function``. Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION - :ivar name: The name of the function to call. Required. - :vartype name: str - """ - - type: Literal[ToolChoiceObjectType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """For function calling, the type is always ``function``. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to call. Required.""" - - @overload - def __init__( - self, - *, - name: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.FUNCTION # type: ignore - - -class ToolChoiceObjectImageGen(ToolChoiceObject, discriminator="image_generation"): - """ToolChoiceObjectImageGen. - - :ivar type: Required. 
- :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION - """ - - type: Literal[ToolChoiceObjectType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.IMAGE_GENERATION # type: ignore - - -class ToolChoiceObjectMCP(ToolChoiceObject, discriminator="mcp"): - """Use this option to force the model to call a specific tool on a remote MCP server. - - :ivar type: For MCP tools, the type is always ``mcp``. Required. - :vartype type: str or ~azure.ai.projects.models.MCP - :ivar server_label: The label of the MCP server to use. Required. - :vartype server_label: str - :ivar name: The name of the tool to call on the server. - :vartype name: str - """ - - type: Literal[ToolChoiceObjectType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """For MCP tools, the type is always ``mcp``. Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server to use. Required.""" - name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool to call on the server.""" - - @overload - def __init__( - self, - *, - server_label: str, - name: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.MCP # type: ignore - - -class ToolChoiceObjectWebSearch(ToolChoiceObject, discriminator="web_search_preview"): - """Note: web_search is not yet available via Azure OpenAI. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH - """ - - type: Literal[ToolChoiceObjectType.WEB_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.WEB_SEARCH # type: ignore - - -class ToolDescription(_Model): - """Description of a tool that can be used by an agent. - - :ivar name: The name of the tool. - :vartype name: str - :ivar description: A brief description of the tool's purpose. - :vartype description: str - """ - - name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A brief description of the tool's purpose.""" - - @overload - def __init__( - self, - *, - name: Optional[str] = None, - description: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ToolProjectConnection(_Model): - """A project connection resource. 
- - :ivar project_connection_id: A project connection in a ToolProjectConnectionList attached to - this tool. Required. - :vartype project_connection_id: str - """ - - project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A project connection in a ToolProjectConnectionList attached to this tool. Required.""" - - @overload - def __init__( - self, - *, - project_connection_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ToolProjectConnectionList(_Model): - """A set of project connection resources currently used by either the ``bing_grounding``, - ``fabric_dataagent``, or ``sharepoint_grounding`` tools. - - :ivar project_connections: The project connections attached to this tool. There can be a - maximum of 1 connection - resource attached to the tool. - :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] - """ - - project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The project connections attached to this tool. There can be a maximum of 1 connection - resource attached to the tool.""" - - @overload - def __init__( - self, - *, - project_connections: Optional[list["_models.ToolProjectConnection"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class TopLogProb(_Model): - """The top log probability of a token. - - :ivar token: Required. - :vartype token: str - :ivar logprob: Required. 
- :vartype logprob: float - :ivar bytes: Required. - :vartype bytes: list[int] - """ - - token: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - logprob: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - bytes: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - - @overload - def __init__( - self, - *, - token: str, - logprob: float, - bytes: list[int], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class UserProfileMemoryItem(MemoryItem, discriminator="user_profile"): - """A memory item specifically containing user profile information extracted from conversations, - such as preferences, interests, and personal details. - - :ivar memory_id: The unique ID of the memory item. Required. - :vartype memory_id: str - :ivar updated_at: The last update time of the memory item. Required. - :vartype updated_at: ~datetime.datetime - :ivar scope: The namespace that logically groups and isolates memories, such as a user ID. - Required. - :vartype scope: str - :ivar content: The content of the memory. Required. - :vartype content: str - :ivar kind: The kind of the memory item. Required. User profile information extracted from - conversations. - :vartype kind: str or ~azure.ai.projects.models.USER_PROFILE - """ - - kind: Literal[MemoryItemKind.USER_PROFILE] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The kind of the memory item. Required. 
User profile information extracted from conversations.""" - - @overload - def __init__( - self, - *, - memory_id: str, - updated_at: datetime.datetime, - scope: str, - content: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.kind = MemoryItemKind.USER_PROFILE # type: ignore - - -class VectorStoreFileAttributes(_Model): - """Set of 16 key-value pairs that can be attached to an object. This can be - useful for storing additional information about the object in a structured - format, and querying for objects via API or the dashboard. Keys are strings - with a maximum length of 64 characters. Values are strings with a maximum - length of 512 characters, booleans, or numbers. - - """ - - -class WebSearchAction(_Model): - """WebSearchAction. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - WebSearchActionFind, WebSearchActionOpenPage, WebSearchActionSearch - - :ivar type: Required. Known values are: "search", "open_page", and "find". - :vartype type: str or ~azure.ai.projects.models.WebSearchActionType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"search\", \"open_page\", and \"find\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class WebSearchActionFind(WebSearchAction, discriminator="find"): - """Action type "find": Searches for a pattern within a loaded page. - - :ivar type: The action type. Required. - :vartype type: str or ~azure.ai.projects.models.FIND - :ivar url: The URL of the page searched for the pattern. Required. - :vartype url: str - :ivar pattern: The pattern or text to search for within the page. Required. - :vartype pattern: str - """ - - type: Literal[WebSearchActionType.FIND] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The action type. Required.""" - url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The URL of the page searched for the pattern. Required.""" - pattern: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The pattern or text to search for within the page. Required.""" - - @overload - def __init__( - self, - *, - url: str, - pattern: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = WebSearchActionType.FIND # type: ignore - - -class WebSearchActionOpenPage(WebSearchAction, discriminator="open_page"): - """Action type "open_page" - Opens a specific URL from search results. - - :ivar type: The action type. Required. - :vartype type: str or ~azure.ai.projects.models.OPEN_PAGE - :ivar url: The URL opened by the model. Required. - :vartype url: str - """ - - type: Literal[WebSearchActionType.OPEN_PAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The action type. 
Required.""" - url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The URL opened by the model. Required.""" - - @overload - def __init__( - self, - *, - url: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = WebSearchActionType.OPEN_PAGE # type: ignore - - -class WebSearchActionSearch(WebSearchAction, discriminator="search"): - """Action type "search" - Performs a web search query. - - :ivar type: The action type. Required. - :vartype type: str or ~azure.ai.projects.models.SEARCH - :ivar query: The search query. Required. - :vartype query: str - """ - - type: Literal[WebSearchActionType.SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The action type. Required.""" - query: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The search query. Required.""" - - @overload - def __init__( - self, - *, - query: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = WebSearchActionType.SEARCH # type: ignore - - -class WebSearchPreviewTool(Tool, discriminator="web_search_preview"): - """Note: web_search is not yet available via Azure OpenAI. - - :ivar type: The type of the web search tool. One of ``web_search_preview`` or - ``web_search_preview_2025_03_11``. Required. - :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_PREVIEW - :ivar user_location: The user's location. 
- :vartype user_location: ~azure.ai.projects.models.Location - :ivar search_context_size: High level guidance for the amount of context window space to use - for the search. One of ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of - the following types: Literal["low"], Literal["medium"], Literal["high"] - :vartype search_context_size: str or str or str - """ - - type: Literal[ToolType.WEB_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the web search tool. One of ``web_search_preview`` or - ``web_search_preview_2025_03_11``. Required.""" - user_location: Optional["_models.Location"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The user's location.""" - search_context_size: Optional[Literal["low", "medium", "high"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """High level guidance for the amount of context window space to use for the search. One of - ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of the following types: - Literal[\"low\"], Literal[\"medium\"], Literal[\"high\"]""" - - @overload - def __init__( - self, - *, - user_location: Optional["_models.Location"] = None, - search_context_size: Optional[Literal["low", "medium", "high"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.WEB_SEARCH_PREVIEW # type: ignore - - -class WebSearchToolCallItemParam(ItemParam, discriminator="web_search_call"): - """The results of a web search tool call. See the - `web search guide `_ for more information. - - :ivar type: Required. 
- :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_CALL - :ivar action: An object describing the specific action taken in this web search call. - Includes details on how the model used the web (search, open_page, find). Required. - :vartype action: ~azure.ai.projects.models.WebSearchAction - """ - - type: Literal[ItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - action: "_models.WebSearchAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An object describing the specific action taken in this web search call. - Includes details on how the model used the web (search, open_page, find). Required.""" - - @overload - def __init__( - self, - *, - action: "_models.WebSearchAction", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.WEB_SEARCH_CALL # type: ignore - - -class WebSearchToolCallItemResource(ItemResource, discriminator="web_search_call"): - """The results of a web search tool call. See the - `web search guide `_ for more information. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_CALL - :ivar status: The status of the web search tool call. Required. Is one of the following types: - Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["failed"] - :vartype status: str or str or str or str - :ivar action: An object describing the specific action taken in this web search call. 
- Includes details on how the model used the web (search, open_page, find). Required. - :vartype action: ~azure.ai.projects.models.WebSearchAction - """ - - type: Literal[ItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "searching", "completed", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the web search tool call. Required. Is one of the following types: - Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], Literal[\"failed\"]""" - action: "_models.WebSearchAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An object describing the specific action taken in this web search call. - Includes details on how the model used the web (search, open_page, find). Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "searching", "completed", "failed"], - action: "_models.WebSearchAction", - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.WEB_SEARCH_CALL # type: ignore - - -class WeeklyRecurrenceSchedule(RecurrenceSchedule, discriminator="Weekly"): - """Weekly recurrence schedule. - - :ivar type: Weekly recurrence type. Required. Weekly recurrence pattern. - :vartype type: str or ~azure.ai.projects.models.WEEKLY - :ivar days_of_week: Days of the week for the recurrence schedule. Required. 
- :vartype days_of_week: list[str or ~azure.ai.projects.models.DayOfWeek] - """ - - type: Literal[RecurrenceType.WEEKLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Weekly recurrence type. Required. Weekly recurrence pattern.""" - days_of_week: list[Union[str, "_models.DayOfWeek"]] = rest_field( - name="daysOfWeek", visibility=["read", "create", "update", "delete", "query"] - ) - """Days of the week for the recurrence schedule. Required.""" - - @overload - def __init__( - self, - *, - days_of_week: list[Union[str, "_models.DayOfWeek"]], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = RecurrenceType.WEEKLY # type: ignore - - -class WorkflowDefinition(AgentDefinition, discriminator="workflow"): - """The workflow specification in CSDL format. - - :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. - :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar kind: Required. - :vartype kind: str or ~azure.ai.projects.models.WORKFLOW - :ivar trigger: (Deprecated) The CSDL trigger definition. Use ``workflow`` property instead to - send CSDL yaml definition inline. - :vartype trigger: dict[str, any] - :ivar workflow: The CSDL YAML definition of the workflow. - :vartype workflow: str - """ - - kind: Literal[AgentKind.WORKFLOW] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - trigger: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """(Deprecated) The CSDL trigger definition. 
Use ``workflow`` property instead to send CSDL yaml - definition inline.""" - workflow: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The CSDL YAML definition of the workflow.""" - - @overload - def __init__( - self, - *, - rai_config: Optional["_models.RaiConfig"] = None, - trigger: Optional[dict[str, Any]] = None, - workflow: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.kind = AgentKind.WORKFLOW # type: ignore diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch.py deleted file mode 100644 index 6cd95db87150..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch.py +++ /dev/null @@ -1,39 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. - -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from typing import List, Dict -from ._patch_evaluations import EvaluatorIds -from ._models import CustomCredential as CustomCredentialGenerated - - -class CustomCredential(CustomCredentialGenerated): - """Custom credential definition. - - :ivar type: The credential type. Always equals CredentialType.CUSTOM. Required. - :vartype type: str or ~azure.ai.projects.models.CredentialType - :ivar credential_keys: The secret custom credential keys. Required. - :vartype credential_keys: dict[str, str] - """ - - credential_keys: Dict[str, str] = {} - """The secret custom credential keys. 
Required.""" - - -__all__: List[str] = [ - "EvaluatorIds", - "CustomCredential", -] # Add all objects you want publicly available to users at this package level - - -def patch_sdk(): - """Do not remove from this file. - - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch_evaluations.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch_evaluations.py deleted file mode 100644 index d362c28d0d8a..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch_evaluations.py +++ /dev/null @@ -1,48 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. 
- -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from enum import Enum - -from azure.core import CaseInsensitiveEnumMeta - - -class EvaluatorIds(str, Enum, metaclass=CaseInsensitiveEnumMeta): - RELEVANCE = "azureai://built-in/evaluators/relevance" - HATE_UNFAIRNESS = "azureai://built-in/evaluators/hate_unfairness" - VIOLENCE = "azureai://built-in/evaluators/violence" - GROUNDEDNESS = "azureai://built-in/evaluators/groundedness" - GROUNDEDNESS_PRO = "azureai://built-in/evaluators/groundedness_pro" - BLEU_SCORE = "azureai://built-in/evaluators/bleu_score" - CODE_VULNERABILITY = "azureai://built-in/evaluators/code_vulnerability" - COHERENCE = "azureai://built-in/evaluators/coherence" - CONTENT_SAFETY = "azureai://built-in/evaluators/content_safety" - F1_SCORE = "azureai://built-in/evaluators/f1_score" - FLUENCY = "azureai://built-in/evaluators/fluency" - GLEU_SCORE = "azureai://built-in/evaluators/gleu_score" - INDIRECT_ATTACK = "azureai://built-in/evaluators/indirect_attack" - INTENT_RESOLUTION = "azureai://built-in/evaluators/intent_resolution" - METEOR_SCORE = "azureai://built-in/evaluators/meteor_score" - PROTECTED_MATERIAL = "azureai://built-in/evaluators/protected_material" - RETRIEVAL = "azureai://built-in/evaluators/retrieval" - ROUGE_SCORE = "azureai://built-in/evaluators/rouge_score" - SELF_HARM = "azureai://built-in/evaluators/self_harm" - SEXUAL = "azureai://built-in/evaluators/sexual" - SIMILARITY = "azureai://built-in/evaluators/similarity" - QA = "azureai://built-in/evaluators/qa" - DOCUMENT_RETRIEVAL = "azureai://built-in/evaluators/document_retrieval" - TASK_ADHERENCE = "azureai://built-in/evaluators/task_adherence" - TOOL_CALL_ACCURACY = "azureai://built-in/evaluators/tool_call_accuracy" - UNGROUNDED_ATTRIBUTES = "azureai://built-in/evaluators/ungrounded_attributes" - RESPONSE_COMPLETENESS = "azureai://built-in/evaluators/response_completeness" - # AOAI Graders - LABEL_GRADER = 
"azureai://built-in/evaluators/azure-openai/label_grader" - STRING_CHECK_GRADER = "azureai://built-in/evaluators/azure-openai/string_check_grader" - TEXT_SIMILARITY_GRADER = "azureai://built-in/evaluators/azure-openai/text_similarity_grader" - GENERAL_GRADER = "azureai://built-in/evaluators/azure-openai/custom_grader" - SCORE_MODEL_GRADER = "azureai://built-in/evaluators/azure-openai/score_model_grader" diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/__init__.py deleted file mode 100644 index 8026245c2abc..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/model_base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/model_base.py deleted file mode 100644 index 03b8c4ce34a0..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/model_base.py +++ /dev/null @@ -1,1237 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# pylint: disable=protected-access, broad-except, import-error, no-value-for-parameter - -import copy -import calendar -import decimal -import functools -import sys -import logging -import base64 -import re -import typing -import enum -import email.utils -from datetime import datetime, date, time, timedelta, timezone -from json import JSONEncoder -import xml.etree.ElementTree as ET -from collections.abc import MutableMapping -from typing_extensions import Self -import isodate -from azure.core.exceptions import DeserializationError -from azure.core import CaseInsensitiveEnumMeta -from azure.core.pipeline import PipelineResponse -from azure.core.serialization import _Null -from azure.core.rest import HttpResponse - -_LOGGER = logging.getLogger(__name__) - -__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] - -TZ_UTC = timezone.utc -_T = typing.TypeVar("_T") - - -def _timedelta_as_isostr(td: timedelta) -> str: - """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S' - - Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython - - :param timedelta td: The timedelta to convert - :rtype: str - :return: ISO8601 version of this timedelta - """ - - # Split seconds to larger units - seconds = td.total_seconds() - minutes, seconds = divmod(seconds, 60) - hours, minutes = divmod(minutes, 60) - days, hours = divmod(hours, 24) - - days, hours, minutes = list(map(int, (days, hours, minutes))) - seconds = round(seconds, 6) - - # Build date - date_str = "" - if days: - date_str = "%sD" % days - - if hours or minutes or seconds: - # Build time - time_str = "T" - - # Hours - bigger_exists = date_str or hours - if bigger_exists: - time_str += "{:02}H".format(hours) - - # Minutes - bigger_exists = bigger_exists or minutes - if bigger_exists: - time_str += "{:02}M".format(minutes) - - # Seconds - try: - if seconds.is_integer(): - seconds_string = "{:02}".format(int(seconds)) - else: - # 9 chars long w/ leading 0, 6 digits after decimal - seconds_string = "%09.6f" % seconds - # Remove trailing zeros - seconds_string = seconds_string.rstrip("0") - except AttributeError: # int.is_integer() raises - seconds_string = "{:02}".format(seconds) - - time_str += "{}S".format(seconds_string) - else: - time_str = "" - - return "P" + date_str + time_str - - -def _serialize_bytes(o, format: typing.Optional[str] = None) -> str: - encoded = base64.b64encode(o).decode() - if format == "base64url": - return encoded.strip("=").replace("+", "-").replace("/", "_") - return encoded - - -def _serialize_datetime(o, format: typing.Optional[str] = None): - if hasattr(o, "year") and hasattr(o, "hour"): - if format == "rfc7231": - return email.utils.format_datetime(o, usegmt=True) - if format == "unix-timestamp": - return int(calendar.timegm(o.utctimetuple())) - - # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set) - if not o.tzinfo: - iso_formatted = 
o.replace(tzinfo=TZ_UTC).isoformat() - else: - iso_formatted = o.astimezone(TZ_UTC).isoformat() - # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) - return iso_formatted.replace("+00:00", "Z") - # Next try datetime.date or datetime.time - return o.isoformat() - - -def _is_readonly(p): - try: - return p._visibility == ["read"] - except AttributeError: - return False - - -class SdkJSONEncoder(JSONEncoder): - """A JSON encoder that's capable of serializing datetime objects and bytes.""" - - def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): - super().__init__(*args, **kwargs) - self.exclude_readonly = exclude_readonly - self.format = format - - def default(self, o): # pylint: disable=too-many-return-statements - if _is_model(o): - if self.exclude_readonly: - readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] - return {k: v for k, v in o.items() if k not in readonly_props} - return dict(o.items()) - try: - return super(SdkJSONEncoder, self).default(o) - except TypeError: - if isinstance(o, _Null): - return None - if isinstance(o, decimal.Decimal): - return float(o) - if isinstance(o, (bytes, bytearray)): - return _serialize_bytes(o, self.format) - try: - # First try datetime.datetime - return _serialize_datetime(o, self.format) - except AttributeError: - pass - # Last, try datetime.timedelta - try: - return _timedelta_as_isostr(o) - except AttributeError: - # This will be raised when it hits value.total_seconds in the method above - pass - return super(SdkJSONEncoder, self).default(o) - - -_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") -_VALID_RFC7231 = re.compile( - r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" - r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" -) - - -def _deserialize_datetime(attr: typing.Union[str, datetime]) -> 
datetime: - """Deserialize ISO-8601 formatted string into Datetime object. - - :param str attr: response string to be deserialized. - :rtype: ~datetime.datetime - :returns: The datetime object from that input - """ - if isinstance(attr, datetime): - # i'm already deserialized - return attr - attr = attr.upper() - match = _VALID_DATE.match(attr) - if not match: - raise ValueError("Invalid datetime string: " + attr) - - check_decimal = attr.split(".") - if len(check_decimal) > 1: - decimal_str = "" - for digit in check_decimal[1]: - if digit.isdigit(): - decimal_str += digit - else: - break - if len(decimal_str) > 6: - attr = attr.replace(decimal_str, decimal_str[0:6]) - - date_obj = isodate.parse_datetime(attr) - test_utc = date_obj.utctimetuple() - if test_utc.tm_year > 9999 or test_utc.tm_year < 1: - raise OverflowError("Hit max or min date") - return date_obj - - -def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: - """Deserialize RFC7231 formatted string into Datetime object. - - :param str attr: response string to be deserialized. - :rtype: ~datetime.datetime - :returns: The datetime object from that input - """ - if isinstance(attr, datetime): - # i'm already deserialized - return attr - match = _VALID_RFC7231.match(attr) - if not match: - raise ValueError("Invalid datetime string: " + attr) - - return email.utils.parsedate_to_datetime(attr) - - -def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: - """Deserialize unix timestamp into Datetime object. - - :param str attr: response string to be deserialized. - :rtype: ~datetime.datetime - :returns: The datetime object from that input - """ - if isinstance(attr, datetime): - # i'm already deserialized - return attr - return datetime.fromtimestamp(attr, TZ_UTC) - - -def _deserialize_date(attr: typing.Union[str, date]) -> date: - """Deserialize ISO-8601 formatted string into Date object. - :param str attr: response string to be deserialized. 
- :rtype: date - :returns: The date object from that input - """ - # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. - if isinstance(attr, date): - return attr - return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore - - -def _deserialize_time(attr: typing.Union[str, time]) -> time: - """Deserialize ISO-8601 formatted string into time object. - - :param str attr: response string to be deserialized. - :rtype: datetime.time - :returns: The time object from that input - """ - if isinstance(attr, time): - return attr - return isodate.parse_time(attr) - - -def _deserialize_bytes(attr): - if isinstance(attr, (bytes, bytearray)): - return attr - return bytes(base64.b64decode(attr)) - - -def _deserialize_bytes_base64(attr): - if isinstance(attr, (bytes, bytearray)): - return attr - padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore - attr = attr + padding # type: ignore - encoded = attr.replace("-", "+").replace("_", "/") - return bytes(base64.b64decode(encoded)) - - -def _deserialize_duration(attr): - if isinstance(attr, timedelta): - return attr - return isodate.parse_duration(attr) - - -def _deserialize_decimal(attr): - if isinstance(attr, decimal.Decimal): - return attr - return decimal.Decimal(str(attr)) - - -def _deserialize_int_as_str(attr): - if isinstance(attr, int): - return attr - return int(attr) - - -_DESERIALIZE_MAPPING = { - datetime: _deserialize_datetime, - date: _deserialize_date, - time: _deserialize_time, - bytes: _deserialize_bytes, - bytearray: _deserialize_bytes, - timedelta: _deserialize_duration, - typing.Any: lambda x: x, - decimal.Decimal: _deserialize_decimal, -} - -_DESERIALIZE_MAPPING_WITHFORMAT = { - "rfc3339": _deserialize_datetime, - "rfc7231": _deserialize_datetime_rfc7231, - "unix-timestamp": _deserialize_datetime_unix_timestamp, - "base64": _deserialize_bytes, - "base64url": _deserialize_bytes_base64, -} - - -def get_deserializer(annotation: typing.Any, rf: 
typing.Optional["_RestField"] = None): - if annotation is int and rf and rf._format == "str": - return _deserialize_int_as_str - if rf and rf._format: - return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) - return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore - - -def _get_type_alias_type(module_name: str, alias_name: str): - types = { - k: v - for k, v in sys.modules[module_name].__dict__.items() - if isinstance(v, typing._GenericAlias) # type: ignore - } - if alias_name not in types: - return alias_name - return types[alias_name] - - -def _get_model(module_name: str, model_name: str): - models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} - module_end = module_name.rsplit(".", 1)[0] - models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) - if isinstance(model_name, str): - model_name = model_name.split(".")[-1] - if model_name not in models: - return model_name - return models[model_name] - - -_UNSET = object() - - -class _MyMutableMapping(MutableMapping[str, typing.Any]): - def __init__(self, data: dict[str, typing.Any]) -> None: - self._data = data - - def __contains__(self, key: typing.Any) -> bool: - return key in self._data - - def __getitem__(self, key: str) -> typing.Any: - return self._data.__getitem__(key) - - def __setitem__(self, key: str, value: typing.Any) -> None: - self._data.__setitem__(key, value) - - def __delitem__(self, key: str) -> None: - self._data.__delitem__(key) - - def __iter__(self) -> typing.Iterator[typing.Any]: - return self._data.__iter__() - - def __len__(self) -> int: - return self._data.__len__() - - def __ne__(self, other: typing.Any) -> bool: - return not self.__eq__(other) - - def keys(self) -> typing.KeysView[str]: - """ - :returns: a set-like object providing a view on D's keys - :rtype: ~typing.KeysView - """ - return self._data.keys() - - def values(self) -> typing.ValuesView[typing.Any]: - """ - :returns: an object providing a 
view on D's values - :rtype: ~typing.ValuesView - """ - return self._data.values() - - def items(self) -> typing.ItemsView[str, typing.Any]: - """ - :returns: set-like object providing a view on D's items - :rtype: ~typing.ItemsView - """ - return self._data.items() - - def get(self, key: str, default: typing.Any = None) -> typing.Any: - """ - Get the value for key if key is in the dictionary, else default. - :param str key: The key to look up. - :param any default: The value to return if key is not in the dictionary. Defaults to None - :returns: D[k] if k in D, else d. - :rtype: any - """ - try: - return self[key] - except KeyError: - return default - - @typing.overload - def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ - - @typing.overload - def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs - - @typing.overload - def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs - - def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: - """ - Removes specified key and return the corresponding value. - :param str key: The key to pop. - :param any default: The value to return if key is not in the dictionary - :returns: The value corresponding to the key. - :rtype: any - :raises KeyError: If key is not found and default is not given. - """ - if default is _UNSET: - return self._data.pop(key) - return self._data.pop(key, default) - - def popitem(self) -> tuple[str, typing.Any]: - """ - Removes and returns some (key, value) pair - :returns: The (key, value) pair. - :rtype: tuple - :raises KeyError: if D is empty. - """ - return self._data.popitem() - - def clear(self) -> None: - """ - Remove all items from D. - """ - self._data.clear() - - def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ - """ - Updates D from mapping/iterable E and F. 
- :param any args: Either a mapping object or an iterable of key-value pairs. - """ - self._data.update(*args, **kwargs) - - @typing.overload - def setdefault(self, key: str, default: None = None) -> None: ... - - @typing.overload - def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs - - def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: - """ - Same as calling D.get(k, d), and setting D[k]=d if k not found - :param str key: The key to look up. - :param any default: The value to set if key is not in the dictionary - :returns: D[k] if k in D, else d. - :rtype: any - """ - if default is _UNSET: - return self._data.setdefault(key) - return self._data.setdefault(key, default) - - def __eq__(self, other: typing.Any) -> bool: - try: - other_model = self.__class__(other) - except Exception: - return False - return self._data == other_model._data - - def __repr__(self) -> str: - return str(self._data) - - -def _is_model(obj: typing.Any) -> bool: - return getattr(obj, "_is_model", False) - - -def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements - if isinstance(o, list): - return [_serialize(x, format) for x in o] - if isinstance(o, dict): - return {k: _serialize(v, format) for k, v in o.items()} - if isinstance(o, set): - return {_serialize(x, format) for x in o} - if isinstance(o, tuple): - return tuple(_serialize(x, format) for x in o) - if isinstance(o, (bytes, bytearray)): - return _serialize_bytes(o, format) - if isinstance(o, decimal.Decimal): - return float(o) - if isinstance(o, enum.Enum): - return o.value - if isinstance(o, int): - if format == "str": - return str(o) - return o - try: - # First try datetime.datetime - return _serialize_datetime(o, format) - except AttributeError: - pass - # Last, try datetime.timedelta - try: - return _timedelta_as_isostr(o) - except AttributeError: - # This will be raised when it hits value.total_seconds 
in the method above - pass - return o - - -def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]: - try: - return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) - except StopIteration: - return None - - -def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: - if not rf: - return _serialize(value, None) - if rf._is_multipart_file_input: - return value - if rf._is_model: - return _deserialize(rf._type, value) - if isinstance(value, ET.Element): - value = _deserialize(rf._type, value) - return _serialize(value, rf._format) - - -class Model(_MyMutableMapping): - _is_model = True - # label whether current class's _attr_to_rest_field has been calculated - # could not see _attr_to_rest_field directly because subclass inherits it from parent class - _calculated: set[str] = set() - - def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: - class_name = self.__class__.__name__ - if len(args) > 1: - raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") - dict_to_pass = { - rest_field._rest_name: rest_field._default - for rest_field in self._attr_to_rest_field.values() - if rest_field._default is not _UNSET - } - if args: # pylint: disable=too-many-nested-blocks - if isinstance(args[0], ET.Element): - existed_attr_keys = [] - model_meta = getattr(self, "_xml", {}) - - for rf in self._attr_to_rest_field.values(): - prop_meta = getattr(rf, "_xml", {}) - xml_name = prop_meta.get("name", rf._rest_name) - xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) - if xml_ns: - xml_name = "{" + xml_ns + "}" + xml_name - - # attribute - if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: - existed_attr_keys.append(xml_name) - dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) - continue - - # unwrapped element is array - if prop_meta.get("unwrapped", 
False): - # unwrapped array could either use prop items meta/prop meta - if prop_meta.get("itemsName"): - xml_name = prop_meta.get("itemsName") - xml_ns = prop_meta.get("itemNs") - if xml_ns: - xml_name = "{" + xml_ns + "}" + xml_name - items = args[0].findall(xml_name) # pyright: ignore - if len(items) > 0: - existed_attr_keys.append(xml_name) - dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) - continue - - # text element is primitive type - if prop_meta.get("text", False): - if args[0].text is not None: - dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) - continue - - # wrapped element could be normal property or array, it should only have one element - item = args[0].find(xml_name) - if item is not None: - existed_attr_keys.append(xml_name) - dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) - - # rest thing is additional properties - for e in args[0]: - if e.tag not in existed_attr_keys: - dict_to_pass[e.tag] = _convert_element(e) - else: - dict_to_pass.update( - {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} - ) - else: - non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] - if non_attr_kwargs: - # actual type errors only throw the first wrong keyword arg they see, so following that. 
- raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") - dict_to_pass.update( - { - self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) - for k, v in kwargs.items() - if v is not None - } - ) - super().__init__(dict_to_pass) - - def copy(self) -> "Model": - return Model(self.__dict__) - - def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: - if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: - # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', - # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' - mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order - attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property - k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") - } - annotations = { - k: v - for mro_class in mros - if hasattr(mro_class, "__annotations__") - for k, v in mro_class.__annotations__.items() - } - for attr, rf in attr_to_rest_field.items(): - rf._module = cls.__module__ - if not rf._type: - rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) - if not rf._rest_name_input: - rf._rest_name_input = attr - cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) - cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") - - return super().__new__(cls) - - def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: - for base in cls.__bases__: - if hasattr(base, "__mapping__"): - base.__mapping__[discriminator or cls.__name__] = cls # type: ignore - - @classmethod - def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: - for v in cls.__dict__.values(): - if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: - return v - 
return None - - @classmethod - def _deserialize(cls, data, exist_discriminators): - if not hasattr(cls, "__mapping__"): - return cls(data) - discriminator = cls._get_discriminator(exist_discriminators) - if discriminator is None: - return cls(data) - exist_discriminators.append(discriminator._rest_name) - if isinstance(data, ET.Element): - model_meta = getattr(cls, "_xml", {}) - prop_meta = getattr(discriminator, "_xml", {}) - xml_name = prop_meta.get("name", discriminator._rest_name) - xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) - if xml_ns: - xml_name = "{" + xml_ns + "}" + xml_name - - if data.get(xml_name) is not None: - discriminator_value = data.get(xml_name) - else: - discriminator_value = data.find(xml_name).text # pyright: ignore - else: - discriminator_value = data.get(discriminator._rest_name) - mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member - return mapped_cls._deserialize(data, exist_discriminators) - - def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: - """Return a dict that can be turned into json using json.dump. - - :keyword bool exclude_readonly: Whether to remove the readonly properties. 
- :returns: A dict JSON compatible object - :rtype: dict - """ - - result = {} - readonly_props = [] - if exclude_readonly: - readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] - for k, v in self.items(): - if exclude_readonly and k in readonly_props: # pyright: ignore - continue - is_multipart_file_input = False - try: - is_multipart_file_input = next( - rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k - )._is_multipart_file_input - except StopIteration: - pass - result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) - return result - - @staticmethod - def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: - if v is None or isinstance(v, _Null): - return None - if isinstance(v, (list, tuple, set)): - return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) - if isinstance(v, dict): - return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} - return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v - - -def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): - if _is_model(obj): - return obj - return _deserialize(model_deserializer, obj) - - -def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): - if obj is None: - return obj - return _deserialize_with_callable(if_obj_deserializer, obj) - - -def _deserialize_with_union(deserializers, obj): - for deserializer in deserializers: - try: - return _deserialize(deserializer, obj) - except DeserializationError: - pass - raise DeserializationError() - - -def _deserialize_dict( - value_deserializer: typing.Optional[typing.Callable], - module: typing.Optional[str], - obj: dict[typing.Any, typing.Any], -): - if obj is None: - return obj - if isinstance(obj, ET.Element): - obj = {child.tag: child for child in obj} - return {k: 
_deserialize(value_deserializer, v, module) for k, v in obj.items()} - - -def _deserialize_multiple_sequence( - entry_deserializers: list[typing.Optional[typing.Callable]], - module: typing.Optional[str], - obj, -): - if obj is None: - return obj - return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) - - -def _deserialize_sequence( - deserializer: typing.Optional[typing.Callable], - module: typing.Optional[str], - obj, -): - if obj is None: - return obj - if isinstance(obj, ET.Element): - obj = list(obj) - return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) - - -def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: - return sorted( - types, - key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), - ) - - -def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches - annotation: typing.Any, - module: typing.Optional[str], - rf: typing.Optional["_RestField"] = None, -) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: - if not annotation: - return None - - # is it a type alias? - if isinstance(annotation, str): - if module is not None: - annotation = _get_type_alias_type(module, annotation) - - # is it a forward ref / in quotes? - if isinstance(annotation, (str, typing.ForwardRef)): - try: - model_name = annotation.__forward_arg__ # type: ignore - except AttributeError: - model_name = annotation - if module is not None: - annotation = _get_model(module, model_name) # type: ignore - - try: - if module and _is_model(annotation): - if rf: - rf._is_model = True - - return functools.partial(_deserialize_model, annotation) # pyright: ignore - except Exception: - pass - - # is it a literal? - try: - if annotation.__origin__ is typing.Literal: # pyright: ignore - return None - except AttributeError: - pass - - # is it optional? 
- try: - if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore - if len(annotation.__args__) <= 2: # pyright: ignore - if_obj_deserializer = _get_deserialize_callable_from_annotation( - next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore - ) - - return functools.partial(_deserialize_with_optional, if_obj_deserializer) - # the type is Optional[Union[...]], we need to remove the None type from the Union - annotation_copy = copy.copy(annotation) - annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore - return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) - except AttributeError: - pass - - # is it union? - if getattr(annotation, "__origin__", None) is typing.Union: - # initial ordering is we make `string` the last deserialization option, because it is often them most generic - deserializers = [ - _get_deserialize_callable_from_annotation(arg, module, rf) - for arg in _sorted_annotations(annotation.__args__) # pyright: ignore - ] - - return functools.partial(_deserialize_with_union, deserializers) - - try: - annotation_name = ( - annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore - ) - if annotation_name.lower() == "dict": - value_deserializer = _get_deserialize_callable_from_annotation( - annotation.__args__[1], module, rf # pyright: ignore - ) - - return functools.partial( - _deserialize_dict, - value_deserializer, - module, - ) - except (AttributeError, IndexError): - pass - try: - annotation_name = ( - annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore - ) - if annotation_name.lower() in ["list", "set", "tuple", "sequence"]: - if len(annotation.__args__) > 1: # pyright: ignore - entry_deserializers = [ - _get_deserialize_callable_from_annotation(dt, module, rf) - for dt in annotation.__args__ # pyright: ignore - ] - return 
functools.partial(_deserialize_multiple_sequence, entry_deserializers, module) - deserializer = _get_deserialize_callable_from_annotation( - annotation.__args__[0], module, rf # pyright: ignore - ) - - return functools.partial(_deserialize_sequence, deserializer, module) - except (TypeError, IndexError, AttributeError, SyntaxError): - pass - - def _deserialize_default( - deserializer, - obj, - ): - if obj is None: - return obj - try: - return _deserialize_with_callable(deserializer, obj) - except Exception: - pass - return obj - - if get_deserializer(annotation, rf): - return functools.partial(_deserialize_default, get_deserializer(annotation, rf)) - - return functools.partial(_deserialize_default, annotation) - - -def _deserialize_with_callable( - deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], - value: typing.Any, -): # pylint: disable=too-many-return-statements - try: - if value is None or isinstance(value, _Null): - return None - if isinstance(value, ET.Element): - if deserializer is str: - return value.text or "" - if deserializer is int: - return int(value.text) if value.text else None - if deserializer is float: - return float(value.text) if value.text else None - if deserializer is bool: - return value.text == "true" if value.text else None - if deserializer is None: - return value - if deserializer in [int, float, bool]: - return deserializer(value) - if isinstance(deserializer, CaseInsensitiveEnumMeta): - try: - return deserializer(value) - except ValueError: - # for unknown value, return raw value - return value - if isinstance(deserializer, type) and issubclass(deserializer, Model): - return deserializer._deserialize(value, []) - return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) - except Exception as e: - raise DeserializationError() from e - - -def _deserialize( - deserializer: typing.Any, - value: typing.Any, - module: typing.Optional[str] = None, - rf: typing.Optional["_RestField"] = None, - 
format: typing.Optional[str] = None, -) -> typing.Any: - if isinstance(value, PipelineResponse): - value = value.http_response.json() - if rf is None and format: - rf = _RestField(format=format) - if not isinstance(deserializer, functools.partial): - deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) - return _deserialize_with_callable(deserializer, value) - - -def _failsafe_deserialize( - deserializer: typing.Any, - response: HttpResponse, - module: typing.Optional[str] = None, - rf: typing.Optional["_RestField"] = None, - format: typing.Optional[str] = None, -) -> typing.Any: - try: - return _deserialize(deserializer, response.json(), module, rf, format) - except DeserializationError: - _LOGGER.warning( - "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True - ) - return None - - -def _failsafe_deserialize_xml( - deserializer: typing.Any, - response: HttpResponse, -) -> typing.Any: - try: - return _deserialize_xml(deserializer, response.text()) - except DeserializationError: - _LOGGER.warning( - "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True - ) - return None - - -class _RestField: - def __init__( - self, - *, - name: typing.Optional[str] = None, - type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin - is_discriminator: bool = False, - visibility: typing.Optional[list[str]] = None, - default: typing.Any = _UNSET, - format: typing.Optional[str] = None, - is_multipart_file_input: bool = False, - xml: typing.Optional[dict[str, typing.Any]] = None, - ): - self._type = type - self._rest_name_input = name - self._module: typing.Optional[str] = None - self._is_discriminator = is_discriminator - self._visibility = visibility - self._is_model = False - self._default = default - self._format = format - self._is_multipart_file_input = is_multipart_file_input - self._xml = xml if xml is not None else {} - - @property - def _class_type(self) -> typing.Any: - return getattr(self._type, "args", [None])[0] - - @property - def _rest_name(self) -> str: - if self._rest_name_input is None: - raise ValueError("Rest name was never set") - return self._rest_name_input - - def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin - # by this point, type and rest_name will have a value bc we default - # them in __new__ of the Model class - item = obj.get(self._rest_name) - if item is None: - return item - if self._is_model: - return item - return _deserialize(self._type, _serialize(item, self._format), rf=self) - - def __set__(self, obj: Model, value) -> None: - if value is None: - # we want to wipe out entries if users set attr to None - try: - obj.__delitem__(self._rest_name) - except KeyError: - pass - return - if self._is_model: - if not _is_model(value): - value = _deserialize(self._type, value) - obj.__setitem__(self._rest_name, value) - return - obj.__setitem__(self._rest_name, _serialize(value, self._format)) - - def _get_deserialize_callable_from_annotation( - self, annotation: typing.Any - ) -> 
typing.Optional[typing.Callable[[typing.Any], typing.Any]]: - return _get_deserialize_callable_from_annotation(annotation, self._module, self) - - -def rest_field( - *, - name: typing.Optional[str] = None, - type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin - visibility: typing.Optional[list[str]] = None, - default: typing.Any = _UNSET, - format: typing.Optional[str] = None, - is_multipart_file_input: bool = False, - xml: typing.Optional[dict[str, typing.Any]] = None, -) -> typing.Any: - return _RestField( - name=name, - type=type, - visibility=visibility, - default=default, - format=format, - is_multipart_file_input=is_multipart_file_input, - xml=xml, - ) - - -def rest_discriminator( - *, - name: typing.Optional[str] = None, - type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin - visibility: typing.Optional[list[str]] = None, - xml: typing.Optional[dict[str, typing.Any]] = None, -) -> typing.Any: - return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) - - -def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: - """Serialize a model to XML. - - :param Model model: The model to serialize. - :param bool exclude_readonly: Whether to exclude readonly properties. - :returns: The XML representation of the model. 
- :rtype: str - """ - return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore - - -def _get_element( - o: typing.Any, - exclude_readonly: bool = False, - parent_meta: typing.Optional[dict[str, typing.Any]] = None, - wrapped_element: typing.Optional[ET.Element] = None, -) -> typing.Union[ET.Element, list[ET.Element]]: - if _is_model(o): - model_meta = getattr(o, "_xml", {}) - - # if prop is a model, then use the prop element directly, else generate a wrapper of model - if wrapped_element is None: - wrapped_element = _create_xml_element( - model_meta.get("name", o.__class__.__name__), - model_meta.get("prefix"), - model_meta.get("ns"), - ) - - readonly_props = [] - if exclude_readonly: - readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] - - for k, v in o.items(): - # do not serialize readonly properties - if exclude_readonly and k in readonly_props: - continue - - prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) - if prop_rest_field: - prop_meta = getattr(prop_rest_field, "_xml").copy() - # use the wire name as xml name if no specific name is set - if prop_meta.get("name") is None: - prop_meta["name"] = k - else: - # additional properties will not have rest field, use the wire name as xml name - prop_meta = {"name": k} - - # if no ns for prop, use model's - if prop_meta.get("ns") is None and model_meta.get("ns"): - prop_meta["ns"] = model_meta.get("ns") - prop_meta["prefix"] = model_meta.get("prefix") - - if prop_meta.get("unwrapped", False): - # unwrapped could only set on array - wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) - elif prop_meta.get("text", False): - # text could only set on primitive type - wrapped_element.text = _get_primitive_type_value(v) - elif prop_meta.get("attribute", False): - xml_name = prop_meta.get("name", k) - if prop_meta.get("ns"): - ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore - 
xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore - # attribute should be primitive type - wrapped_element.set(xml_name, _get_primitive_type_value(v)) - else: - # other wrapped prop element - wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) - return wrapped_element - if isinstance(o, list): - return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore - if isinstance(o, dict): - result = [] - for k, v in o.items(): - result.append( - _get_wrapped_element( - v, - exclude_readonly, - { - "name": k, - "ns": parent_meta.get("ns") if parent_meta else None, - "prefix": parent_meta.get("prefix") if parent_meta else None, - }, - ) - ) - return result - - # primitive case need to create element based on parent_meta - if parent_meta: - return _get_wrapped_element( - o, - exclude_readonly, - { - "name": parent_meta.get("itemsName", parent_meta.get("name")), - "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), - "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), - }, - ) - - raise ValueError("Could not serialize value into xml: " + o) - - -def _get_wrapped_element( - v: typing.Any, - exclude_readonly: bool, - meta: typing.Optional[dict[str, typing.Any]], -) -> ET.Element: - wrapped_element = _create_xml_element( - meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None - ) - if isinstance(v, (dict, list)): - wrapped_element.extend(_get_element(v, exclude_readonly, meta)) - elif _is_model(v): - _get_element(v, exclude_readonly, meta, wrapped_element) - else: - wrapped_element.text = _get_primitive_type_value(v) - return wrapped_element - - -def _get_primitive_type_value(v) -> str: - if v is True: - return "true" - if v is False: - return "false" - if isinstance(v, _Null): - return "" - return str(v) - - -def _create_xml_element(tag, prefix=None, ns=None): - if prefix and ns: - ET.register_namespace(prefix, ns) - if ns: - return 
ET.Element("{" + ns + "}" + tag) - return ET.Element(tag) - - -def _deserialize_xml( - deserializer: typing.Any, - value: str, -) -> typing.Any: - element = ET.fromstring(value) # nosec - return _deserialize(deserializer, element) - - -def _convert_element(e: ET.Element): - # dict case - if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: - dict_result: dict[str, typing.Any] = {} - for child in e: - if dict_result.get(child.tag) is not None: - if isinstance(dict_result[child.tag], list): - dict_result[child.tag].append(_convert_element(child)) - else: - dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] - else: - dict_result[child.tag] = _convert_element(child) - dict_result.update(e.attrib) - return dict_result - # array case - if len(e) > 0: - array_result: list[typing.Any] = [] - for child in e: - array_result.append(_convert_element(child)) - return array_result - # primitive case - return e.text diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/serialization.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/serialization.py deleted file mode 100644 index 45a3e44e45cb..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/serialization.py +++ /dev/null @@ -1,2030 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -# pyright: reportUnnecessaryTypeIgnoreComment=false - -from base64 import b64decode, b64encode -import calendar -import datetime -import decimal -import email -from enum import Enum -import json -import logging -import re -import sys -import codecs -from typing import ( - Any, - cast, - Optional, - Union, - AnyStr, - IO, - Mapping, - Callable, - MutableMapping, -) - -try: - from urllib import quote # type: ignore -except ImportError: - from urllib.parse import quote -import xml.etree.ElementTree as ET - -import isodate # type: ignore -from typing_extensions import Self - -from azure.core.exceptions import DeserializationError, SerializationError -from azure.core.serialization import NULL as CoreNull - -_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") - -JSON = MutableMapping[str, Any] - - -class RawDeserializer: - - # Accept "text" because we're open minded people... - JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") - - # Name used in context - CONTEXT_NAME = "deserialized_data" - - @classmethod - def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: - """Decode data according to content-type. - - Accept a stream of data as well, but will be load at once in memory for now. - - If no content-type, will return the string version (not bytes, not stream) - - :param data: Input, could be bytes or stream (will be decoded with UTF8) or text - :type data: str or bytes or IO - :param str content_type: The content type. - :return: The deserialized data. - :rtype: object - """ - if hasattr(data, "read"): - # Assume a stream - data = cast(IO, data).read() - - if isinstance(data, bytes): - data_as_str = data.decode(encoding="utf-8-sig") - else: - # Explain to mypy the correct type. 
- data_as_str = cast(str, data) - - # Remove Byte Order Mark if present in string - data_as_str = data_as_str.lstrip(_BOM) - - if content_type is None: - return data - - if cls.JSON_REGEXP.match(content_type): - try: - return json.loads(data_as_str) - except ValueError as err: - raise DeserializationError("JSON is invalid: {}".format(err), err) from err - elif "xml" in (content_type or []): - try: - - try: - if isinstance(data, unicode): # type: ignore - # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string - data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore - except NameError: - pass - - return ET.fromstring(data_as_str) # nosec - except ET.ParseError as err: - # It might be because the server has an issue, and returned JSON with - # content-type XML.... - # So let's try a JSON load, and if it's still broken - # let's flow the initial exception - def _json_attemp(data): - try: - return True, json.loads(data) - except ValueError: - return False, None # Don't care about this one - - success, json_result = _json_attemp(data) - if success: - return json_result - # If i'm here, it's not JSON, it's not XML, let's scream - # and raise the last context in this block (the XML exception) - # The function hack is because Py2.7 messes up with exception - # context otherwise. - _LOGGER.critical("Wasn't XML not JSON, failing") - raise DeserializationError("XML is invalid") from err - elif content_type.startswith("text/"): - return data_as_str - raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) - - @classmethod - def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: - """Deserialize from HTTP response. - - Use bytes and headers to NOT use any requests/aiohttp or whatever - specific implementation. - Headers will tested for "content-type" - - :param bytes body_bytes: The body of the response. 
- :param dict headers: The headers of the response. - :returns: The deserialized data. - :rtype: object - """ - # Try to use content-type from headers if available - content_type = None - if "content-type" in headers: - content_type = headers["content-type"].split(";")[0].strip().lower() - # Ouch, this server did not declare what it sent... - # Let's guess it's JSON... - # Also, since Autorest was considering that an empty body was a valid JSON, - # need that test as well.... - else: - content_type = "application/json" - - if body_bytes: - return cls.deserialize_from_text(body_bytes, content_type) - return None - - -_LOGGER = logging.getLogger(__name__) - -try: - _long_type = long # type: ignore -except NameError: - _long_type = int - -TZ_UTC = datetime.timezone.utc - -_FLATTEN = re.compile(r"(? None: - self.additional_properties: Optional[dict[str, Any]] = {} - for k in kwargs: # pylint: disable=consider-using-dict-items - if k not in self._attribute_map: - _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) - elif k in self._validation and self._validation[k].get("readonly", False): - _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) - else: - setattr(self, k, kwargs[k]) - - def __eq__(self, other: Any) -> bool: - """Compare objects by comparing all attributes. - - :param object other: The object to compare - :returns: True if objects are equal - :rtype: bool - """ - if isinstance(other, self.__class__): - return self.__dict__ == other.__dict__ - return False - - def __ne__(self, other: Any) -> bool: - """Compare objects by comparing all attributes. 
- - :param object other: The object to compare - :returns: True if objects are not equal - :rtype: bool - """ - return not self.__eq__(other) - - def __str__(self) -> str: - return str(self.__dict__) - - @classmethod - def enable_additional_properties_sending(cls) -> None: - cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} - - @classmethod - def is_xml_model(cls) -> bool: - try: - cls._xml_map # type: ignore - except AttributeError: - return False - return True - - @classmethod - def _create_xml_node(cls): - """Create XML node. - - :returns: The XML node - :rtype: xml.etree.ElementTree.Element - """ - try: - xml_map = cls._xml_map # type: ignore - except AttributeError: - xml_map = {} - - return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) - - def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: - """Return the JSON that would be sent to server from this model. - - This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. - - If you want XML serialization, you can pass the kwargs is_xml=True. - - :param bool keep_readonly: If you want to serialize the readonly attributes - :returns: A dict JSON compatible object - :rtype: dict - """ - serializer = Serializer(self._infer_class_models()) - return serializer._serialize( # type: ignore # pylint: disable=protected-access - self, keep_readonly=keep_readonly, **kwargs - ) - - def as_dict( - self, - keep_readonly: bool = True, - key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, - **kwargs: Any - ) -> JSON: - """Return a dict that can be serialized using json.dump. - - Advanced usage might optionally use a callback as parameter: - - .. code::python - - def my_key_transformer(key, attr_desc, value): - return key - - Key is the attribute name used in Python. Attr_desc - is a dict of metadata. 
Currently contains 'type' with the - msrest type and 'key' with the RestAPI encoded key. - Value is the current value in this object. - - The string returned will be used to serialize the key. - If the return type is a list, this is considered hierarchical - result dict. - - See the three examples in this file: - - - attribute_transformer - - full_restapi_key_transformer - - last_restapi_key_transformer - - If you want XML serialization, you can pass the kwargs is_xml=True. - - :param bool keep_readonly: If you want to serialize the readonly attributes - :param function key_transformer: A key transformer function. - :returns: A dict JSON compatible object - :rtype: dict - """ - serializer = Serializer(self._infer_class_models()) - return serializer._serialize( # type: ignore # pylint: disable=protected-access - self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs - ) - - @classmethod - def _infer_class_models(cls): - try: - str_models = cls.__module__.rsplit(".", 1)[0] - models = sys.modules[str_models] - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - if cls.__name__ not in client_models: - raise ValueError("Not Autorest generated code") - except Exception: # pylint: disable=broad-exception-caught - # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. - client_models = {cls.__name__: cls} - return client_models - - @classmethod - def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: - """Parse a str using the RestAPI syntax and return a model. - - :param str data: A str using RestAPI structure. JSON by default. - :param str content_type: JSON by default, set application/xml if XML. 
- :returns: An instance of this model - :raises DeserializationError: if something went wrong - :rtype: Self - """ - deserializer = Deserializer(cls._infer_class_models()) - return deserializer(cls.__name__, data, content_type=content_type) # type: ignore - - @classmethod - def from_dict( - cls, - data: Any, - key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, - content_type: Optional[str] = None, - ) -> Self: - """Parse a dict using given key extractor return a model. - - By default consider key - extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor - and last_rest_key_case_insensitive_extractor) - - :param dict data: A dict using RestAPI structure - :param function key_extractors: A key extractor function. - :param str content_type: JSON by default, set application/xml if XML. - :returns: An instance of this model - :raises DeserializationError: if something went wrong - :rtype: Self - """ - deserializer = Deserializer(cls._infer_class_models()) - deserializer.key_extractors = ( # type: ignore - [ # type: ignore - attribute_key_case_insensitive_extractor, - rest_key_case_insensitive_extractor, - last_rest_key_case_insensitive_extractor, - ] - if key_extractors is None - else key_extractors - ) - return deserializer(cls.__name__, data, content_type=content_type) # type: ignore - - @classmethod - def _flatten_subtype(cls, key, objects): - if "_subtype_map" not in cls.__dict__: - return {} - result = dict(cls._subtype_map[key]) - for valuetype in cls._subtype_map[key].values(): - result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access - return result - - @classmethod - def _classify(cls, response, objects): - """Check the class _subtype_map for any child classes. - We want to ignore any inherited _subtype_maps. 
- - :param dict response: The initial data - :param dict objects: The class objects - :returns: The class to be used - :rtype: class - """ - for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): - subtype_value = None - - if not isinstance(response, ET.Element): - rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] - subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) - else: - subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) - if subtype_value: - # Try to match base class. Can be class name only - # (bug to fix in Autorest to support x-ms-discriminator-name) - if cls.__name__ == subtype_value: - return cls - flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) - try: - return objects[flatten_mapping_type[subtype_value]] # type: ignore - except KeyError: - _LOGGER.warning( - "Subtype value %s has no mapping, use base class %s.", - subtype_value, - cls.__name__, - ) - break - else: - _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) - break - return cls - - @classmethod - def _get_rest_key_parts(cls, attr_key): - """Get the RestAPI key of this attr, split it and decode part - :param str attr_key: Attribute key must be in attribute_map. - :returns: A list of RestAPI part - :rtype: list - """ - rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) - return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] - - -def _decode_attribute_map_key(key): - """This decode a key in an _attribute_map to the actual key we want to look at - inside the received data. 
- - :param str key: A key string from the generated code - :returns: The decoded key - :rtype: str - """ - return key.replace("\\.", ".") - - -class Serializer: # pylint: disable=too-many-public-methods - """Request object model serializer.""" - - basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - - _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} - days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} - months = { - 1: "Jan", - 2: "Feb", - 3: "Mar", - 4: "Apr", - 5: "May", - 6: "Jun", - 7: "Jul", - 8: "Aug", - 9: "Sep", - 10: "Oct", - 11: "Nov", - 12: "Dec", - } - validation = { - "min_length": lambda x, y: len(x) < y, - "max_length": lambda x, y: len(x) > y, - "minimum": lambda x, y: x < y, - "maximum": lambda x, y: x > y, - "minimum_ex": lambda x, y: x <= y, - "maximum_ex": lambda x, y: x >= y, - "min_items": lambda x, y: len(x) < y, - "max_items": lambda x, y: len(x) > y, - "pattern": lambda x, y: not re.match(y, x, re.UNICODE), - "unique": lambda x, y: len(x) != len(set(x)), - "multiple": lambda x, y: x % y != 0, - } - - def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: - self.serialize_type = { - "iso-8601": Serializer.serialize_iso, - "rfc-1123": Serializer.serialize_rfc, - "unix-time": Serializer.serialize_unix, - "duration": Serializer.serialize_duration, - "date": Serializer.serialize_date, - "time": Serializer.serialize_time, - "decimal": Serializer.serialize_decimal, - "long": Serializer.serialize_long, - "bytearray": Serializer.serialize_bytearray, - "base64": Serializer.serialize_base64, - "object": self.serialize_object, - "[]": self.serialize_iter, - "{}": self.serialize_dict, - } - self.dependencies: dict[str, type] = dict(classes) if classes else {} - self.key_transformer = full_restapi_key_transformer - self.client_side_validation = True - - def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals - self, 
target_obj, data_type=None, **kwargs - ): - """Serialize data into a string according to type. - - :param object target_obj: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str, dict - :raises SerializationError: if serialization fails. - :returns: The serialized data. - """ - key_transformer = kwargs.get("key_transformer", self.key_transformer) - keep_readonly = kwargs.get("keep_readonly", False) - if target_obj is None: - return None - - attr_name = None - class_name = target_obj.__class__.__name__ - - if data_type: - return self.serialize_data(target_obj, data_type, **kwargs) - - if not hasattr(target_obj, "_attribute_map"): - data_type = type(target_obj).__name__ - if data_type in self.basic_types.values(): - return self.serialize_data(target_obj, data_type, **kwargs) - - # Force "is_xml" kwargs if we detect a XML model - try: - is_xml_model_serialization = kwargs["is_xml"] - except KeyError: - is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) - - serialized = {} - if is_xml_model_serialization: - serialized = target_obj._create_xml_node() # pylint: disable=protected-access - try: - attributes = target_obj._attribute_map # pylint: disable=protected-access - for attr, attr_desc in attributes.items(): - attr_name = attr - if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access - attr_name, {} - ).get("readonly", False): - continue - - if attr_name == "additional_properties" and attr_desc["key"] == "": - if target_obj.additional_properties is not None: - serialized |= target_obj.additional_properties - continue - try: - - orig_attr = getattr(target_obj, attr) - if is_xml_model_serialization: - pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" - else: # JSON - keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) - keys = keys if isinstance(keys, list) else [keys] - - kwargs["serialization_ctxt"] = attr_desc - new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) - - if is_xml_model_serialization: - xml_desc = attr_desc.get("xml", {}) - xml_name = xml_desc.get("name", attr_desc["key"]) - xml_prefix = xml_desc.get("prefix", None) - xml_ns = xml_desc.get("ns", None) - if xml_desc.get("attr", False): - if xml_ns: - ET.register_namespace(xml_prefix, xml_ns) - xml_name = "{{{}}}{}".format(xml_ns, xml_name) - serialized.set(xml_name, new_attr) # type: ignore - continue - if xml_desc.get("text", False): - serialized.text = new_attr # type: ignore - continue - if isinstance(new_attr, list): - serialized.extend(new_attr) # type: ignore - elif isinstance(new_attr, ET.Element): - # If the down XML has no XML/Name, - # we MUST replace the tag with the local tag. But keeping the namespaces. 
- if "name" not in getattr(orig_attr, "_xml_map", {}): - splitted_tag = new_attr.tag.split("}") - if len(splitted_tag) == 2: # Namespace - new_attr.tag = "}".join([splitted_tag[0], xml_name]) - else: - new_attr.tag = xml_name - serialized.append(new_attr) # type: ignore - else: # That's a basic type - # Integrate namespace if necessary - local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) - local_node.text = str(new_attr) - serialized.append(local_node) # type: ignore - else: # JSON - for k in reversed(keys): # type: ignore - new_attr = {k: new_attr} - - _new_attr = new_attr - _serialized = serialized - for k in keys: # type: ignore - if k not in _serialized: - _serialized.update(_new_attr) # type: ignore - _new_attr = _new_attr[k] # type: ignore - _serialized = _serialized[k] - except ValueError as err: - if isinstance(err, SerializationError): - raise - - except (AttributeError, KeyError, TypeError) as err: - msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) - raise SerializationError(msg) from err - return serialized - - def body(self, data, data_type, **kwargs): - """Serialize data intended for a request body. - - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: dict - :raises SerializationError: if serialization fails. 
- :raises ValueError: if data is None - :returns: The serialized request body - """ - - # Just in case this is a dict - internal_data_type_str = data_type.strip("[]{}") - internal_data_type = self.dependencies.get(internal_data_type_str, None) - try: - is_xml_model_serialization = kwargs["is_xml"] - except KeyError: - if internal_data_type and issubclass(internal_data_type, Model): - is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) - else: - is_xml_model_serialization = False - if internal_data_type and not isinstance(internal_data_type, Enum): - try: - deserializer = Deserializer(self.dependencies) - # Since it's on serialization, it's almost sure that format is not JSON REST - # We're not able to deal with additional properties for now. - deserializer.additional_properties_detection = False - if is_xml_model_serialization: - deserializer.key_extractors = [ # type: ignore - attribute_key_case_insensitive_extractor, - ] - else: - deserializer.key_extractors = [ - rest_key_case_insensitive_extractor, - attribute_key_case_insensitive_extractor, - last_rest_key_case_insensitive_extractor, - ] - data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access - except DeserializationError as err: - raise SerializationError("Unable to build a model: " + str(err)) from err - - return self._serialize(data, data_type, **kwargs) - - def url(self, name, data, data_type, **kwargs): - """Serialize data intended for a URL path. - - :param str name: The name of the URL path parameter. - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str - :returns: The serialized URL path - :raises TypeError: if serialization fails. 
- :raises ValueError: if data is None - """ - try: - output = self.serialize_data(data, data_type, **kwargs) - if data_type == "bool": - output = json.dumps(output) - - if kwargs.get("skip_quote") is True: - output = str(output) - output = output.replace("{", quote("{")).replace("}", quote("}")) - else: - output = quote(str(output), safe="") - except SerializationError as exc: - raise TypeError("{} must be type {}.".format(name, data_type)) from exc - return output - - def query(self, name, data, data_type, **kwargs): - """Serialize data intended for a URL query. - - :param str name: The name of the query parameter. - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str, list - :raises TypeError: if serialization fails. - :raises ValueError: if data is None - :returns: The serialized query parameter - """ - try: - # Treat the list aside, since we don't want to encode the div separator - if data_type.startswith("["): - internal_data_type = data_type[1:-1] - do_quote = not kwargs.get("skip_quote", False) - return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) - - # Not a list, regular serialization - output = self.serialize_data(data, data_type, **kwargs) - if data_type == "bool": - output = json.dumps(output) - if kwargs.get("skip_quote") is True: - output = str(output) - else: - output = quote(str(output), safe="") - except SerializationError as exc: - raise TypeError("{} must be type {}.".format(name, data_type)) from exc - return str(output) - - def header(self, name, data, data_type, **kwargs): - """Serialize data intended for a request header. - - :param str name: The name of the header. - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str - :raises TypeError: if serialization fails. 
- :raises ValueError: if data is None - :returns: The serialized header - """ - try: - if data_type in ["[str]"]: - data = ["" if d is None else d for d in data] - - output = self.serialize_data(data, data_type, **kwargs) - if data_type == "bool": - output = json.dumps(output) - except SerializationError as exc: - raise TypeError("{} must be type {}.".format(name, data_type)) from exc - return str(output) - - def serialize_data(self, data, data_type, **kwargs): - """Serialize generic data according to supplied data type. - - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :raises AttributeError: if required data is None. - :raises ValueError: if data is None - :raises SerializationError: if serialization fails. - :returns: The serialized data. - :rtype: str, int, float, bool, dict, list - """ - if data is None: - raise ValueError("No value for given attribute") - - try: - if data is CoreNull: - return None - if data_type in self.basic_types.values(): - return self.serialize_basic(data, data_type, **kwargs) - - if data_type in self.serialize_type: - return self.serialize_type[data_type](data, **kwargs) - - # If dependencies is empty, try with current data class - # It has to be a subclass of Enum anyway - enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) - if issubclass(enum_type, Enum): - return Serializer.serialize_enum(data, enum_obj=enum_type) - - iter_type = data_type[0] + data_type[-1] - if iter_type in self.serialize_type: - return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) - - except (ValueError, TypeError) as err: - msg = "Unable to serialize value: {!r} as type: {!r}." 
- raise SerializationError(msg.format(data, data_type)) from err - return self._serialize(data, **kwargs) - - @classmethod - def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements - custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) - if custom_serializer: - return custom_serializer - if kwargs.get("is_xml", False): - return cls._xml_basic_types_serializers.get(data_type) - - @classmethod - def serialize_basic(cls, data, data_type, **kwargs): - """Serialize basic builting data type. - Serializes objects to str, int, float or bool. - - Possible kwargs: - - basic_types_serializers dict[str, callable] : If set, use the callable as serializer - - is_xml bool : If set, use xml_basic_types_serializers - - :param obj data: Object to be serialized. - :param str data_type: Type of object in the iterable. - :rtype: str, int, float, bool - :return: serialized object - """ - custom_serializer = cls._get_custom_serializers(data_type, **kwargs) - if custom_serializer: - return custom_serializer(data) - if data_type == "str": - return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec # pylint: disable=eval-used - - @classmethod - def serialize_unicode(cls, data): - """Special handling for serializing unicode strings in Py2. - Encode to UTF-8 if unicode, otherwise handle as a str. - - :param str data: Object to be serialized. - :rtype: str - :return: serialized object - """ - try: # If I received an enum, return its value - return data.value - except AttributeError: - pass - - try: - if isinstance(data, unicode): # type: ignore - # Don't change it, JSON and XML ElementTree are totally able - # to serialize correctly u'' strings - return data - except NameError: - return str(data) - return str(data) - - def serialize_iter(self, data, iter_type, div=None, **kwargs): - """Serialize iterable. 
- - Supported kwargs: - - serialization_ctxt dict : The current entry of _attribute_map, or same format. - serialization_ctxt['type'] should be same as data_type. - - is_xml bool : If set, serialize as XML - - :param list data: Object to be serialized. - :param str iter_type: Type of object in the iterable. - :param str div: If set, this str will be used to combine the elements - in the iterable into a combined string. Default is 'None'. - Defaults to False. - :rtype: list, str - :return: serialized iterable - """ - if isinstance(data, str): - raise SerializationError("Refuse str type as a valid iter type.") - - serialization_ctxt = kwargs.get("serialization_ctxt", {}) - is_xml = kwargs.get("is_xml", False) - - serialized = [] - for d in data: - try: - serialized.append(self.serialize_data(d, iter_type, **kwargs)) - except ValueError as err: - if isinstance(err, SerializationError): - raise - serialized.append(None) - - if kwargs.get("do_quote", False): - serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] - - if div: - serialized = ["" if s is None else str(s) for s in serialized] - serialized = div.join(serialized) - - if "xml" in serialization_ctxt or is_xml: - # XML serialization is more complicated - xml_desc = serialization_ctxt.get("xml", {}) - xml_name = xml_desc.get("name") - if not xml_name: - xml_name = serialization_ctxt["key"] - - # Create a wrap node if necessary (use the fact that Element and list have "append") - is_wrapped = xml_desc.get("wrapped", False) - node_name = xml_desc.get("itemsName", xml_name) - if is_wrapped: - final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) - else: - final_result = [] - # All list elements to "local_node" - for el in serialized: - if isinstance(el, ET.Element): - el_node = el - else: - el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) - if el is not None: # Otherwise it writes "None" :-p - 
el_node.text = str(el) - final_result.append(el_node) - return final_result - return serialized - - def serialize_dict(self, attr, dict_type, **kwargs): - """Serialize a dictionary of objects. - - :param dict attr: Object to be serialized. - :param str dict_type: Type of object in the dictionary. - :rtype: dict - :return: serialized dictionary - """ - serialization_ctxt = kwargs.get("serialization_ctxt", {}) - serialized = {} - for key, value in attr.items(): - try: - serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) - except ValueError as err: - if isinstance(err, SerializationError): - raise - serialized[self.serialize_unicode(key)] = None - - if "xml" in serialization_ctxt: - # XML serialization is more complicated - xml_desc = serialization_ctxt["xml"] - xml_name = xml_desc["name"] - - final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) - for key, value in serialized.items(): - ET.SubElement(final_result, key).text = value - return final_result - - return serialized - - def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements - """Serialize a generic object. - This will be handled as a dictionary. If object passed in is not - a basic type (str, int, float, dict, list) it will simply be - cast to str. - - :param dict attr: Object to be serialized. 
- :rtype: dict or str - :return: serialized object - """ - if attr is None: - return None - if isinstance(attr, ET.Element): - return attr - obj_type = type(attr) - if obj_type in self.basic_types: - return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) - if obj_type is _long_type: - return self.serialize_long(attr) - if obj_type is str: - return self.serialize_unicode(attr) - if obj_type is datetime.datetime: - return self.serialize_iso(attr) - if obj_type is datetime.date: - return self.serialize_date(attr) - if obj_type is datetime.time: - return self.serialize_time(attr) - if obj_type is datetime.timedelta: - return self.serialize_duration(attr) - if obj_type is decimal.Decimal: - return self.serialize_decimal(attr) - - # If it's a model or I know this dependency, serialize as a Model - if obj_type in self.dependencies.values() or isinstance(attr, Model): - return self._serialize(attr) - - if obj_type == dict: - serialized = {} - for key, value in attr.items(): - try: - serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) - except ValueError: - serialized[self.serialize_unicode(key)] = None - return serialized - - if obj_type == list: - serialized = [] - for obj in attr: - try: - serialized.append(self.serialize_object(obj, **kwargs)) - except ValueError: - pass - return serialized - return str(attr) - - @staticmethod - def serialize_enum(attr, enum_obj=None): - try: - result = attr.value - except AttributeError: - result = attr - try: - enum_obj(result) # type: ignore - return result - except ValueError as exc: - for enum_value in enum_obj: # type: ignore - if enum_value.value.lower() == str(attr).lower(): - return enum_value.value - error = "{!r} is not valid value for enum {!r}" - raise SerializationError(error.format(attr, enum_obj)) from exc - - @staticmethod - def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument - """Serialize bytearray into base-64 string. 
- - :param str attr: Object to be serialized. - :rtype: str - :return: serialized base64 - """ - return b64encode(attr).decode() - - @staticmethod - def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument - """Serialize str into base-64 string. - - :param str attr: Object to be serialized. - :rtype: str - :return: serialized base64 - """ - encoded = b64encode(attr).decode("ascii") - return encoded.strip("=").replace("+", "-").replace("/", "_") - - @staticmethod - def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Decimal object to float. - - :param decimal attr: Object to be serialized. - :rtype: float - :return: serialized decimal - """ - return float(attr) - - @staticmethod - def serialize_long(attr, **kwargs): # pylint: disable=unused-argument - """Serialize long (Py2) or int (Py3). - - :param int attr: Object to be serialized. - :rtype: int/long - :return: serialized long - """ - return _long_type(attr) - - @staticmethod - def serialize_date(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Date object into ISO-8601 formatted string. - - :param Date attr: Object to be serialized. - :rtype: str - :return: serialized date - """ - if isinstance(attr, str): - attr = isodate.parse_date(attr) - t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) - return t - - @staticmethod - def serialize_time(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Time object into ISO-8601 formatted string. - - :param datetime.time attr: Object to be serialized. - :rtype: str - :return: serialized time - """ - if isinstance(attr, str): - attr = isodate.parse_time(attr) - t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) - if attr.microsecond: - t += ".{:02}".format(attr.microsecond) - return t - - @staticmethod - def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument - """Serialize TimeDelta object into ISO-8601 formatted string. 
- - :param TimeDelta attr: Object to be serialized. - :rtype: str - :return: serialized duration - """ - if isinstance(attr, str): - attr = isodate.parse_duration(attr) - return isodate.duration_isoformat(attr) - - @staticmethod - def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Datetime object into RFC-1123 formatted string. - - :param Datetime attr: Object to be serialized. - :rtype: str - :raises TypeError: if format invalid. - :return: serialized rfc - """ - try: - if not attr.tzinfo: - _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") - utc = attr.utctimetuple() - except AttributeError as exc: - raise TypeError("RFC1123 object must be valid Datetime object.") from exc - - return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( - Serializer.days[utc.tm_wday], - utc.tm_mday, - Serializer.months[utc.tm_mon], - utc.tm_year, - utc.tm_hour, - utc.tm_min, - utc.tm_sec, - ) - - @staticmethod - def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Datetime object into ISO-8601 formatted string. - - :param Datetime attr: Object to be serialized. - :rtype: str - :raises SerializationError: if format invalid. - :return: serialized iso - """ - if isinstance(attr, str): - attr = isodate.parse_datetime(attr) - try: - if not attr.tzinfo: - _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") - utc = attr.utctimetuple() - if utc.tm_year > 9999 or utc.tm_year < 1: - raise OverflowError("Hit max or min date") - - microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") - if microseconds: - microseconds = "." + microseconds - date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( - utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec - ) - return date + microseconds + "Z" - except (ValueError, OverflowError) as err: - msg = "Unable to serialize datetime object." 
- raise SerializationError(msg) from err - except AttributeError as err: - msg = "ISO-8601 object must be valid Datetime object." - raise TypeError(msg) from err - - @staticmethod - def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Datetime object into IntTime format. - This is represented as seconds. - - :param Datetime attr: Object to be serialized. - :rtype: int - :raises SerializationError: if format invalid - :return: serialied unix - """ - if isinstance(attr, int): - return attr - try: - if not attr.tzinfo: - _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") - return int(calendar.timegm(attr.utctimetuple())) - except AttributeError as exc: - raise TypeError("Unix time object must be valid Datetime object.") from exc - - -def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument - key = attr_desc["key"] - working_data = data - - while "." in key: - # Need the cast, as for some reasons "split" is typed as list[str | Any] - dict_keys = cast(list[str], _FLATTEN.split(key)) - if len(dict_keys) == 1: - key = _decode_attribute_map_key(dict_keys[0]) - break - working_key = _decode_attribute_map_key(dict_keys[0]) - working_data = working_data.get(working_key, data) - if working_data is None: - # If at any point while following flatten JSON path see None, it means - # that all properties under are None as well - return None - key = ".".join(dict_keys[1:]) - - return working_data.get(key) - - -def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements - attr, attr_desc, data -): - key = attr_desc["key"] - working_data = data - - while "." 
in key: - dict_keys = _FLATTEN.split(key) - if len(dict_keys) == 1: - key = _decode_attribute_map_key(dict_keys[0]) - break - working_key = _decode_attribute_map_key(dict_keys[0]) - working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) - if working_data is None: - # If at any point while following flatten JSON path see None, it means - # that all properties under are None as well - return None - key = ".".join(dict_keys[1:]) - - if working_data: - return attribute_key_case_insensitive_extractor(key, None, working_data) - - -def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument - """Extract the attribute in "data" based on the last part of the JSON path key. - - :param str attr: The attribute to extract - :param dict attr_desc: The attribute description - :param dict data: The data to extract from - :rtype: object - :returns: The extracted attribute - """ - key = attr_desc["key"] - dict_keys = _FLATTEN.split(key) - return attribute_key_extractor(dict_keys[-1], None, data) - - -def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument - """Extract the attribute in "data" based on the last part of the JSON path key. 
- - This is the case insensitive version of "last_rest_key_extractor" - :param str attr: The attribute to extract - :param dict attr_desc: The attribute description - :param dict data: The data to extract from - :rtype: object - :returns: The extracted attribute - """ - key = attr_desc["key"] - dict_keys = _FLATTEN.split(key) - return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) - - -def attribute_key_extractor(attr, _, data): - return data.get(attr) - - -def attribute_key_case_insensitive_extractor(attr, _, data): - found_key = None - lower_attr = attr.lower() - for key in data: - if lower_attr == key.lower(): - found_key = key - break - - return data.get(found_key) - - -def _extract_name_from_internal_type(internal_type): - """Given an internal type XML description, extract correct XML name with namespace. - - :param dict internal_type: An model type - :rtype: tuple - :returns: A tuple XML name + namespace dict - """ - internal_type_xml_map = getattr(internal_type, "_xml_map", {}) - xml_name = internal_type_xml_map.get("name", internal_type.__name__) - xml_ns = internal_type_xml_map.get("ns", None) - if xml_ns: - xml_name = "{{{}}}{}".format(xml_ns, xml_name) - return xml_name - - -def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements - if isinstance(data, dict): - return None - - # Test if this model is XML ready first - if not isinstance(data, ET.Element): - return None - - xml_desc = attr_desc.get("xml", {}) - xml_name = xml_desc.get("name", attr_desc["key"]) - - # Look for a children - is_iter_type = attr_desc["type"].startswith("[") - is_wrapped = xml_desc.get("wrapped", False) - internal_type = attr_desc.get("internalType", None) - internal_type_xml_map = getattr(internal_type, "_xml_map", {}) - - # Integrate namespace if necessary - xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) - if xml_ns: - xml_name = "{{{}}}{}".format(xml_ns, xml_name) - - # If it's an 
attribute, that's simple - if xml_desc.get("attr", False): - return data.get(xml_name) - - # If it's x-ms-text, that's simple too - if xml_desc.get("text", False): - return data.text - - # Scenario where I take the local name: - # - Wrapped node - # - Internal type is an enum (considered basic types) - # - Internal type has no XML/Name node - if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): - children = data.findall(xml_name) - # If internal type has a local name and it's not a list, I use that name - elif not is_iter_type and internal_type and "name" in internal_type_xml_map: - xml_name = _extract_name_from_internal_type(internal_type) - children = data.findall(xml_name) - # That's an array - else: - if internal_type: # Complex type, ignore itemsName and use the complex type name - items_name = _extract_name_from_internal_type(internal_type) - else: - items_name = xml_desc.get("itemsName", xml_name) - children = data.findall(items_name) - - if len(children) == 0: - if is_iter_type: - if is_wrapped: - return None # is_wrapped no node, we want None - return [] # not wrapped, assume empty list - return None # Assume it's not there, maybe an optional node. - - # If is_iter_type and not wrapped, return all found children - if is_iter_type: - if not is_wrapped: - return children - # Iter and wrapped, should have found one node only (the wrap one) - if len(children) != 1: - raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( - xml_name - ) - ) - return list(children[0]) # Might be empty list and that's ok. - - # Here it's not a itertype, we should have found one element only or empty - if len(children) > 1: - raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) - return children[0] - - -class Deserializer: - """Response object model deserializer. 
- - :param dict classes: Class type dictionary for deserializing complex types. - :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. - """ - - basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - - valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") - - def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: - self.deserialize_type = { - "iso-8601": Deserializer.deserialize_iso, - "rfc-1123": Deserializer.deserialize_rfc, - "unix-time": Deserializer.deserialize_unix, - "duration": Deserializer.deserialize_duration, - "date": Deserializer.deserialize_date, - "time": Deserializer.deserialize_time, - "decimal": Deserializer.deserialize_decimal, - "long": Deserializer.deserialize_long, - "bytearray": Deserializer.deserialize_bytearray, - "base64": Deserializer.deserialize_base64, - "object": self.deserialize_object, - "[]": self.deserialize_iter, - "{}": self.deserialize_dict, - } - self.deserialize_expected_types = { - "duration": (isodate.Duration, datetime.timedelta), - "iso-8601": (datetime.datetime), - } - self.dependencies: dict[str, type] = dict(classes) if classes else {} - self.key_extractors = [rest_key_extractor, xml_key_extractor] - # Additional properties only works if the "rest_key_extractor" is used to - # extract the keys. Making it to work whatever the key extractor is too much - # complicated, with no real scenario for now. - # So adding a flag to disable additional properties detection. This flag should be - # used if your expect the deserialization to NOT come from a JSON REST syntax. - # Otherwise, result are unexpected - self.additional_properties_detection = True - - def __call__(self, target_obj, response_data, content_type=None): - """Call the deserializer to process a REST response. - - :param str target_obj: Target data type to deserialize to. - :param requests.Response response_data: REST response object. 
- :param str content_type: Swagger "produces" if available. - :raises DeserializationError: if deserialization fails. - :return: Deserialized object. - :rtype: object - """ - data = self._unpack_content(response_data, content_type) - return self._deserialize(target_obj, data) - - def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements - """Call the deserializer on a model. - - Data needs to be already deserialized as JSON or XML ElementTree - - :param str target_obj: Target data type to deserialize to. - :param object data: Object to deserialize. - :raises DeserializationError: if deserialization fails. - :return: Deserialized object. - :rtype: object - """ - # This is already a model, go recursive just in case - if hasattr(data, "_attribute_map"): - constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] - try: - for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access - if attr in constants: - continue - value = getattr(data, attr) - if value is None: - continue - local_type = mapconfig["type"] - internal_data_type = local_type.strip("[]{}") - if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): - continue - setattr(data, attr, self._deserialize(local_type, value)) - return data - except AttributeError: - return - - response, class_name = self._classify_target(target_obj, data) - - if isinstance(response, str): - return self.deserialize_data(data, response) - if isinstance(response, type) and issubclass(response, Enum): - return self.deserialize_enum(data, response) - - if data is None or data is CoreNull: - return data - try: - attributes = response._attribute_map # type: ignore # pylint: disable=protected-access - d_attrs = {} - for attr, attr_desc in attributes.items(): - # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
- if attr == "additional_properties" and attr_desc["key"] == "": - continue - raw_value = None - # Enhance attr_desc with some dynamic data - attr_desc = attr_desc.copy() # Do a copy, do not change the real one - internal_data_type = attr_desc["type"].strip("[]{}") - if internal_data_type in self.dependencies: - attr_desc["internalType"] = self.dependencies[internal_data_type] - - for key_extractor in self.key_extractors: - found_value = key_extractor(attr, attr_desc, data) - if found_value is not None: - if raw_value is not None and raw_value != found_value: - msg = ( - "Ignoring extracted value '%s' from %s for key '%s'" - " (duplicate extraction, follow extractors order)" - ) - _LOGGER.warning(msg, found_value, key_extractor, attr) - continue - raw_value = found_value - - value = self.deserialize_data(raw_value, attr_desc["type"]) - d_attrs[attr] = value - except (AttributeError, TypeError, KeyError) as err: - msg = "Unable to deserialize to object: " + class_name # type: ignore - raise DeserializationError(msg) from err - additional_properties = self._build_additional_properties(attributes, data) - return self._instantiate_model(response, d_attrs, additional_properties) - - def _build_additional_properties(self, attribute_map, data): - if not self.additional_properties_detection: - return None - if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": - # Check empty string. 
If it's not empty, someone has a real "additionalProperties" - return None - if isinstance(data, ET.Element): - data = {el.tag: el.text for el in data} - - known_keys = { - _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) - for desc in attribute_map.values() - if desc["key"] != "" - } - present_keys = set(data.keys()) - missing_keys = present_keys - known_keys - return {key: data[key] for key in missing_keys} - - def _classify_target(self, target, data): - """Check to see whether the deserialization target object can - be classified into a subclass. - Once classification has been determined, initialize object. - - :param str target: The target object type to deserialize to. - :param str/dict data: The response data to deserialize. - :return: The classified target object and its class name. - :rtype: tuple - """ - if target is None: - return None, None - - if isinstance(target, str): - try: - target = self.dependencies[target] - except KeyError: - return target, target - - try: - target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access - except AttributeError: - pass # Target is not a Model, no classify - return target, target.__class__.__name__ # type: ignore - - def failsafe_deserialize(self, target_obj, data, content_type=None): - """Ignores any errors encountered in deserialization, - and falls back to not deserializing the object. Recommended - for use in error deserialization, as we want to return the - HttpResponseError to users, and not have them deal with - a deserialization error. - - :param str target_obj: The target object type to deserialize to. - :param str/dict data: The response data to deserialize. - :param str content_type: Swagger "produces" if available. - :return: Deserialized object. - :rtype: object - """ - try: - return self(target_obj, data, content_type=content_type) - except: # pylint: disable=bare-except - _LOGGER.debug( - "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True - ) - return None - - @staticmethod - def _unpack_content(raw_data, content_type=None): - """Extract the correct structure for deserialization. - - If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. - if we can't, raise. Your Pipeline should have a RawDeserializer. - - If not a pipeline response and raw_data is bytes or string, use content-type - to decode it. If no content-type, try JSON. - - If raw_data is something else, bypass all logic and return it directly. - - :param obj raw_data: Data to be processed. - :param str content_type: How to parse if raw_data is a string/bytes. - :raises JSONDecodeError: If JSON is requested and parsing is impossible. - :raises UnicodeDecodeError: If bytes is not UTF8 - :rtype: object - :return: Unpacked content. - """ - # Assume this is enough to detect a Pipeline Response without importing it - context = getattr(raw_data, "context", {}) - if context: - if RawDeserializer.CONTEXT_NAME in context: - return context[RawDeserializer.CONTEXT_NAME] - raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") - - # Assume this is enough to recognize universal_http.ClientResponse without importing it - if hasattr(raw_data, "body"): - return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) - - # Assume this enough to recognize requests.Response without importing it. - if hasattr(raw_data, "_content_consumed"): - return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) - - if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): - return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore - return raw_data - - def _instantiate_model(self, response, attrs, additional_properties=None): - """Instantiate a response model passing in deserialized args. - - :param Response response: The response model class. 
- :param dict attrs: The deserialized response attributes. - :param dict additional_properties: Additional properties to be set. - :rtype: Response - :return: The instantiated response model. - """ - if callable(response): - subtype = getattr(response, "_subtype_map", {}) - try: - readonly = [ - k - for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore - if v.get("readonly") - ] - const = [ - k - for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore - if v.get("constant") - ] - kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} - response_obj = response(**kwargs) - for attr in readonly: - setattr(response_obj, attr, attrs.get(attr)) - if additional_properties: - response_obj.additional_properties = additional_properties # type: ignore - return response_obj - except TypeError as err: - msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore - raise DeserializationError(msg + str(err)) from err - else: - try: - for attr, value in attrs.items(): - setattr(response, attr, value) - return response - except Exception as exp: - msg = "Unable to populate response model. " - msg += "Type: {}, Error: {}".format(type(response), exp) - raise DeserializationError(msg) from exp - - def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements - """Process data for deserialization according to data type. - - :param str data: The response string to be deserialized. - :param str data_type: The type to deserialize to. - :raises DeserializationError: if deserialization fails. - :return: Deserialized object. 
- :rtype: object - """ - if data is None: - return data - - try: - if not data_type: - return data - if data_type in self.basic_types.values(): - return self.deserialize_basic(data, data_type) - if data_type in self.deserialize_type: - if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): - return data - - is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment - "object", - "[]", - r"{}", - ] - if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: - return None - data_val = self.deserialize_type[data_type](data) - return data_val - - iter_type = data_type[0] + data_type[-1] - if iter_type in self.deserialize_type: - return self.deserialize_type[iter_type](data, data_type[1:-1]) - - obj_type = self.dependencies[data_type] - if issubclass(obj_type, Enum): - if isinstance(data, ET.Element): - data = data.text - return self.deserialize_enum(data, obj_type) - - except (ValueError, TypeError, AttributeError) as err: - msg = "Unable to deserialize response data." - msg += " Data: {}, {}".format(data, data_type) - raise DeserializationError(msg) from err - return self._deserialize(obj_type, data) - - def deserialize_iter(self, attr, iter_type): - """Deserialize an iterable. - - :param list attr: Iterable to be deserialized. - :param str iter_type: The type of object in the iterable. - :return: Deserialized iterable. - :rtype: list - """ - if attr is None: - return None - if isinstance(attr, ET.Element): # If I receive an element here, get the children - attr = list(attr) - if not isinstance(attr, (list, set)): - raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) - return [self.deserialize_data(a, iter_type) for a in attr] - - def deserialize_dict(self, attr, dict_type): - """Deserialize a dictionary. - - :param dict/list attr: Dictionary to be deserialized. Also accepts - a list of key, value pairs. 
- :param str dict_type: The object type of the items in the dictionary. - :return: Deserialized dictionary. - :rtype: dict - """ - if isinstance(attr, list): - return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} - - if isinstance(attr, ET.Element): - # Transform value into {"Key": "value"} - attr = {el.tag: el.text for el in attr} - return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} - - def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements - """Deserialize a generic object. - This will be handled as a dictionary. - - :param dict attr: Dictionary to be deserialized. - :return: Deserialized object. - :rtype: dict - :raises TypeError: if non-builtin datatype encountered. - """ - if attr is None: - return None - if isinstance(attr, ET.Element): - # Do no recurse on XML, just return the tree as-is - return attr - if isinstance(attr, str): - return self.deserialize_basic(attr, "str") - obj_type = type(attr) - if obj_type in self.basic_types: - return self.deserialize_basic(attr, self.basic_types[obj_type]) - if obj_type is _long_type: - return self.deserialize_long(attr) - - if obj_type == dict: - deserialized = {} - for key, value in attr.items(): - try: - deserialized[key] = self.deserialize_object(value, **kwargs) - except ValueError: - deserialized[key] = None - return deserialized - - if obj_type == list: - deserialized = [] - for obj in attr: - try: - deserialized.append(self.deserialize_object(obj, **kwargs)) - except ValueError: - pass - return deserialized - - error = "Cannot deserialize generic object with type: " - raise TypeError(error + str(obj_type)) - - def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements - """Deserialize basic builtin data type from string. - Will attempt to convert to str, int, float and bool. - This function will also accept '1', '0', 'true' and 'false' as - valid bool values. 
- - :param str attr: response string to be deserialized. - :param str data_type: deserialization data type. - :return: Deserialized basic type. - :rtype: str, int, float or bool - :raises TypeError: if string format is not valid. - """ - # If we're here, data is supposed to be a basic type. - # If it's still an XML node, take the text - if isinstance(attr, ET.Element): - attr = attr.text - if not attr: - if data_type == "str": - # None or '', node is empty string. - return "" - # None or '', node with a strong type is None. - # Don't try to model "empty bool" or "empty int" - return None - - if data_type == "bool": - if attr in [True, False, 1, 0]: - return bool(attr) - if isinstance(attr, str): - if attr.lower() in ["true", "1"]: - return True - if attr.lower() in ["false", "0"]: - return False - raise TypeError("Invalid boolean value: {}".format(attr)) - - if data_type == "str": - return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec # pylint: disable=eval-used - - @staticmethod - def deserialize_unicode(data): - """Preserve unicode objects in Python 2, otherwise return data - as a string. - - :param str data: response string to be deserialized. - :return: Deserialized string. - :rtype: str or unicode - """ - # We might be here because we have an enum modeled as string, - # and we try to deserialize a partial dict with enum inside - if isinstance(data, Enum): - return data - - # Consider this is real string - try: - if isinstance(data, unicode): # type: ignore - return data - except NameError: - return str(data) - return str(data) - - @staticmethod - def deserialize_enum(data, enum_obj): - """Deserialize string into enum object. - - If the string is not a valid enum value it will be returned as-is - and a warning will be logged. - - :param str data: Response string to be deserialized. If this value is - None or invalid it will be returned as-is. - :param Enum enum_obj: Enum object to deserialize to. - :return: Deserialized enum object. 
- :rtype: Enum - """ - if isinstance(data, enum_obj) or data is None: - return data - if isinstance(data, Enum): - data = data.value - if isinstance(data, int): - # Workaround. We might consider remove it in the future. - try: - return list(enum_obj.__members__.values())[data] - except IndexError as exc: - error = "{!r} is not a valid index for enum {!r}" - raise DeserializationError(error.format(data, enum_obj)) from exc - try: - return enum_obj(str(data)) - except ValueError: - for enum_value in enum_obj: - if enum_value.value.lower() == str(data).lower(): - return enum_value - # We don't fail anymore for unknown value, we deserialize as a string - _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) - return Deserializer.deserialize_unicode(data) - - @staticmethod - def deserialize_bytearray(attr): - """Deserialize string into bytearray. - - :param str attr: response string to be deserialized. - :return: Deserialized bytearray - :rtype: bytearray - :raises TypeError: if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - return bytearray(b64decode(attr)) # type: ignore - - @staticmethod - def deserialize_base64(attr): - """Deserialize base64 encoded string into string. - - :param str attr: response string to be deserialized. - :return: Deserialized base64 string - :rtype: bytearray - :raises TypeError: if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore - attr = attr + padding # type: ignore - encoded = attr.replace("-", "+").replace("_", "/") - return b64decode(encoded) - - @staticmethod - def deserialize_decimal(attr): - """Deserialize string into Decimal object. - - :param str attr: response string to be deserialized. - :return: Deserialized decimal - :raises DeserializationError: if string format invalid. 
- :rtype: decimal - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - return decimal.Decimal(str(attr)) # type: ignore - except decimal.DecimalException as err: - msg = "Invalid decimal {}".format(attr) - raise DeserializationError(msg) from err - - @staticmethod - def deserialize_long(attr): - """Deserialize string into long (Py2) or int (Py3). - - :param str attr: response string to be deserialized. - :return: Deserialized int - :rtype: long or int - :raises ValueError: if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - return _long_type(attr) # type: ignore - - @staticmethod - def deserialize_duration(attr): - """Deserialize ISO-8601 formatted string into TimeDelta object. - - :param str attr: response string to be deserialized. - :return: Deserialized duration - :rtype: TimeDelta - :raises DeserializationError: if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - duration = isodate.parse_duration(attr) - except (ValueError, OverflowError, AttributeError) as err: - msg = "Cannot deserialize duration object." - raise DeserializationError(msg) from err - return duration - - @staticmethod - def deserialize_date(attr): - """Deserialize ISO-8601 formatted string into Date object. - - :param str attr: response string to be deserialized. - :return: Deserialized date - :rtype: Date - :raises DeserializationError: if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore - raise DeserializationError("Date must have only digits and -. Received: %s" % attr) - # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. - return isodate.parse_date(attr, defaultmonth=0, defaultday=0) - - @staticmethod - def deserialize_time(attr): - """Deserialize ISO-8601 formatted string into time object. - - :param str attr: response string to be deserialized. 
- :return: Deserialized time - :rtype: datetime.time - :raises DeserializationError: if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore - raise DeserializationError("Date must have only digits and -. Received: %s" % attr) - return isodate.parse_time(attr) - - @staticmethod - def deserialize_rfc(attr): - """Deserialize RFC-1123 formatted string into Datetime object. - - :param str attr: response string to be deserialized. - :return: Deserialized RFC datetime - :rtype: Datetime - :raises DeserializationError: if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - parsed_date = email.utils.parsedate_tz(attr) # type: ignore - date_obj = datetime.datetime( - *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) - ) - if not date_obj.tzinfo: - date_obj = date_obj.astimezone(tz=TZ_UTC) - except ValueError as err: - msg = "Cannot deserialize to rfc datetime object." - raise DeserializationError(msg) from err - return date_obj - - @staticmethod - def deserialize_iso(attr): - """Deserialize ISO-8601 formatted string into Datetime object. - - :param str attr: response string to be deserialized. - :return: Deserialized ISO datetime - :rtype: Datetime - :raises DeserializationError: if string format invalid. 
- """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - attr = attr.upper() # type: ignore - match = Deserializer.valid_date.match(attr) - if not match: - raise ValueError("Invalid datetime string: " + attr) - - check_decimal = attr.split(".") - if len(check_decimal) > 1: - decimal_str = "" - for digit in check_decimal[1]: - if digit.isdigit(): - decimal_str += digit - else: - break - if len(decimal_str) > 6: - attr = attr.replace(decimal_str, decimal_str[0:6]) - - date_obj = isodate.parse_datetime(attr) - test_utc = date_obj.utctimetuple() - if test_utc.tm_year > 9999 or test_utc.tm_year < 1: - raise OverflowError("Hit max or min date") - except (ValueError, OverflowError, AttributeError) as err: - msg = "Cannot deserialize datetime object." - raise DeserializationError(msg) from err - return date_obj - - @staticmethod - def deserialize_unix(attr): - """Serialize Datetime object into IntTime format. - This is represented as seconds. - - :param int attr: Object to be serialized. - :return: Deserialized datetime - :rtype: Datetime - :raises DeserializationError: if format invalid - """ - if isinstance(attr, ET.Element): - attr = int(attr.text) # type: ignore - try: - attr = int(attr) - date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) - except ValueError as err: - msg = "Cannot deserialize to unix datetime object." - raise DeserializationError(msg) from err - return date_obj diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py deleted file mode 100644 index 8915aadb172b..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py +++ /dev/null @@ -1,315 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- -# pylint: disable=broad-exception-caught,unused-argument,logging-fstring-interpolation,too-many-statements,too-many-return-statements -import inspect -import json -import os -import traceback -from abc import abstractmethod -from typing import Any, AsyncGenerator, Generator, Union - -import uvicorn -from opentelemetry import context as otel_context, trace -from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator -from starlette.applications import Starlette -from starlette.middleware.base import BaseHTTPMiddleware -from starlette.middleware.cors import CORSMiddleware -from starlette.requests import Request -from starlette.responses import JSONResponse, Response, StreamingResponse -from starlette.routing import Route -from starlette.types import ASGIApp - -from ..constants import Constants -from ..logger import get_logger, request_context -from ..models import ( - Response as OpenAIResponse, - ResponseStreamEvent, -) -from .common.agent_run_context import AgentRunContext - -logger = get_logger() -DEBUG_ERRORS = os.environ.get(Constants.AGENT_DEBUG_ERRORS, "false").lower() == "true" - - -class AgentRunContextMiddleware(BaseHTTPMiddleware): - def __init__(self, app: ASGIApp): - super().__init__(app) - - async def dispatch(self, request: Request, call_next): - if request.url.path in ("/runs", "/responses"): - try: - self.set_request_id_to_context_var(request) - payload = await request.json() - except Exception as e: - logger.error(f"Invalid JSON payload: {e}") - return JSONResponse({"error": f"Invalid JSON payload: {e}"}, status_code=400) - try: - request.state.agent_run_context = AgentRunContext(payload) - self.set_run_context_to_context_var(request.state.agent_run_context) - except Exception as e: - logger.error(f"Context build failed: {e}.", exc_info=True) - return JSONResponse({"error": f"Context build failed: {e}"}, status_code=500) - return await call_next(request) - - def 
set_request_id_to_context_var(self, request): - request_id = request.headers.get("X-Request-Id", None) - if request_id: - ctx = request_context.get() or {} - ctx["azure.ai.agentserver.x-request-id"] = request_id - request_context.set(ctx) - - def set_run_context_to_context_var(self, run_context): - agent_id, agent_name = "", "" - agent_obj = run_context.get_agent_id_object() - if agent_obj: - agent_name = getattr(agent_obj, "name", "") - agent_version = getattr(agent_obj, "version", "") - agent_id = f"{agent_name}:{agent_version}" - - res = { - "azure.ai.agentserver.response_id": run_context.response_id or "", - "azure.ai.agentserver.conversation_id": run_context.conversation_id or "", - "azure.ai.agentserver.streaming": str(run_context.stream or False), - "gen_ai.agent.id": agent_id, - "gen_ai.agent.name": agent_name, - "gen_ai.provider.name": "AzureAI Hosted Agents", - "gen_ai.response.id": run_context.response_id or "", - } - ctx = request_context.get() or {} - ctx.update(res) - request_context.set(ctx) - - -class FoundryCBAgent: - def __init__(self): - async def runs_endpoint(request): - # Set up tracing context and span - context = request.state.agent_run_context - ctx = request_context.get() - with self.tracer.start_as_current_span( - name=f"HostedAgents-{context.response_id}", - attributes=ctx, - kind=trace.SpanKind.SERVER, - ): - try: - logger.info("Start processing CreateResponse request:") - - context_carrier = {} - TraceContextTextMapPropagator().inject(context_carrier) - - resp = await self.agent_run(context) - - if inspect.isgenerator(resp): - # Prefetch first event to allow 500 status if generation fails immediately - try: - first_event = next(resp) - except Exception as e: # noqa: BLE001 - err_msg = str(e) if DEBUG_ERRORS else "Internal error" - logger.error("Generator initialization failed: %s\n%s", e, traceback.format_exc()) - return JSONResponse({"error": err_msg}, status_code=500) - - def gen(): - ctx = 
TraceContextTextMapPropagator().extract(carrier=context_carrier) - token = otel_context.attach(ctx) - error_sent = False - try: - # yield prefetched first event - yield _event_to_sse_chunk(first_event) - for event in resp: - yield _event_to_sse_chunk(event) - except Exception as e: # noqa: BLE001 - err_msg = str(e) if DEBUG_ERRORS else "Internal error" - logger.error("Error in non-async generator: %s\n%s", e, traceback.format_exc()) - payload = {"error": err_msg} - yield f"event: error\ndata: {json.dumps(payload)}\n\n" - yield "data: [DONE]\n\n" - error_sent = True - finally: - logger.info("End of processing CreateResponse request:") - otel_context.detach(token) - if not error_sent: - yield "data: [DONE]\n\n" - - return StreamingResponse(gen(), media_type="text/event-stream") - if inspect.isasyncgen(resp): - # Prefetch first async event to allow early 500 - try: - first_event = await resp.__anext__() - except StopAsyncIteration: - # No items produced; treat as empty successful stream - def empty_gen(): - yield "data: [DONE]\n\n" - - return StreamingResponse(empty_gen(), media_type="text/event-stream") - except Exception as e: # noqa: BLE001 - err_msg = str(e) if DEBUG_ERRORS else "Internal error" - logger.error("Async generator initialization failed: %s\n%s", e, traceback.format_exc()) - return JSONResponse({"error": err_msg}, status_code=500) - - async def gen_async(): - ctx = TraceContextTextMapPropagator().extract(carrier=context_carrier) - token = otel_context.attach(ctx) - error_sent = False - try: - # yield prefetched first event - yield _event_to_sse_chunk(first_event) - async for event in resp: - yield _event_to_sse_chunk(event) - except Exception as e: # noqa: BLE001 - err_msg = str(e) if DEBUG_ERRORS else "Internal error" - logger.error("Error in async generator: %s\n%s", e, traceback.format_exc()) - payload = {"error": err_msg} - yield f"event: error\ndata: {json.dumps(payload)}\n\n" - yield "data: [DONE]\n\n" - error_sent = True - finally: - 
logger.info("End of processing CreateResponse request.") - otel_context.detach(token) - if not error_sent: - yield "data: [DONE]\n\n" - - return StreamingResponse(gen_async(), media_type="text/event-stream") - logger.info("End of processing CreateResponse request.") - return JSONResponse(resp.as_dict()) - except Exception as e: - # TODO: extract status code from exception - logger.error(f"Error processing CreateResponse request: {traceback.format_exc()}") - return JSONResponse({"error": str(e)}, status_code=500) - - async def liveness_endpoint(request): - result = await self.agent_liveness(request) - return _to_response(result) - - async def readiness_endpoint(request): - result = await self.agent_readiness(request) - return _to_response(result) - - routes = [ - Route("/runs", runs_endpoint, methods=["POST"], name="agent_run"), - Route("/responses", runs_endpoint, methods=["POST"], name="agent_response"), - Route("/liveness", liveness_endpoint, methods=["GET"], name="agent_liveness"), - Route("/readiness", readiness_endpoint, methods=["GET"], name="agent_readiness"), - ] - - self.app = Starlette(routes=routes) - self.app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], - ) - self.app.add_middleware(AgentRunContextMiddleware) - - @self.app.on_event("startup") - async def attach_appinsights_logger(): - import logging - - for handler in logger.handlers: - if handler.name == "appinsights_handler": - for logger_name in ["uvicorn", "uvicorn.error", "uvicorn.access"]: - uv_logger = logging.getLogger(logger_name) - uv_logger.addHandler(handler) - uv_logger.setLevel(logger.level) - uv_logger.propagate = False - - self.tracer = None - - @abstractmethod - async def agent_run( - self, context: AgentRunContext - ) -> Union[OpenAIResponse, Generator[ResponseStreamEvent, Any, Any], AsyncGenerator[ResponseStreamEvent, Any]]: - raise NotImplementedError - - async def agent_liveness(self, request) -> 
Union[Response, dict]: - return Response(status_code=200) - - async def agent_readiness(self, request) -> Union[Response, dict]: - return {"status": "ready"} - - async def run_async( - self, - port: int = int(os.environ.get("DEFAULT_AD_PORT", 8088)), - ) -> None: - """ - Awaitable server starter for use **inside** an existing event loop. - - :param port: Port to listen on. - :type port: int - """ - self.init_tracing() - config = uvicorn.Config(self.app, host="0.0.0.0", port=port, loop="asyncio") - server = uvicorn.Server(config) - logger.info(f"Starting FoundryCBAgent server async on port {port}") - await server.serve() - - def run(self, port: int = int(os.environ.get("DEFAULT_AD_PORT", 8088))) -> None: - """ - Start a Starlette server on localhost: exposing: - POST /runs - POST /responses - GET /liveness - GET /readiness - - :param port: Port to listen on. - :type port: int - """ - self.init_tracing() - logger.info(f"Starting FoundryCBAgent server on port {port}") - uvicorn.run(self.app, host="0.0.0.0", port=port) - - def init_tracing(self): - exporter = os.environ.get(Constants.OTEL_EXPORTER_ENDPOINT) - app_insights_conn_str = os.environ.get(Constants.APPLICATION_INSIGHTS_CONNECTION_STRING) - if exporter or app_insights_conn_str: - from opentelemetry.sdk.resources import Resource - from opentelemetry.sdk.trace import TracerProvider - - resource = Resource.create(self.get_trace_attributes()) - provider = TracerProvider(resource=resource) - if exporter: - self.setup_otlp_exporter(exporter, provider) - if app_insights_conn_str: - self.setup_application_insights_exporter(app_insights_conn_str, provider) - trace.set_tracer_provider(provider) - self.init_tracing_internal(exporter_endpoint=exporter, app_insights_conn_str=app_insights_conn_str) - self.tracer = trace.get_tracer(__name__) - - def get_trace_attributes(self): - return { - "service.name": "azure.ai.agentserver", - } - - def init_tracing_internal(self, exporter_endpoint=None, app_insights_conn_str=None): - 
pass - - def setup_application_insights_exporter(self, connection_string, provider): - from opentelemetry.sdk.trace.export import BatchSpanProcessor - - from azure.monitor.opentelemetry.exporter import AzureMonitorTraceExporter - - exporter_instance = AzureMonitorTraceExporter.from_connection_string(connection_string) - processor = BatchSpanProcessor(exporter_instance) - provider.add_span_processor(processor) - logger.info("Tracing setup with Application Insights exporter.") - - def setup_otlp_exporter(self, endpoint, provider): - from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter - from opentelemetry.sdk.trace.export import BatchSpanProcessor - - exporter_instance = OTLPSpanExporter(endpoint=endpoint) - processor = BatchSpanProcessor(exporter_instance) - provider.add_span_processor(processor) - logger.info(f"Tracing setup with OTLP exporter: {endpoint}") - - -def _event_to_sse_chunk(event: ResponseStreamEvent) -> str: - event_data = json.dumps(event.as_dict()) - if event.type: - return f"event: {event.type}\ndata: {event_data}\n\n" - return f"data: {event_data}\n\n" - - -def _to_response(result: Union[Response, dict]) -> Response: - return result if isinstance(result, Response) else JSONResponse(result) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py deleted file mode 100644 index 6fae56f0027d..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py +++ /dev/null @@ -1,76 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- -from ...logger import get_logger -from ...models import CreateResponse -from ...models.projects import AgentId, AgentReference, ResponseConversation1 -from .id_generator.foundry_id_generator import FoundryIdGenerator -from .id_generator.id_generator import IdGenerator - -logger = get_logger() - - -class AgentRunContext: - def __init__(self, payload: dict): - self._raw_payload = payload - self._request = _deserialize_create_response(payload) - self._id_generator = FoundryIdGenerator.from_request(payload) - self._response_id = self._id_generator.response_id - self._conversation_id = self._id_generator.conversation_id - self._stream = self.request.get("stream", False) - - @property - def raw_payload(self) -> dict: - return self._raw_payload - - @property - def request(self) -> CreateResponse: - return self._request - - @property - def id_generator(self) -> IdGenerator: - return self._id_generator - - @property - def response_id(self) -> str: - return self._response_id - - @property - def conversation_id(self) -> str: - return self._conversation_id - - @property - def stream(self) -> bool: - return self._stream - - def get_agent_id_object(self) -> AgentId: - agent = self.request.get("agent") - if not agent: - return None # type: ignore - return AgentId( - { - "type": agent.type, - "name": agent.name, - "version": agent.version, - } - ) - - def get_conversation_object(self) -> ResponseConversation1: - if not self._conversation_id: - return None # type: ignore - return ResponseConversation1(id=self._conversation_id) - - -def _deserialize_create_response(payload: dict) -> CreateResponse: - _deserialized = CreateResponse(**payload) - - raw_agent_reference = payload.get("agent") - if raw_agent_reference: - _deserialized["agent"] = _deserialize_agent_reference(raw_agent_reference) - return _deserialized - - -def _deserialize_agent_reference(payload: dict) -> AgentReference: - if not payload: - return None # type: 
ignore - return AgentReference(**payload) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/foundry_id_generator.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/foundry_id_generator.py deleted file mode 100644 index 910a7c481daa..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/foundry_id_generator.py +++ /dev/null @@ -1,136 +0,0 @@ -# pylint: disable=docstring-missing-return,docstring-missing-param,docstring-missing-rtype -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -from __future__ import annotations - -import base64 -import os -import re -from typing import Optional - -from .id_generator import IdGenerator - -_WATERMARK_RE = re.compile(r"^[A-Za-z0-9]*$") - - -class FoundryIdGenerator(IdGenerator): - """ - Python port of the C# FoundryIdGenerator. - - Notable behaviors preserved: - - Secure, alphanumeric entropy via base64 filtering, retrying until exact length. - - Watermark must be strictly alphanumeric; inserted mid-entropy. - - Only one delimiter (default "_") after the prefix; no delimiter between entropy and partition key. - - Partition key is the last N characters of the second ID segment (post-delimiter). 
- """ - - def __init__(self, response_id: Optional[str], conversation_id: Optional[str]): - self.response_id = response_id or self._new_id("resp") - self.conversation_id = conversation_id or self._new_id("conv") - self._partition_id = self._extract_partition_id(self.conversation_id) - - @classmethod - def from_request(cls, payload: dict) -> "FoundryIdGenerator": - response_id = payload.get("metadata", {}).get("response_id", None) - conv_id_raw = payload.get("conversation", None) - if isinstance(conv_id_raw, str): - conv_id = conv_id_raw - elif isinstance(conv_id_raw, dict): - conv_id = conv_id_raw.get("id", None) - else: - conv_id = None - return cls(response_id, conv_id) - - def generate(self, category: Optional[str] = None) -> str: - prefix = "id" if not category else category - return self._new_id(prefix, partition_key=self._partition_id) - - # --- Static helpers (mirror C# private static methods) -------------------- - - @staticmethod - def _new_id( - prefix: str, - string_length: int = 32, - partition_key_length: int = 18, - infix: Optional[str] = "", - watermark: str = "", - delimiter: str = "_", - partition_key: Optional[str] = None, - partition_key_hint: str = "", - ) -> str: - """ - Generates a new ID. 
- - Format matches the C# logic: - f"{prefix}{delimiter}{infix}{partitionKey}{entropy}" - (i.e., exactly one delimiter after prefix; no delimiter between entropy and partition key) - """ - entropy = FoundryIdGenerator._secure_entropy(string_length) - - if partition_key is not None: - pkey = partition_key - elif partition_key_hint: - pkey = FoundryIdGenerator._extract_partition_id( - partition_key_hint, - string_length=string_length, - partition_key_length=partition_key_length, - delimiter=delimiter, - ) - else: - pkey = FoundryIdGenerator._secure_entropy(partition_key_length) - - if watermark: - if not _WATERMARK_RE.fullmatch(watermark): - raise ValueError(f"Only alphanumeric characters may be in watermark: {watermark}") - half = string_length // 2 - entropy = f"{entropy[:half]}{watermark}{entropy[half:]}" - - infix = infix or "" - prefix_part = f"{prefix}{delimiter}" if prefix else "" - return f"{prefix_part}{entropy}{infix}{pkey}" - - @staticmethod - def _secure_entropy(string_length: int) -> str: - """ - Generates a secure random alphanumeric string of exactly `string_length`. - Re-tries whole generation until the filtered base64 string is exactly the desired length, - matching the C# behavior. - """ - if string_length < 1: - raise ValueError("Must be greater than or equal to 1") - - while True: - # Use cryptographically secure bytes; base64 then filter to alnum. - buf = os.urandom(string_length) - encoded = base64.b64encode(buf).decode("ascii") - alnum = "".join(ch for ch in encoded if ch.isalnum()) - if len(alnum) >= string_length: - return alnum[:string_length] - # else: retry, same as the C# loop which discards and regenerates - - @staticmethod - def _extract_partition_id( - id_str: str, - string_length: int = 32, - partition_key_length: int = 18, - delimiter: str = "_", - ) -> str: - """ - Extracts partition key from an existing ID. - - Expected shape (per C# logic): "_" - We take the last `partition_key_length` characters from the *second* segment. 
- """ - if not id_str: - raise ValueError("Id cannot be null or empty") - - parts = [p for p in id_str.split(delimiter) if p] # remove empty entries like C# Split(..., RemoveEmptyEntries) - if len(parts) < 2: - raise ValueError(f"Id '{id_str}' does not contain a valid partition key.") - - segment = parts[1] - if len(segment) < string_length + partition_key_length: - raise ValueError(f"Id '{id_str}' does not contain a valid id.") - - return segment[-partition_key_length:] diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/id_generator.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/id_generator.py deleted file mode 100644 index 48f0d9add17d..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/id_generator.py +++ /dev/null @@ -1,19 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -from abc import ABC, abstractmethod -from typing import Optional - - -class IdGenerator(ABC): - @abstractmethod - def generate(self, category: Optional[str] = None) -> str: ... 
- - def generate_function_call_id(self) -> str: - return self.generate("func") - - def generate_function_output_id(self) -> str: - return self.generate("funcout") - - def generate_message_id(self) -> str: - return self.generate("msg") diff --git a/sdk/agentserver/azure-ai-agentserver-core/cspell.json b/sdk/agentserver/azure-ai-agentserver-core/cspell.json index 126cadc0625c..a2c6989a053e 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/cspell.json +++ b/sdk/agentserver/azure-ai-agentserver-core/cspell.json @@ -1,27 +1,25 @@ { "ignoreWords": [ - "Agentic", - "UPIA", - "ANSII", - "inpainting", - "CSDL", - "azureai", - "GLEU", - "fstring", - "alnum", - "GENAI", - "Prereqs", - "mslearn", - "PYTHONIOENCODING", - "GETFL", - "DETFL", - "SETFL", - "Planifica" + "agentserver", + "appinsights", + "ASGI", + "autouse", + "caplog", + "genai", + "hypercorn", + "openapi", + "paramtype", + "pytestmark", + "rtype", + "starlette", + "traceparent", + "tracestate", + "tracecontext" ], "ignorePaths": [ - "*.csv", - "*.json", - "*.rst", - "samples/**" + "*.csv", + "*.json", + "*.rst", + "samples/**" ] - } \ No newline at end of file +} diff --git a/sdk/agentserver/azure-ai-agentserver-core/dev_requirements.txt b/sdk/agentserver/azure-ai-agentserver-core/dev_requirements.txt index 129e3e21fef1..5a716de9f2de 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/dev_requirements.txt +++ b/sdk/agentserver/azure-ai-agentserver-core/dev_requirements.txt @@ -1,2 +1,7 @@ -e ../../../eng/tools/azure-sdk-tools -python-dotenv \ No newline at end of file +pytest +httpx +pytest-asyncio +opentelemetry-api>=1.20.0 +opentelemetry-sdk>=1.20.0 +azure-monitor-opentelemetry-exporter>=1.0.0b21 diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst deleted file mode 100644 index da01b083b0b3..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst +++ 
/dev/null @@ -1,34 +0,0 @@ -azure.ai.agentserver.core package -================================= - -.. automodule:: azure.ai.agentserver.core - :inherited-members: - :members: - :undoc-members: - -Subpackages ------------ - -.. toctree:: - :maxdepth: 4 - - azure.ai.agentserver.core.server - -Submodules ----------- - -azure.ai.agentserver.core.constants module ------------------------------------------- - -.. automodule:: azure.ai.agentserver.core.constants - :inherited-members: - :members: - :undoc-members: - -azure.ai.agentserver.core.logger module ---------------------------------------- - -.. automodule:: azure.ai.agentserver.core.logger - :inherited-members: - :members: - :undoc-members: diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.id_generator.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.id_generator.rst deleted file mode 100644 index cf935aa1d1ed..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.id_generator.rst +++ /dev/null @@ -1,26 +0,0 @@ -azure.ai.agentserver.core.server.common.id\_generator package -============================================================= - -.. automodule:: azure.ai.agentserver.core.server.common.id_generator - :inherited-members: - :members: - :undoc-members: - -Submodules ----------- - -azure.ai.agentserver.core.server.common.id\_generator.foundry\_id\_generator module ------------------------------------------------------------------------------------ - -.. automodule:: azure.ai.agentserver.core.server.common.id_generator.foundry_id_generator - :inherited-members: - :members: - :undoc-members: - -azure.ai.agentserver.core.server.common.id\_generator.id\_generator module --------------------------------------------------------------------------- - -.. 
automodule:: azure.ai.agentserver.core.server.common.id_generator.id_generator - :inherited-members: - :members: - :undoc-members: diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.rst deleted file mode 100644 index 26c4aaf4d15a..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.rst +++ /dev/null @@ -1,26 +0,0 @@ -azure.ai.agentserver.core.server.common package -=============================================== - -.. automodule:: azure.ai.agentserver.core.server.common - :inherited-members: - :members: - :undoc-members: - -Subpackages ------------ - -.. toctree:: - :maxdepth: 4 - - azure.ai.agentserver.core.server.common.id_generator - -Submodules ----------- - -azure.ai.agentserver.core.server.common.agent\_run\_context module ------------------------------------------------------------------- - -.. automodule:: azure.ai.agentserver.core.server.common.agent_run_context - :inherited-members: - :members: - :undoc-members: diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.rst deleted file mode 100644 index b82fa765b839..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.rst +++ /dev/null @@ -1,26 +0,0 @@ -azure.ai.agentserver.core.server package -======================================== - -.. automodule:: azure.ai.agentserver.core.server - :inherited-members: - :members: - :undoc-members: - -Subpackages ------------ - -.. toctree:: - :maxdepth: 4 - - azure.ai.agentserver.core.server.common - -Submodules ----------- - -azure.ai.agentserver.core.server.base module --------------------------------------------- - -.. 
automodule:: azure.ai.agentserver.core.server.base - :inherited-members: - :members: - :undoc-members: diff --git a/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml index f574360722bb..4080fb6de696 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml +++ b/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml @@ -1,13 +1,14 @@ [project] name = "azure-ai-agentserver-core" dynamic = ["version", "readme"] -description = "Agents server adapter for Azure AI" +description = "Foundation utilities and host framework for Azure AI Hosted Agents" requires-python = ">=3.10" authors = [ { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, ] license = "MIT" classifiers = [ + "Development Status :: 4 - Beta", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3", @@ -15,26 +16,30 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", ] -keywords = ["azure", "azure sdk"] +keywords = ["azure", "azure sdk", "agent", "agentserver", "core"] dependencies = [ - "azure-monitor-opentelemetry>=1.5.0", - "azure-ai-projects", - "azure-ai-agents>=1.2.0b5", - "azure-core>=1.35.0", - "azure-identity", - "openai>=1.80.0", - "opentelemetry-api>=1.35", - "opentelemetry-exporter-otlp-proto-http", "starlette>=0.45.0", - "uvicorn>=0.31.0", + "hypercorn>=0.17.0", +] + +[project.optional-dependencies] +tracing = [ + "opentelemetry-api>=1.20.0", + "opentelemetry-sdk>=1.20.0", + "opentelemetry-exporter-otlp-proto-grpc>=1.20.0", + "azure-monitor-opentelemetry-exporter>=1.0.0b21", ] [build-system] requires = ["setuptools>=69", "wheel"] build-backend = "setuptools.build_meta" +[project.urls] +repository = "https://github.com/Azure/azure-sdk-for-python" + [tool.setuptools.packages.find] exclude = [ "tests*", 
@@ -42,6 +47,7 @@ exclude = [ "doc*", "azure", "azure.ai", + "azure.ai.agentserver", ] [tool.setuptools.dynamic] @@ -49,23 +55,23 @@ version = { attr = "azure.ai.agentserver.core._version.VERSION" } readme = { file = ["README.md"], content-type = "text/markdown" } [tool.setuptools.package-data] -pytyped = ["py.typed"] +"azure.ai.agentserver.core" = ["py.typed"] [tool.ruff] line-length = 120 -target-version = "py311" -lint.select = ["E", "F", "B", "I"] # E=pycodestyle errors, F=Pyflakes, B=bugbear, I=import sort +target-version = "py310" +lint.select = ["E", "F", "B", "I"] lint.ignore = [] fix = false -exclude = [ - "**/azure/ai/agentserver/core/models/", -] [tool.ruff.lint.isort] known-first-party = ["azure.ai.agentserver.core"] combine-as-imports = true [tool.azure-sdk-build] -breaking = false # incompatible python version -pyright = false -verifytypes = false \ No newline at end of file +breaking = false +mypy = true +pyright = true +verifytypes = true +pylint = true +type_check_samples = false diff --git a/sdk/agentserver/azure-ai-agentserver-core/pyrightconfig.json b/sdk/agentserver/azure-ai-agentserver-core/pyrightconfig.json index b7490ae2b8c7..f36c5a7fe0d3 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/pyrightconfig.json +++ b/sdk/agentserver/azure-ai-agentserver-core/pyrightconfig.json @@ -1,13 +1,11 @@ { - "reportOptionalMemberAccess": "warning", - "reportArgumentType": "warning", - "reportAttributeAccessIssue": "warning", - "reportMissingImports": "warning", - "reportGeneralTypeIssues": "warning", - "reportReturnType": "warning", - - "exclude": [ - "**/azure/ai/agentserver/core/models/**", - "**/samples/**" - ] -} \ No newline at end of file + "reportOptionalMemberAccess": "warning", + "reportArgumentType": "warning", + "reportAttributeAccessIssue": "warning", + "reportMissingImports": "warning", + "reportGeneralTypeIssues": "warning", + "reportReturnType": "warning", + "exclude": [ + "**/samples/**" + ] +} diff --git 
a/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/.env.sample b/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/.env.sample deleted file mode 100644 index a19b1c6d02f7..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/.env.sample +++ /dev/null @@ -1,24 +0,0 @@ -# Core agent configuration -API_HOST=github -WEEKEND_PLANNER_MODE=container - -# GitHub Models (when API_HOST=github) -GITHUB_TOKEN=your-github-token -GITHUB_OPENAI_BASE_URL=https://models.inference.ai.azure.com -GITHUB_MODEL=gpt-4o - -# Azure OpenAI (when API_HOST=azure) -AZURE_OPENAI_ENDPOINT=https://.openai.azure.com/ -AZURE_OPENAI_VERSION=2025-01-01-preview -AZURE_OPENAI_CHAT_DEPLOYMENT= - -# Telemetry & tracing -OTEL_EXPORTER_OTLP_ENDPOINT=http://127.0.0.1:4318/v1/traces -OTEL_EXPORTER_OTLP_PROTOCOL=grpc -OTEL_EXPORTER_OTLP_GRPC_ENDPOINT=http://127.0.0.1:4317 -APPLICATION_INSIGHTS_CONNECTION_STRING= - -# Optional GenAI capture overrides -OTEL_GENAI_AGENT_NAME=Bilingual Weekend Planner Agent -OTEL_GENAI_AGENT_DESCRIPTION=Assistant that plans weekend activities using weather and events data in multiple languages -OTEL_GENAI_AGENT_ID=bilingual-weekend-planner diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/README.md b/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/README.md deleted file mode 100644 index 83296f5dd348..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/README.md +++ /dev/null @@ -1,42 +0,0 @@ -Bilingual Weekend Planner (Custom Container + Telemetry) - -- Container-hosted multi-agent weekend planner with full GenAI telemetry capture and a standalone tracing demo that exercises `opentelemetry-instrumentation-openai-agents-v2`. 
- -Prereqs -- Optional: Activate repo venv `source .venv/bin/activate` -- Install deps `pip install -U -r samples/python/custom/bilingual_weekend_planner/requirements.txt` - -Env Vars -Choose the API host via `API_HOST`: - -- `github`: GitHub Models hosted on Azure AI Inference - - `GITHUB_TOKEN` - - Optional: `GITHUB_OPENAI_BASE_URL` (default `https://models.inference.ai.azure.com`) - - Optional: `GITHUB_MODEL` (default `gpt-4o`) -- `azure`: Azure OpenAI - - `AZURE_OPENAI_ENDPOINT` (e.g. `https://.openai.azure.com/`) - - `AZURE_OPENAI_VERSION` (e.g. `2025-01-01-preview`) - - `AZURE_OPENAI_CHAT_DEPLOYMENT` (deployment name) - -Modes -- Container (default): runs the bilingual triage agent via `FoundryCBAgent`. -- `API_HOST=github GITHUB_TOKEN=... ./run.sh` -- `API_HOST=azure AZURE_OPENAI_ENDPOINT=... AZURE_OPENAI_VERSION=2025-01-01-preview AZURE_OPENAI_CHAT_DEPLOYMENT=... ./run.sh` - - Test (non-stream): - `curl -s http://localhost:8088/responses -H 'Content-Type: application/json' -d '{"input":"What should I do this weekend in Seattle?"}'` - - Test (stream): - `curl -s http://localhost:8088/responses -H 'Content-Type: application/json' -d '{"input":"Plan my weekend in Barcelona","stream":true}'` -- Telemetry demo: set `WEEKEND_PLANNER_MODE=demo` to run the content-capture simulation (no model calls). - `WEEKEND_PLANNER_MODE=demo python main.py` - -Telemetry -- Console exporter is enabled by default; set `OTEL_EXPORTER_OTLP_ENDPOINT` (HTTP) or `OTEL_EXPORTER_OTLP_GRPC_ENDPOINT` to export spans elsewhere. -- Set `APPLICATION_INSIGHTS_CONNECTION_STRING` to export spans to Azure Monitor. -- GenAI capture flags are pre-configured (content, system instructions, tool metadata). -- `opentelemetry-instrumentation-openai-agents-v2` enables span-and-event message capture for requests, responses, and tool payloads. -- The tracing demo uses the `agents.tracing` helpers to emit spans without invoking external APIs. 
- -Notes -- Uses `FoundryCBAgent` to host the bilingual weekend planner triage agent on `http://localhost:8088`. -- Tools: `get_weather`, `get_activities`, `get_current_date`. -- Rich logger output highlights tool invocations; bilingual agents route traveler requests to the right language specialist. diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py b/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py deleted file mode 100644 index 099d8dc45181..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py +++ /dev/null @@ -1,579 +0,0 @@ -# mypy: ignore-errors -"""Bilingual weekend planner sample with full GenAI telemetry capture.""" - -from __future__ import annotations - -import json -import logging -import os -import random -from dataclasses import dataclass -from datetime import datetime, timezone -from typing import Callable -from urllib.parse import urlparse - -import azure.identity -import openai -from agents import ( - Agent, - OpenAIChatCompletionsModel, - Runner, - function_tool, - set_default_openai_client, - set_tracing_disabled, -) -from agents.tracing import ( - agent_span as tracing_agent_span, - function_span as tracing_function_span, - generation_span as tracing_generation_span, - trace as tracing_trace, -) -from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent -from azure.ai.agentserver.core.models import ( - CreateResponse, - Response as OpenAIResponse, -) -from azure.ai.agentserver.core.models.projects import ( - ItemContentOutputText, - ResponseCompletedEvent, - ResponseCreatedEvent, - ResponseOutputItemAddedEvent, - ResponsesAssistantMessageItemResource, - ResponseTextDeltaEvent, - ResponseTextDoneEvent, -) -from dotenv import load_dotenv -from opentelemetry import trace -from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter -from opentelemetry.instrumentation.openai_agents import 
OpenAIAgentsInstrumentor -from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.trace import TracerProvider -from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter -from rich.logging import RichHandler - -try: - from azure.monitor.opentelemetry.exporter import ( # mypy: ignore - AzureMonitorTraceExporter, - ) -except Exception: # pragma: no cover - AzureMonitorTraceExporter = None # mypy: ignore - -# Load env early so adapter init sees them -load_dotenv(override=True) - - -logging.basicConfig( - level=logging.WARNING, - format="%(message)s", - datefmt="[%X]", - handlers=[RichHandler()], -) -logger = logging.getLogger("bilingual_weekend_planner") -RUN_MODE = os.getenv("WEEKEND_PLANNER_MODE", "container").lower() - - -@dataclass -class _ApiConfig: - """Helper describing how to create the OpenAI client.""" - - build_client: Callable[[], openai.AsyncOpenAI] - model_name: str - base_url: str - provider: str - - -def _set_capture_env(provider: str, base_url: str) -> None: - """Enable all GenAI capture toggles prior to instrumentation.""" - - capture_defaults = { - "OTEL_INSTRUMENTATION_OPENAI_AGENTS_CAPTURE_CONTENT": "true", - "OTEL_INSTRUMENTATION_OPENAI_AGENTS_CAPTURE_METRICS": "true", - "OTEL_GENAI_CAPTURE_MESSAGES": "true", - "OTEL_GENAI_CAPTURE_SYSTEM_INSTRUCTIONS": "true", - "OTEL_GENAI_CAPTURE_TOOL_DEFINITIONS": "true", - "OTEL_GENAI_EMIT_OPERATION_DETAILS": "true", - "OTEL_GENAI_AGENT_NAME": os.getenv( - "OTEL_GENAI_AGENT_NAME", - "Bilingual Weekend Planner Agent", - ), - "OTEL_GENAI_AGENT_DESCRIPTION": os.getenv( - "OTEL_GENAI_AGENT_DESCRIPTION", - "Assistant that plans weekend activities using weather and events data in multiple languages", - ), - "OTEL_GENAI_AGENT_ID": os.getenv( - "OTEL_GENAI_AGENT_ID", "bilingual-weekend-planner" - ), - } - for env_key, value in capture_defaults.items(): - os.environ.setdefault(env_key, value) - - parsed = urlparse(base_url) - if parsed.hostname: - 
os.environ.setdefault("OTEL_GENAI_SERVER_ADDRESS", parsed.hostname) - if parsed.port: - os.environ.setdefault("OTEL_GENAI_SERVER_PORT", str(parsed.port)) - - -def _resolve_api_config() -> _ApiConfig: - """Return the client configuration for the requested host.""" - - host = os.getenv("API_HOST", "github").lower() - - if host == "github": - base_url = os.getenv( - "GITHUB_OPENAI_BASE_URL", - "https://models.inference.ai.azure.com", - ).rstrip("/") - model_name = os.getenv("GITHUB_MODEL", "gpt-4o") - api_key = os.environ.get("GITHUB_TOKEN") - if not api_key: - if RUN_MODE != "demo": - raise RuntimeError("GITHUB_TOKEN is required when API_HOST=github") - api_key = "demo-key" - - def _build_client() -> openai.AsyncOpenAI: - return openai.AsyncOpenAI(base_url=base_url, api_key=api_key) - - return _ApiConfig( - build_client=_build_client, - model_name=model_name, - base_url=base_url, - provider="azure.ai.inference", - ) - - if host == "azure": - # Explicitly check for required environment variables - if "AZURE_OPENAI_ENDPOINT" not in os.environ: - raise ValueError("AZURE_OPENAI_ENDPOINT is required when API_HOST=azure") - if "AZURE_OPENAI_VERSION" not in os.environ: - raise ValueError("AZURE_OPENAI_VERSION is required when API_HOST=azure") - if "AZURE_OPENAI_CHAT_DEPLOYMENT" not in os.environ: - raise ValueError( - "AZURE_OPENAI_CHAT_DEPLOYMENT is required when API_HOST=azure" - ) - endpoint = os.environ["AZURE_OPENAI_ENDPOINT"].rstrip("/") - api_version = os.environ["AZURE_OPENAI_VERSION"] - deployment = os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"] - - credential = azure.identity.DefaultAzureCredential() - token_provider = azure.identity.get_bearer_token_provider( - credential, - "https://cognitiveservices.azure.com/.default", - ) - - def _build_client() -> openai.AsyncAzureOpenAI: - return openai.AsyncAzureOpenAI( - api_version=api_version, - azure_endpoint=endpoint, - azure_ad_token_provider=token_provider, - ) - - return _ApiConfig( - build_client=_build_client, - 
model_name=deployment, - base_url=endpoint, - provider="azure.ai.openai", - ) - - raise ValueError( - f"Unsupported API_HOST '{host}'. Supported values are 'github' or 'azure'." - ) - - -def _configure_otel() -> None: - """Configure the tracer provider and exporters.""" - - grpc_endpoint = os.getenv("OTEL_EXPORTER_OTLP_GRPC_ENDPOINT") - if not grpc_endpoint: - default_otlp_endpoint = os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT") - protocol = os.getenv("OTEL_EXPORTER_OTLP_PROTOCOL", "grpc").lower() - if default_otlp_endpoint and protocol == "grpc": - grpc_endpoint = default_otlp_endpoint - - conn = os.getenv("APPLICATION_INSIGHTS_CONNECTION_STRING") - resource = Resource.create( - { - "service.name": "weekend-planner-service", - "service.namespace": "leisure-orchestration", - "service.version": os.getenv("SERVICE_VERSION", "1.0.0"), - } - ) - - tracer_provider = TracerProvider(resource=resource) - - if grpc_endpoint: - tracer_provider.add_span_processor( - BatchSpanProcessor(OTLPSpanExporter(endpoint=grpc_endpoint)) - ) - print(f"[otel] OTLP gRPC exporter configured ({grpc_endpoint})") - elif conn: - if AzureMonitorTraceExporter is None: - print( - "Warning: Azure Monitor exporter not installed. 
" - "Install with: pip install azure-monitor-opentelemetry-exporter", - ) - tracer_provider.add_span_processor( - BatchSpanProcessor(ConsoleSpanExporter()) - ) - else: - tracer_provider.add_span_processor( - BatchSpanProcessor( - AzureMonitorTraceExporter.from_connection_string(conn) - ) - ) - print("[otel] Azure Monitor trace exporter configured") - else: - tracer_provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter())) - print("[otel] Console span exporter configured") - print( - "[otel] Set APPLICATION_INSIGHTS_CONNECTION_STRING to export to Application Insights " - "instead of the console", - ) - - trace.set_tracer_provider(tracer_provider) - - -api_config = _resolve_api_config() -_set_capture_env(api_config.provider, api_config.base_url) -_configure_otel() -OpenAIAgentsInstrumentor().instrument( - tracer_provider=trace.get_tracer_provider(), - capture_message_content="span_and_event", - agent_name="Weekend Planner", - base_url=api_config.base_url, - system=api_config.provider, -) - -client = api_config.build_client() -set_default_openai_client(client) -set_tracing_disabled(False) - - -def _chat_model() -> OpenAIChatCompletionsModel: - """Return the chat completions model used for weekend planning.""" - - return OpenAIChatCompletionsModel(model=api_config.model_name, openai_client=client) - - -SUNNY_WEATHER_PROBABILITY = 0.05 - - -@function_tool -def get_weather(city: str) -> dict[str, object]: - """Fetch mock weather information for the requested city.""" - - logger.info("Getting weather for %s", city) - if random.random() < SUNNY_WEATHER_PROBABILITY: - return {"city": city, "temperature": 72, "description": "Sunny"} - return {"city": city, "temperature": 60, "description": "Rainy"} - - -@function_tool -def get_activities(city: str, date: str) -> list[dict[str, object]]: - """Return mock activities for the supplied city and date.""" - - logger.info("Getting activities for %s on %s", city, date) - return [ - {"name": "Hiking", "location": city}, - 
{"name": "Beach", "location": city}, - {"name": "Museum", "location": city}, - ] - - -@function_tool -def get_current_date() -> str: - """Return the current date as YYYY-MM-DD.""" - - logger.info("Getting current date") - return datetime.now().strftime("%Y-%m-%d") - - -ENGLISH_WEEKEND_PLANNER = Agent( - name="Weekend Planner (English)", - instructions=( - "You help English-speaking travelers plan their weekends. " - "Use the available tools to gather the weekend date, current weather, and local activities. " - "Only recommend activities that align with the weather and include the date in your final response." - ), - tools=[get_weather, get_activities, get_current_date], - model=_chat_model(), -) - -# cSpell:disable -SPANISH_WEEKEND_PLANNER = Agent( - name="Planificador de fin de semana (Español)", - instructions=( - "Ayudas a viajeros hispanohablantes a planificar su fin de semana. " - "Usa las herramientas disponibles para obtener la fecha, el clima y actividades locales. " - "Recomienda actividades acordes al clima e incluye la fecha del fin de semana en tu respuesta." - ), - tools=[get_weather, get_activities, get_current_date], - model=_chat_model(), -) - -TRIAGE_AGENT = Agent( - name="Weekend Planner Triage", - instructions=( - "Revisa el idioma del viajero. " - "Si el mensaje está en español, realiza un handoff a 'Planificador de fin de semana (Español)'. " - "De lo contrario, usa 'Weekend Planner (English)'." 
- ), - handoffs=[SPANISH_WEEKEND_PLANNER, ENGLISH_WEEKEND_PLANNER], - model=_chat_model(), -) -# cSpell:enable - - -def _root_span_name(provider: str) -> str: - return f"weekend_planning_session[{provider}]" - - -def _apply_weekend_semconv( - span: trace.Span, - *, - user_text: str, - final_text: str, - conversation_id: str | None, - response_id: str, - final_agent_name: str | None, - success: bool, -) -> None: - parsed = urlparse(api_config.base_url) - if parsed.hostname: - span.set_attribute("server.address", parsed.hostname) - if parsed.port: - span.set_attribute("server.port", parsed.port) - - span.set_attribute("gen_ai.operation.name", "invoke_agent") - span.set_attribute("gen_ai.provider.name", api_config.provider) - span.set_attribute("gen_ai.request.model", api_config.model_name) - span.set_attribute("gen_ai.output.type", "text") - span.set_attribute("gen_ai.response.model", api_config.model_name) - span.set_attribute("gen_ai.response.id", response_id) - span.set_attribute( - "gen_ai.response.finish_reasons", - ["stop"] if success else ["error"], - ) - - if conversation_id: - span.set_attribute("gen_ai.conversation.id", conversation_id) - if TRIAGE_AGENT.instructions: - span.set_attribute("gen_ai.system_instructions", TRIAGE_AGENT.instructions) - if final_agent_name: - span.set_attribute("gen_ai.agent.name", final_agent_name) - else: - span.set_attribute("gen_ai.agent.name", TRIAGE_AGENT.name) - if user_text: - span.set_attribute( - "gen_ai.input.messages", - json.dumps([{"role": "user", "content": user_text}]), - ) - if final_text: - span.set_attribute( - "gen_ai.output.messages", - json.dumps([{"role": "assistant", "content": final_text}]), - ) - - -def _extract_user_text(request: CreateResponse) -> str: - """Extract the first user text input from the request body.""" - - input = request.get("input") - if not input: - return "" - - first = input[0] - content = first.get("content", None) if isinstance(first, dict) else first - if isinstance(content, str): 
- return content - - if isinstance(content, list): - for item in content: - text = item.get("text", None) - if text: - return text - return "" - - -def _stream_final_text(final_text: str, context: AgentRunContext): - """Yield streaming events for the provided final text.""" - - async def _async_stream(): - assembled = "" - yield ResponseCreatedEvent(response=OpenAIResponse(output=[])) - item_id = context.id_generator.generate_message_id() - yield ResponseOutputItemAddedEvent( - output_index=0, - item=ResponsesAssistantMessageItemResource( - id=item_id, - status="in_progress", - content=[ItemContentOutputText(text="", annotations=[])], - ), - ) - - words = final_text.split(" ") - for idx, token in enumerate(words): - piece = token if idx == len(words) - 1 else token + " " - assembled += piece - yield ResponseTextDeltaEvent(output_index=0, content_index=0, delta=piece) - - yield ResponseTextDoneEvent(output_index=0, content_index=0, text=assembled) - yield ResponseCompletedEvent( - response=OpenAIResponse( - metadata={}, - temperature=0.0, - top_p=0.0, - user="user", - id=context.response_id, - created_at=datetime.now(timezone.utc), - output=[ - ResponsesAssistantMessageItemResource( - id=item_id, - status="completed", - content=[ItemContentOutputText(text=assembled, annotations=[])], - ) - ], - ) - ) - - return _async_stream() - - -def dump(title: str, payload: object) -> None: - """Pretty print helper for the tracing demo.""" - - print(f"\n=== {title} ===") - print(json.dumps(payload, indent=2)) - - -def run_content_capture_demo() -> None: - """Simulate an agent workflow using the tracing helpers without calling an API.""" - - itinerary_prompt = [ - {"role": "system", "content": "Help travelers plan memorable weekends."}, - {"role": "user", "content": "I'm visiting Seattle this weekend."}, - ] - tool_args = {"city": "Seattle", "date": "2025-05-17"} - tool_result = { - "forecast": "Light rain, highs 60°F", - "packing_tips": ["rain jacket", "waterproof shoes"], - } - 
- with tracing_trace("weekend-planner-simulation"): - with tracing_agent_span(name="weekend_planner_demo") as agent: - dump( - "Agent span started", - {"span_id": agent.span_id, "trace_id": agent.trace_id}, - ) - - with tracing_generation_span( - input=itinerary_prompt, - output=[ - { - "role": "assistant", - "content": ( - "Day 1 explore Pike Place Market, Day 2 visit the Museum of Pop Culture, " - "Day 3 take the Bainbridge ferry if weather allows." - ), - } - ], - model=api_config.model_name, - usage={ - "input_tokens": 128, - "output_tokens": 96, - "total_tokens": 224, - }, - ): - pass - - with tracing_function_span( - name="get_weather", - input=json.dumps(tool_args), - output=tool_result, - ): - pass - - print("\nWorkflow complete – spans exported to the configured OTLP endpoint.") - - -class WeekendPlannerContainer(FoundryCBAgent): - """Container entry point that surfaces the weekend planner agent via FoundryCBAgent.""" - - async def agent_run(self, context: AgentRunContext): - request = context.request - user_text = _extract_user_text(request) - - tracer = trace.get_tracer(__name__) - with tracer.start_as_current_span(_root_span_name(api_config.provider)) as span: - span.set_attribute("user.request", user_text) - span.set_attribute("api.host", os.getenv("API_HOST", "github")) - span.set_attribute("model.name", api_config.model_name) - span.set_attribute("agent.name", TRIAGE_AGENT.name) - span.set_attribute("triage.languages", "en,es") - - try: - result = await Runner.run(TRIAGE_AGENT, input=user_text) - final_text = str(result.final_output or "") - span.set_attribute( - "agent.response", final_text[:500] if final_text else "" - ) - final_agent = getattr(result, "last_agent", None) - if final_agent and getattr(final_agent, "name", None): - span.set_attribute("agent.final", final_agent.name) - span.set_attribute("request.success", True) - _apply_weekend_semconv( - span, - user_text=user_text, - final_text=final_text, - conversation_id=context.conversation_id, 
- response_id=context.response_id, - final_agent_name=getattr(final_agent, "name", None), - success=True, - ) - logger.info("Weekend planning completed successfully") - except Exception as exc: # pragma: no cover - defensive logging path - span.record_exception(exc) - span.set_attribute("request.success", False) - span.set_attribute("error.type", exc.__class__.__name__) - logger.error("Error during weekend planning: %s", exc) - final_text = f"Error running agent: {exc}" - _apply_weekend_semconv( - span, - user_text=user_text, - final_text=final_text, - conversation_id=context.conversation_id, - response_id=context.response_id, - final_agent_name=None, - success=False, - ) - - if request.get("stream", False): - return _stream_final_text(final_text, context) - - response = OpenAIResponse( - metadata={}, - temperature=0.0, - top_p=0.0, - user="user", - id=context.response_id, - created_at=datetime.now(timezone.utc), - output=[ - ResponsesAssistantMessageItemResource( - id=context.id_generator.generate_message_id(), - status="completed", - content=[ItemContentOutputText(text=final_text, annotations=[])], - ) - ], - ) - return response - - -if __name__ == "__main__": - logger.setLevel(logging.INFO) - try: - if RUN_MODE == "demo": - run_content_capture_demo() - else: - WeekendPlannerContainer().run() - finally: - trace.get_tracer_provider().shutdown() diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/requirements.txt b/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/requirements.txt deleted file mode 100644 index faf4fd5fbe2c..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/requirements.txt +++ /dev/null @@ -1,13 +0,0 @@ -openai-agents>=0.3.3 -python-dotenv -openai>=1.42.0 -azure-identity>=1.17.0 -opentelemetry-api>=1.26.0 -opentelemetry-sdk>=1.26.0 -opentelemetry-exporter-otlp-proto-http>=1.26.0 -opentelemetry-exporter-otlp-proto-grpc>=1.26.0 
-opentelemetry-instrumentation-openai-agents-v2>=0.1.0 -rich>=13.9.0 -azure-ai-agentserver-core -# Optional tracing exporters -azure-monitor-opentelemetry-exporter>=1.0.0b16 diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/run.sh b/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/run.sh deleted file mode 100644 index e3d097e14166..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/run.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# Simple local runner for the bilingual weekend planner container sample. -# Examples: -# API_HOST=github GITHUB_TOKEN=... ./run.sh - -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -ROOT_DIR="$(cd "$SCRIPT_DIR/../../../.." && pwd)" - -export PYTHONPATH="$ROOT_DIR:${PYTHONPATH:-}" - -if [[ -d "$ROOT_DIR/.venv" ]]; then - # shellcheck disable=SC1090 - source "$ROOT_DIR/.venv/bin/activate" -fi - -PYTHON_BIN="${ROOT_DIR}/.venv/bin/python" -if [[ ! -x "$PYTHON_BIN" ]]; then - PYTHON_BIN="python3" -fi - -"$PYTHON_BIN" -u "$SCRIPT_DIR/main.py" diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py b/sdk/agentserver/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py deleted file mode 100644 index af9812826941..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py +++ /dev/null @@ -1,246 +0,0 @@ -# mypy: ignore-errors -"""Custom MCP simple sample. - -This sample combines the patterns from: - - langgraph `mcp_simple` (uses MultiServerMCPClient to discover tools) - - `custom_mock_agent_test` (implements a custom FoundryCBAgent with streaming events) - -Goal: When invoked in stream mode, emit MCP list tools related stream events so a -consumer (UI / CLI) can visualize tool enumeration plus a final assistant -message. In non-stream mode, return a single aggregated response summarizing -the tools. 
- -Run: - python mcp_simple.py - -Then call (example): - curl -X POST http://localhost:8088/responses -H 'Content-Type: application/json' -d '{ - "agent": {"name": "custom_mcp", "type": "agent_reference"}, - "stream": true, - "input": "List the tools available" - }' -""" - -import datetime -import json -from typing import AsyncGenerator, List - -from langchain_mcp_adapters.client import MultiServerMCPClient - -from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent -from azure.ai.agentserver.core.models import Response as OpenAIResponse -from azure.ai.agentserver.core.models.projects import ( - ItemContentOutputText, - MCPListToolsItemResource, - MCPListToolsTool, - ResponseCompletedEvent, - ResponseCreatedEvent, - ResponseMCPListToolsCompletedEvent, - ResponseMCPListToolsInProgressEvent, - ResponseOutputItemAddedEvent, - ResponsesAssistantMessageItemResource, - ResponseTextDeltaEvent, - ResponseTextDoneEvent, -) - - -class MCPToolsAgent(FoundryCBAgent): - def __init__(self): # noqa: D401 - super().__init__() - # Lazy init; created on first request to avoid startup latency if unused - self._mcp_client = None - - async def _get_client(self) -> MultiServerMCPClient: - if self._mcp_client is None: - # Mirror langgraph sample server config - self._mcp_client = MultiServerMCPClient( - { - "mslearn": { - "url": "https://learn.microsoft.com/api/mcp", - "transport": "streamable_http", - } - } - ) - return self._mcp_client - - async def _list_tools(self) -> List[MCPListToolsTool]: - client = await self._get_client() - try: - raw_tools = await client.get_tools() - tools: List[MCPListToolsTool] = [] - for t in raw_tools: - # Support either dict-like or attribute-based tool objects - if isinstance(t, dict): - name = t.get("name", "unknown_tool") - description = t.get("description") - schema = ( - t.get("input_schema") - or t.get("schema") - or t.get("parameters") - or {} - ) - else: # Fallback to attribute access - name = getattr(t, "name", "unknown_tool") - 
description = getattr(t, "description", None) - schema = ( - getattr(t, "input_schema", None) - or getattr(t, "schema", None) - or getattr(t, "parameters", None) - or {} - ) - tools.append( - MCPListToolsTool( - name=name, - description=description, - input_schema=schema, - ) - ) - if not tools: - raise ValueError("No tools discovered from MCP server") - return tools - except Exception: # noqa: BLE001 - # Provide deterministic fallback so sample always works offline - return [ - MCPListToolsTool( - name="fallback_echo", - description="Echo back provided text.", - input_schema={ - "type": "object", - "properties": {"text": {"type": "string"}}, - "required": ["text"], - }, - ) - ] - - async def agent_run(self, context: AgentRunContext): # noqa: D401 - """Implements the FoundryCBAgent contract. - - Streaming path emits MCP list tools events + assistant summary. - Non-stream path returns aggregated assistant message. - """ - - tools = await self._list_tools() - - if context.stream: - - async def stream() -> AsyncGenerator: # noqa: D401 - # Initial empty response context (pattern from mock sample) - yield ResponseCreatedEvent(response=OpenAIResponse(output=[])) - - # Indicate listing in progress - yield ResponseMCPListToolsInProgressEvent() - - mcp_item = MCPListToolsItemResource( - id=context.id_generator.generate("mcp_list"), - server_label="mslearn", - tools=tools, - ) - yield ResponseOutputItemAddedEvent(output_index=0, item=mcp_item) - yield ResponseMCPListToolsCompletedEvent() - - # Assistant streaming summary - assistant_item = ResponsesAssistantMessageItemResource( - id=context.id_generator.generate_message_id(), - status="in_progress", - content=[ItemContentOutputText(text="", annotations=[])], - ) - yield ResponseOutputItemAddedEvent(output_index=1, item=assistant_item) - - summary_text = "Discovered MCP tools: " + ", ".join( - t.name for t in tools - ) - assembled = "" - parts = summary_text.split(" ") - for i, token in enumerate(parts): - piece = token if i 
== len(parts) - 1 else token + " " # keep spaces - assembled += piece - yield ResponseTextDeltaEvent( - output_index=1, content_index=0, delta=piece - ) - yield ResponseTextDoneEvent( - output_index=1, content_index=0, text=assembled - ) - - final_response = OpenAIResponse( - metadata={}, - temperature=0.0, - top_p=0.0, - user="user", - id=context.response_id, - created_at=datetime.datetime.now(), - output=[ - mcp_item, - ResponsesAssistantMessageItemResource( - id=assistant_item.id, - status="completed", - content=[ - ItemContentOutputText(text=assembled, annotations=[]) - ], - ), - ], - ) - yield ResponseCompletedEvent(response=final_response) - - return stream() - - # Non-stream path: single assistant message - # Build a JSON-serializable summary. Avoid dumping complex model/schema objects that - # can include non-serializable metaclass references (seen in error stacktrace). - safe_tools = [] - for t in tools: - schema = t.input_schema - # Simplify schema to plain dict/str; if not directly serializable, fallback to string. - if isinstance(schema, (str, int, float, bool)) or schema is None: - safe_schema = schema - elif isinstance(schema, dict): - # Shallow copy ensuring nested values are primitive or stringified - safe_schema = {} - for k, v in schema.items(): - if isinstance(v, (str, int, float, bool, type(None), list, dict)): - safe_schema[k] = v - else: - safe_schema[k] = str(v) - else: - safe_schema = str(schema) - safe_tools.append( - { - "name": t.name, - "description": t.description, - # Provide only top-level schema keys if dict. 
- "input_schema_keys": list(safe_schema.keys()) - if isinstance(safe_schema, dict) - else safe_schema, - } - ) - summary = { - "server_label": "mslearn", - "tool_count": len(tools), - "tools": safe_tools, - } - content = [ - ItemContentOutputText( - text="MCP tool listing completed.\n" + json.dumps(summary, indent=2), - annotations=[], - ) - ] - return OpenAIResponse( - metadata={}, - temperature=0.0, - top_p=0.0, - user="user", - id="id", - created_at=datetime.datetime.now(), - output=[ - ResponsesAssistantMessageItemResource( - id=context.id_generator.generate_message_id(), - status="completed", - content=content, - ) - ], - ) - - -my_agent = MCPToolsAgent() - -if __name__ == "__main__": - my_agent.run() diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/mcp_simple/requirements.txt b/sdk/agentserver/azure-ai-agentserver-core/samples/mcp_simple/requirements.txt deleted file mode 100644 index 525ee6af3f7d..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/samples/mcp_simple/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -langchain-mcp-adapters==0.1.11 -azure-ai-agentserver-core diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/selfhosted_invocation/requirements.txt b/sdk/agentserver/azure-ai-agentserver-core/samples/selfhosted_invocation/requirements.txt new file mode 100644 index 000000000000..1840264735c0 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/samples/selfhosted_invocation/requirements.txt @@ -0,0 +1 @@ +azure-ai-agentserver-core[tracing] diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/selfhosted_invocation/selfhosted_invocation.py b/sdk/agentserver/azure-ai-agentserver-core/samples/selfhosted_invocation/selfhosted_invocation.py new file mode 100644 index 000000000000..693d0215825a --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/samples/selfhosted_invocation/selfhosted_invocation.py @@ -0,0 +1,104 @@ +"""Self-hosted invocation agent with tracing using only the hosting package 
(Tier 3). + +Demonstrates implementing the invocations protocol directly with +``AgentHost``, ``register_routes``, and ``TracingHelper`` — without +the invocations protocol package. You handle invocation ID tracking, +session resolution, tracing spans, and response headers yourself. + +This pattern is useful when: + +- You need a custom protocol not provided by the SDK +- You want full control over endpoint routing, tracing, and request handling +- You're learning how the protocol packages work internally + +Usage:: + + pip install azure-ai-agentserver-core[tracing] + + # Enable tracing via App Insights connection string + export APPLICATIONINSIGHTS_CONNECTION_STRING="InstrumentationKey=..." + + python selfhosted_invocation.py + + # Invoke the agent + curl -X POST http://localhost:8088/invocations -H "Content-Type: application/json" -d '{"name": "Alice"}' + # -> {"greeting": "Hello, Alice!"} + + # Health check (provided by AgentHost) + curl http://localhost:8088/readiness + # -> {"status": "healthy"} +""" +import contextlib +import os +import uuid +from typing import Optional + +from starlette.requests import Request +from starlette.responses import JSONResponse, Response +from starlette.routing import Route + +from azure.ai.agentserver.core import get_logger, AgentHost, TracingHelper + +logger = get_logger() + +server = AgentHost() + +# Access the tracing helper from the server (None if tracing is disabled) +tracing: Optional[TracingHelper] = server.tracing + + +async def invoke(request: Request) -> Response: + """POST /invocations — handle an invocation request with tracing. + + Demonstrates using TracingHelper to create spans, set attributes, + record errors, and propagate W3C trace context. 
+ """ + invocation_id = request.headers.get("x-agent-invocation-id") or str(uuid.uuid4()) + session_id = ( + request.query_params.get("agent_session_id") + or os.environ.get("FOUNDRY_AGENT_SESSION_ID") + or str(uuid.uuid4()) + ) + + # Create a traced span that covers the entire request. + # When tracing is disabled, request_span yields None and is a no-op. + if tracing is not None: + span_cm = tracing.request_span( + headers=request.headers, + invocation_id=invocation_id, + span_operation="invoke_agent", + operation_name="invoke_agent", + session_id=session_id, + ) + else: + span_cm = contextlib.nullcontext(None) + + with span_cm as otel_span: + logger.info("Processing invocation %s in session %s", invocation_id, session_id) + + try: + data = await request.json() + name = data.get("name", "World") + result = {"greeting": f"Hello, {name}!"} + except Exception as exc: + # Record the error on the span if tracing is active + if tracing is not None and otel_span is not None: + tracing.record_error(otel_span, exc) + logger.error("Invocation %s failed: %s", invocation_id, exc) + raise + + return JSONResponse( + result, + headers={ + "x-agent-invocation-id": invocation_id, + "x-agent-session-id": session_id, + }, + ) + + +server.register_routes([ + Route("/invocations", invoke, methods=["POST"]), +]) + +if __name__ == "__main__": + server.run() diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py b/sdk/agentserver/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py deleted file mode 100644 index 3d4187a188f2..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py +++ /dev/null @@ -1,104 +0,0 @@ -# mypy: ignore-errors -import datetime - -from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent -from azure.ai.agentserver.core.models import Response as OpenAIResponse -from azure.ai.agentserver.core.models.projects import ( - 
ItemContentOutputText, - ResponseCompletedEvent, - ResponseCreatedEvent, - ResponseOutputItemAddedEvent, - ResponsesAssistantMessageItemResource, - ResponseTextDeltaEvent, - ResponseTextDoneEvent, -) - - -def stream_events(text: str, context: AgentRunContext): - item_id = context.id_generator.generate_message_id() - - assembled = "" - yield ResponseCreatedEvent(response=OpenAIResponse(output=[])) - yield ResponseOutputItemAddedEvent( - output_index=0, - item=ResponsesAssistantMessageItemResource( - id=item_id, - status="in_progress", - content=[ - ItemContentOutputText( - text="", - annotations=[], - ) - ], - ), - ) - for i, token in enumerate(text.split(" ")): - piece = token if i == len(text.split(" ")) - 1 else token + " " - assembled += piece - yield ResponseTextDeltaEvent(output_index=0, content_index=0, delta=piece) - # Done with text - yield ResponseTextDoneEvent(output_index=0, content_index=0, text=assembled) - yield ResponseCompletedEvent( - response=OpenAIResponse( - metadata={}, - temperature=0.0, - top_p=0.0, - user="me", - id=context.response_id, - created_at=datetime.datetime.now(), - output=[ - ResponsesAssistantMessageItemResource( - id=item_id, - status="completed", - content=[ - ItemContentOutputText( - text=assembled, - annotations=[], - ) - ], - ) - ], - ) - ) - - -async def agent_run(context: AgentRunContext): - agent = context.request.get("agent") - print(f"agent:{agent}") - - if context.stream: - return stream_events( - "I am mock agent with no intelligence in stream mode.", context - ) - - # Build assistant output content - output_content = [ - ItemContentOutputText( - text="I am mock agent with no intelligence.", - annotations=[], - ) - ] - - response = OpenAIResponse( - metadata={}, - temperature=0.0, - top_p=0.0, - user="me", - id=context.response_id, - created_at=datetime.datetime.now(), - output=[ - ResponsesAssistantMessageItemResource( - id=context.id_generator.generate_message_id(), - status="completed", - content=output_content, - 
) - ], - ) - return response - - -my_agent = FoundryCBAgent() -my_agent.agent_run = agent_run - -if __name__ == "__main__": - my_agent.run() diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/simple_mock_agent/requirements.txt b/sdk/agentserver/azure-ai-agentserver-core/samples/simple_mock_agent/requirements.txt deleted file mode 100644 index 3f2b4e9ee6b4..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/samples/simple_mock_agent/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -azure-ai-agentserver-core diff --git a/sdk/agentserver/azure-ai-agentserver-core/tests/conftest.py b/sdk/agentserver/azure-ai-agentserver-core/tests/conftest.py index e84bdfff3bd7..e1e8e071bf7e 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/tests/conftest.py +++ b/sdk/agentserver/azure-ai-agentserver-core/tests/conftest.py @@ -1,456 +1,26 @@ -""" -Pytest configuration for samples gated tests. - -This file automatically loads environment variables from .env file -and provides shared test fixtures. -""" - -import json -import logging -import os -import socket -import subprocess -import sys -import time -from pathlib import Path -from typing import Any, Dict, Optional - +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Shared fixtures for azure-ai-agentserver-core tests.""" import pytest -import requests -from dotenv import load_dotenv - -# Load .env file from project root or current directory -# conftest.py is at: src/adapter/python/tests/gated_test/conftest.py -# Need to go up 6 levels to reach project root -project_root = Path(__file__).parent.parent -env_paths = [ - project_root / ".env", # Project root - Path.cwd() / ".env", # Current working directory - Path(__file__).parent / ".env", # Test directory -] - -for env_path in env_paths: - if env_path.exists(): - load_dotenv(env_path, override=True) - break - -# Setup logging -logging.basicConfig( - level=logging.DEBUG, - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", - handlers=[logging.StreamHandler(sys.stdout)], -) -logger = logging.getLogger(__name__) - - -class AgentTestClient: - """Generic test client for all agent types.""" - - def __init__( - self, - sample_name: str, - script_name: str, - endpoint: str = "/responses", # Default endpoint - base_url: Optional[str] = None, - env_vars: Optional[Dict[str, str]] = None, - timeout: int = 120, - port: Optional[int] = None, - ): - self.sample_name = sample_name - self.script_name = script_name - self.endpoint = endpoint - self.timeout = timeout - - # Setup paths - self.project_root = project_root # Use already defined project_root - self.sample_dir = self.project_root / "samples" / sample_name - self.original_dir = os.getcwd() - - # Determine port assignment priority: explicit param > env override > random - if env_vars and env_vars.get("DEFAULT_AD_PORT"): - self.port = int(env_vars["DEFAULT_AD_PORT"]) - elif port is not None: - self.port = port - else: - self.port = self._find_free_port() - - # Configure base URL for client requests - self.base_url = (base_url or f"http://127.0.0.1:{self.port}").rstrip("/") - - # Setup environment - # Get Agent Framework configuration (new format) - 
azure_ai_project_endpoint = os.getenv("AZURE_AI_PROJECT_ENDPOINT", "") - azure_ai_model_deployment = os.getenv("AZURE_AI_MODEL_DEPLOYMENT_NAME", "") - agent_project_name = os.getenv("AGENT_PROJECT_NAME", "") - - # Get legacy Azure OpenAI configuration (for backward compatibility) - main_api_key = os.getenv("AZURE_OPENAI_API_KEY", "") - main_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT", "") - main_api_version = os.getenv("OPENAI_API_VERSION", "2025-03-01-preview") - embedding_api_version = os.getenv("AZURE_OPENAI_EMBEDDINGS_API_VERSION", "2024-02-01") - - self.env_vars = { - "PYTHONIOENCODING": "utf-8", - "LANG": "C.UTF-8", - "LC_ALL": "C.UTF-8", - "PYTHONUNBUFFERED": "1", - # Agent Framework environment variables (new) - "AZURE_AI_PROJECT_ENDPOINT": azure_ai_project_endpoint, - "AZURE_AI_MODEL_DEPLOYMENT_NAME": azure_ai_model_deployment, - "AGENT_PROJECT_NAME": agent_project_name, - # Legacy Azure OpenAI environment variables (for backward compatibility) - "AZURE_OPENAI_API_KEY": main_api_key, - "AZURE_OPENAI_ENDPOINT": main_endpoint, - "AZURE_OPENAI_CHAT_DEPLOYMENT_NAME": os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", ""), - "OPENAI_API_VERSION": main_api_version, - } - - # Auto-configure embeddings to use main config if not explicitly set - # This allows using the same Azure OpenAI resource for both chat and embeddings - self.env_vars["AZURE_OPENAI_EMBEDDINGS_API_KEY"] = os.getenv( - "AZURE_OPENAI_EMBEDDINGS_API_KEY", - main_api_key, # Fallback to main API key - ) - self.env_vars["AZURE_OPENAI_EMBEDDINGS_ENDPOINT"] = os.getenv( - "AZURE_OPENAI_EMBEDDINGS_ENDPOINT", - main_endpoint, # Fallback to main endpoint - ) - self.env_vars["AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME"] = os.getenv( - "AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME", "" - ) - self.env_vars["AZURE_OPENAI_EMBEDDINGS_API_VERSION"] = os.getenv( - "AZURE_OPENAI_EMBEDDINGS_API_VERSION", - embedding_api_version, # Fallback to main API version - ) - self.env_vars["AZURE_OPENAI_EMBEDDINGS_MODEL_NAME"] = 
os.getenv( - "AZURE_OPENAI_EMBEDDINGS_MODEL_NAME", - os.getenv("AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME", ""), # Fallback to deployment name - ) - - if env_vars: - self.env_vars.update(env_vars) - - # Ensure server picks the dynamically assigned port and clients know how to reach it - self.env_vars.setdefault("DEFAULT_AD_PORT", str(self.port)) - self.env_vars.setdefault("AGENT_BASE_URL", self.base_url) - - self.process = None - self.session = requests.Session() - - @staticmethod - def _find_free_port() -> int: - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: - sock.bind(("127.0.0.1", 0)) - return sock.getsockname()[1] - - def setup(self): - """Setup test environment.""" - os.chdir(self.sample_dir) - - logger.info( - "Configured %s to listen on %s", - self.sample_name, - f"{self.base_url}{self.endpoint}", - ) - - # Validate critical environment variables - # For Agent Framework samples, check new env vars first - required_vars = [] - if "agent_framework" in self.sample_name: - # Agent Framework samples use new format - required_vars = [ - "AZURE_AI_PROJECT_ENDPOINT", - "AZURE_AI_MODEL_DEPLOYMENT_NAME", - ] - else: - # Legacy samples use old format - required_vars = [ - "AZURE_OPENAI_API_KEY", - "AZURE_OPENAI_ENDPOINT", - "AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", - ] - - missing_vars = [] - for var in required_vars: - value = self.env_vars.get(var) or os.getenv(var) - if not value: - missing_vars.append(var) - else: - logger.debug(f"Environment variable {var} is set") - - if missing_vars: - logger.error(f"Missing required environment variables: {', '.join(missing_vars)}") - logger.error(f"Sample name: {self.sample_name}") - if "agent_framework" in self.sample_name: - logger.error("For Agent Framework samples, please set:") - logger.error(" - AZURE_AI_PROJECT_ENDPOINT") - logger.error(" - AZURE_AI_MODEL_DEPLOYMENT_NAME") - pytest.skip(f"Missing required environment variables: {', '.join(missing_vars)}") - - # Set environment variables - for key, value in 
self.env_vars.items(): - if value: # Only set if value is not empty - os.environ[key] = value - - # Start server - self.start_server() - - # Wait for server to be ready - if not self.wait_for_ready(): - self.cleanup() - logger.error(f"{self.sample_name} server failed to start") - pytest.skip(f"{self.sample_name} server failed to start") - - def start_server(self): - """Start the agent server.""" - logger.info( - "Starting %s server in %s on port %s", - self.sample_name, - self.sample_dir, - self.port, - ) - - env = os.environ.copy() - env.update(self.env_vars) - env["DEFAULT_AD_PORT"] = str(self.port) - env.setdefault("AGENT_BASE_URL", self.base_url) +import httpx - # Use unbuffered output to capture logs in real-time - self.process = subprocess.Popen( - [sys.executable, "-u", self.script_name], # -u for unbuffered output - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, # Merge stderr into stdout - env=env, - text=True, - encoding="utf-8", - errors="replace", - bufsize=1, # Line buffered - ) - logger.info(f"Server process started with PID {self.process.pid}") +from azure.ai.agentserver.core import AgentHost - def wait_for_ready(self, max_attempts: int = 30, delay: float = 1.0) -> bool: - """Wait for server to be ready.""" - logger.info( - "Waiting for server to be ready at %s (max %s attempts)", - f"{self.base_url}{self.endpoint}", - max_attempts, - ) - for i in range(max_attempts): - # Check process status first - if self.process.poll() is not None: - # Process has terminated - read all output - stdout, stderr = self.process.communicate() - logger.error(f"Server terminated with code {self.process.returncode}") - logger.error("=== SERVER OUTPUT ===") - if stdout: - logger.error(stdout) - if stderr: - logger.error("=== STDERR ===") - logger.error(stderr) - return False +@pytest.fixture() +def agent() -> AgentHost: + """Create a bare AgentHost with no protocol routes. 
- # Read and log any available output - self._log_server_output() - - # Check health endpoint - try: - health_response = self.session.get(f"{self.base_url}/readiness", timeout=2) - if health_response.status_code == 200: - logger.info(f"Server ready after {i + 1} attempts") - return True - else: - logger.debug(f"Health check attempt {i + 1}: status {health_response.status_code}") - except Exception as e: - logger.debug(f"Health check attempt {i + 1} failed: {e}") - # After several failed attempts, show server output for debugging - if i > 5 and i % 5 == 0: - logger.warning(f"Server still not ready after {i + 1} attempts, checking output...") - self._log_server_output(force=True) - - time.sleep(delay) - - # Timeout reached - dump all server output - logger.error(f"Server failed to start within {max_attempts} attempts") - self._dump_server_output() - return False - - def cleanup(self): - """Cleanup resources.""" - if self.process: - try: - self.process.terminate() - self.process.wait(timeout=5) - except Exception: - self.process.kill() - - os.chdir(self.original_dir) - - def request( - self, - input_data: Any, - stream: bool = False, - timeout: Optional[int] = None, - debug: bool = False, - ) -> requests.Response: - """Send request to the server.""" - url = f"{self.base_url}{self.endpoint}" - timeout = timeout or self.timeout - - payload = {"input": input_data, "stream": stream} - - headers = { - "Content-Type": "application/json; charset=utf-8", - "Accept": "application/json; charset=utf-8", - } - - if debug: - logger.info(f">>> POST {url}") - logger.info(f">>> Headers: {headers}") - logger.info(f">>> Payload: {json.dumps(payload, indent=2)}") - - try: - response = self.session.post(url, json=payload, headers=headers, timeout=timeout, stream=stream) - - if debug: - logger.info(f"<<< Status: {response.status_code}") - logger.info(f"<<< Headers: {dict(response.headers)}") - - # For non-streaming responses, log the body - if not stream: - try: - content = 
response.json() - logger.info(f"<<< Body: {json.dumps(content, indent=2)}") - except (ValueError, requests.exceptions.JSONDecodeError): - logger.info(f"<<< Body: {response.text}") - - return response - - except Exception as e: - logger.error(f"Request failed: {e}") - self._log_server_output() - raise - - def _log_server_output(self, force=False): - """Log server output for debugging.""" - if self.process and self.process.poll() is None and hasattr(self.process, "stdout"): - try: - import select - - if hasattr(select, "select"): - # Use non-blocking read - ready, _, _ = select.select([self.process.stdout], [], [], 0.1) - if ready: - # Read available lines without blocking - import fcntl - import os as os_module - - # Set non-blocking mode - fd = self.process.stdout.fileno() - fl = fcntl.fcntl(fd, fcntl.F_GETFL) - fcntl.fcntl(fd, fcntl.F_SETFL, fl | os_module.O_NONBLOCK) - - try: - while True: - line = self.process.stdout.readline() - if not line: - break - line = line.strip() - if line: - if force or any( - keyword in line.lower() - for keyword in [ - "error", - "exception", - "traceback", - "failed", - ] - ): - logger.error(f"Server output: {line}") - else: - logger.info(f"Server output: {line}") - except BlockingIOError: - pass # No more data available - except Exception as e: - if force: - logger.debug(f"Could not read server output: {e}") - - def _dump_server_output(self): - """Dump all remaining server output.""" - if self.process: - try: - # Try to read any remaining output - if self.process.poll() is None: - # Process still running, terminate and get output - self.process.terminate() - try: - stdout, stderr = self.process.communicate(timeout=5) - except subprocess.TimeoutExpired: - self.process.kill() - stdout, stderr = self.process.communicate() - else: - stdout, stderr = self.process.communicate() - - if stdout: - logger.error(f"=== FULL SERVER OUTPUT ===\n{stdout}") - if stderr: - logger.error(f"=== FULL SERVER STDERR ===\n{stderr}") - except Exception as 
e: - logger.error(f"Failed to dump server output: {e}") - - -@pytest.fixture -def basic_client(): - """Fixture for basic agent tests.""" - client = AgentTestClient( - sample_name="agent_framework/basic_simple", - script_name="minimal_example.py", - endpoint="/responses", - timeout=60, - ) - client.setup() - yield client - client.cleanup() - - -@pytest.fixture -def workflow_client(): - """Fixture for workflow agent tests (reflection pattern with Worker + Reviewer).""" - client = AgentTestClient( - sample_name="agent_framework/workflow_agent_simple", - script_name="workflow_agent_simple.py", - endpoint="/responses", # Changed from /runs to /responses - timeout=600, # Increased timeout for workflow agent (reflection loop may need multiple iterations) - ) - client.setup() - yield client - client.cleanup() - - -@pytest.fixture -def mcp_client(): - """Fixture for MCP simple agent tests (uses Microsoft Learn MCP, no auth required).""" - client = AgentTestClient( - sample_name="agent_framework/mcp_simple", - script_name="mcp_simple.py", - endpoint="/responses", # Changed from /runs to /responses - timeout=120, - ) - client.setup() - yield client - client.cleanup() + Tracing is disabled to avoid requiring opentelemetry in the test env. 
+ """ + return AgentHost() -@pytest.fixture -def mcp_apikey_client(): - """Fixture for MCP API Key agent tests (uses GitHub MCP, requires GITHUB_TOKEN).""" - client = AgentTestClient( - sample_name="agent_framework/mcp_apikey", - script_name="mcp_apikey.py", - endpoint="/responses", # Changed from /runs to /responses - timeout=120, - env_vars={"GITHUB_TOKEN": os.getenv("GITHUB_TOKEN", "")}, +@pytest.fixture() +def client(agent: AgentHost) -> httpx.AsyncClient: + """Create an httpx.AsyncClient bound to the AgentHost's ASGI app.""" + return httpx.AsyncClient( + transport=httpx.ASGITransport(app=agent.app), + base_url="http://testserver", ) - client.setup() - yield client - client.cleanup() diff --git a/sdk/agentserver/azure-ai-agentserver-core/tests/env-template b/sdk/agentserver/azure-ai-agentserver-core/tests/env-template deleted file mode 100644 index 33c60226b90b..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/tests/env-template +++ /dev/null @@ -1,31 +0,0 @@ -# ===== Agent Framework Configuration (NEW - Required for agent_framework samples) ===== -# Required for all Agent Framework samples (basic_simple, mcp_simple, mcp_apikey, workflow_agent_simple) -AZURE_AI_PROJECT_ENDPOINT=https://.region.project.azure.ai/ -AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o - -# Optional: Azure AI Project resource ID for telemetry -# Format: /subscriptions//resourceGroups//providers/Microsoft.MachineLearningServices/workspaces/ -AGENT_PROJECT_NAME= - -# GitHub Token for MCP samples (mcp_simple, mcp_apikey) -# Get from: https://github.com/settings/tokens -GITHUB_TOKEN=your-github-token-here - -# ===== Legacy Azure OpenAI Configuration (For backward compatibility) ===== -AZURE_OPENAI_API_KEY=your-api-key-here -AZURE_OPENAI_ENDPOINT=https://your-endpoint.openai.azure.com/ -AZURE_OPENAI_CHAT_DEPLOYMENT_NAME=gpt-4o -OPENAI_API_VERSION=2025-03-01-preview - -# Azure OpenAI Embeddings Configuration (for RAG tests) -# If not set, will use the same values as Chat API 
-AZURE_OPENAI_EMBEDDINGS_API_KEY=your-embeddings-api-key-here -AZURE_OPENAI_EMBEDDINGS_ENDPOINT=https://your-endpoint.openai.azure.com/ -AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME=text-embedding-ada-002 -AZURE_OPENAI_EMBEDDINGS_API_VERSION=2025-03-01-preview - -# Note: -# - Copy this file to .env and fill in your actual values -# - Never commit .env file to git (it's in .gitignore) -# - In CI/CD, these values are loaded from GitHub Secrets - diff --git a/sdk/agentserver/azure-ai-agentserver-core/tests/test_custom.py b/sdk/agentserver/azure-ai-agentserver-core/tests/test_custom.py deleted file mode 100644 index f8f2075e22e5..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/tests/test_custom.py +++ /dev/null @@ -1,298 +0,0 @@ -#!/usr/bin/env python3 -""" -Custom agents samples gated test. - -This module tests all Custom agent samples with parametrized test cases. -Each sample gets its own test class with multiple test scenarios. -""" - -import os -import socket -import subprocess -import sys -import time -from pathlib import Path -from typing import Any - -import pytest -import requests - -# Add the project root to the path -project_root = Path(__file__).parent.parent -sys.path.insert(0, str(project_root)) - - -class BaseCustomAgentTest: - """Base class for Custom agent sample tests with common utilities.""" - - def __init__(self, sample_name: str, script_name: str): - """ - Initialize test configuration. 
- - Args: - sample_name: Name of the sample directory (e.g., 'simple_mock_agent') - script_name: Name of the Python script to run (e.g., 'custom_mock_agent_test.py') - """ - self.sample_name = sample_name - self.script_name = script_name - self.sample_dir = project_root / "samples" / sample_name - self.port = self._find_free_port() - self.base_url = f"http://127.0.0.1:{self.port}" - self.responses_endpoint = f"{self.base_url}/responses" - self.process = None - self.original_dir = os.getcwd() - - def setup(self): - """Set up environment (dependencies are pre-installed in CI/CD).""" - os.chdir(self.sample_dir) - - def start_server(self): - """Start the agent server in background.""" - # Prepare environment with UTF-8 encoding to handle emoji in agent output - env = os.environ.copy() - env["PYTHONIOENCODING"] = "utf-8" - env["DEFAULT_AD_PORT"] = str(self.port) - env.setdefault("AGENT_BASE_URL", self.base_url) - - # Use subprocess.DEVNULL to avoid buffering issues - self.process = subprocess.Popen( - [sys.executable, self.script_name], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - env=env, - ) - - def wait_for_ready(self, max_attempts: int = 30, delay: float = 1.0) -> bool: - """Wait for the server to be ready.""" - for _i in range(max_attempts): - # Check if process is still running - if self.process and self.process.poll() is not None: - # Process has terminated - print(f"Server process terminated unexpectedly with exit code {self.process.returncode}") - return False - - try: - response = requests.get(f"{self.base_url}/readiness", timeout=1) - if response.status_code == 200: - return True - except requests.exceptions.RequestException: - pass - - try: - response = requests.get(self.base_url, timeout=1) - if response.status_code in [200, 404]: - return True - except requests.exceptions.RequestException: - pass - - time.sleep(delay) - - # Server didn't start - print diagnostics - if self.process: - self.process.terminate() - stdout, stderr = 
self.process.communicate(timeout=5) - print(f"Server failed to start. Logs:\n{stdout}\nErrors:\n{stderr}") - - return False - - def send_request(self, input_data: Any, stream: bool = False, timeout: int = 30) -> requests.Response: - """ - Send a request to the agent. - - Args: - input_data: Input to send (string or structured message) - stream: Whether to use streaming - timeout: Request timeout in seconds - - Returns: - Response object - """ - payload = { - "agent": {"name": "mock_agent", "type": "agent_reference"}, - "input": input_data, - "stream": stream, - } - - # Note: Only set stream parameter for requests.post if streaming is requested - # Otherwise, let requests handle response body reading with timeout - if stream: - return requests.post(self.responses_endpoint, json=payload, timeout=timeout, stream=True) - else: - return requests.post(self.responses_endpoint, json=payload, timeout=timeout) - - def cleanup(self): - """Clean up resources and restore directory.""" - if self.process: - try: - self.process.terminate() - self.process.wait(timeout=5) - except Exception: - self.process.kill() - - os.chdir(self.original_dir) - - @staticmethod - def _find_free_port() -> int: - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: - sock.bind(("127.0.0.1", 0)) - return sock.getsockname()[1] - - -class TestSimpleMockAgent: - """Test suite for Simple Mock Agent - uses shared server.""" - - @pytest.fixture(scope="class") - def mock_server(self): - """Shared server instance for all mock agent tests.""" - tester = BaseCustomAgentTest("simple_mock_agent", "custom_mock_agent_test.py") - tester.setup() - tester.start_server() - - if not tester.wait_for_ready(): - tester.cleanup() - pytest.fail("Simple Mock Agent server failed to start") - - yield tester - tester.cleanup() - - @pytest.mark.parametrize( - "input_text,expected_keywords,description", - [ - ("Hello, mock agent!", ["mock"], "simple_greeting"), - ("Test message", ["mock"], "test_message"), - ("What can 
you do?", ["mock"], "capability_query"), - ], - ) - def test_mock_agent_queries(self, mock_server, input_text: str, expected_keywords: list, description: str): - """Test mock agent with various queries.""" - response = mock_server.send_request(input_text, stream=False) - - assert response.status_code == 200, f"Expected 200, got {response.status_code}" - - response_text = response.text.lower() - found_keyword = any(kw.lower() in response_text for kw in expected_keywords) - assert found_keyword, f"Expected one of {expected_keywords} in response" - - def test_streaming_response(self, mock_server): - """Test mock agent with streaming response.""" - response = mock_server.send_request("Hello, streaming test!", stream=True) - - assert response.status_code == 200, f"Expected 200, got {response.status_code}" - - # Verify we can read streaming data - lines_read = 0 - for line in response.iter_lines(): - if line: - lines_read += 1 - if lines_read >= 3: - break - - assert lines_read > 0, "Expected to read at least one line from streaming response" - - -@pytest.mark.skip -class TestMcpSimple: - """Test suite for Custom MCP Simple - uses Microsoft Learn MCP.""" - - @pytest.fixture(scope="class") - def mcp_server(self): - """Shared server instance for all MCP Simple tests.""" - tester = BaseCustomAgentTest("mcp_simple", "mcp_simple.py") - tester.setup() - tester.start_server() - - if not tester.wait_for_ready(): - tester.cleanup() - pytest.fail("MCP Simple server failed to start") - - yield tester - tester.cleanup() - - @pytest.mark.parametrize( - "input_text,expected_keywords,description", - [ - ( - "What Azure services can I use for image generation?", - ["image", "generation", "azure"], - "image_generation", - ), - ( - "Show me documentation about Azure App Service", - ["app", "service", "azure"], - "app_service_docs", - ), - ], - ) - def test_mcp_operations(self, mcp_server, input_text: str, expected_keywords: list, description: str): - """Test MCP Simple with Microsoft 
Learn queries.""" - response = mcp_server.send_request(input_text, stream=False, timeout=60) - - assert response.status_code == 200, f"Expected 200, got {response.status_code}" - - response_text = response.text.lower() - found_keyword = any(kw.lower() in response_text for kw in expected_keywords) - assert found_keyword, f"Expected one of {expected_keywords} in response" - - -@pytest.mark.skip -class TestBilingualWeekendPlanner: - """Test suite for the bilingual weekend planner custom sample.""" - - @pytest.fixture(scope="class") - def weekend_planner_server(self): - """Shared server fixture for bilingual weekend planner tests.""" - pytest.importorskip("azure.identity") - pytest.importorskip("agents") - pytest.importorskip("openai") - - tester = BaseCustomAgentTest("bilingual_weekend_planner", "main.py") - tester.setup() - - env_overrides = { - "API_HOST": "github", - "GITHUB_TOKEN": os.environ.get("GITHUB_TOKEN", "unit-test-token"), - "GITHUB_OPENAI_BASE_URL": os.environ.get("GITHUB_OPENAI_BASE_URL", "http://127.0.0.1:65535"), - "WEEKEND_PLANNER_MODE": "container", - } - original_env = {key: os.environ.get(key) for key in env_overrides} - os.environ.update(env_overrides) - - try: - tester.start_server() - - if not tester.wait_for_ready(max_attempts=60, delay=1.0): - tester.cleanup() - pytest.fail("Bilingual weekend planner server failed to start") - - yield tester - finally: - tester.cleanup() - for key, value in original_env.items(): - if value is None: - os.environ.pop(key, None) - else: - os.environ[key] = value - - def test_offline_planner_response(self, weekend_planner_server): - """Verify the planner responds with a graceful error when the model is unreachable.""" - response = weekend_planner_server.send_request("Plan my weekend in Seattle", stream=False, timeout=60) - - assert response.status_code == 200, f"Expected 200, got {response.status_code}" - - response_text = response.text.lower() - assert "error running agent" in response_text - - def 
test_streaming_offline_response(self, weekend_planner_server): - """Verify streaming responses deliver data even when the model call fails.""" - response = weekend_planner_server.send_request("Planifica mi fin de semana en Madrid", stream=True, timeout=60) - - assert response.status_code == 200, f"Expected 200, got {response.status_code}" - - lines_read = 0 - for line in response.iter_lines(): - if line: - lines_read += 1 - if lines_read >= 3: - break - - assert lines_read > 0, "Expected to read at least one line from streaming response" diff --git a/sdk/agentserver/azure-ai-agentserver-core/tests/test_edge_cases.py b/sdk/agentserver/azure-ai-agentserver-core/tests/test_edge_cases.py new file mode 100644 index 000000000000..643e8211d992 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/tests/test_edge_cases.py @@ -0,0 +1,103 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Hosting-specific edge-case tests.""" +import logging + +import pytest +import httpx + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.core._config import resolve_log_level + + +# ------------------------------------------------------------------ # +# POST /readiness → 405 +# ------------------------------------------------------------------ # + + +@pytest.fixture() +def client() -> httpx.AsyncClient: + agent = AgentHost() + return httpx.AsyncClient( + transport=httpx.ASGITransport(app=agent.app), + base_url="http://testserver", + ) + + +@pytest.mark.asyncio +async def test_post_readiness_returns_405(client: httpx.AsyncClient) -> None: + """POST /readiness is method-not-allowed.""" + resp = await client.post("/readiness") + assert resp.status_code == 405 + + +# ------------------------------------------------------------------ # +# Log level via constructor +# 
------------------------------------------------------------------ # + + +class TestLogLevelConstructor: + """Log-level configuration via the AgentHost constructor.""" + + def test_log_level_via_constructor(self) -> None: + AgentHost(log_level="DEBUG") # side-effect: configures logger + lib_logger = logging.getLogger("azure.ai.agentserver") + assert lib_logger.level == logging.DEBUG + + def test_log_level_warning_via_constructor(self) -> None: + AgentHost(log_level="WARNING") # side-effect: configures logger + lib_logger = logging.getLogger("azure.ai.agentserver") + assert lib_logger.level == logging.WARNING + + def test_log_level_case_insensitive(self) -> None: + AgentHost(log_level="error") # side-effect: configures logger + lib_logger = logging.getLogger("azure.ai.agentserver") + assert lib_logger.level == logging.ERROR + + +# ------------------------------------------------------------------ # +# Log level via env var +# ------------------------------------------------------------------ # + + +class TestInvalidLogLevel: + """Invalid log levels are rejected with ValueError.""" + + def test_invalid_log_level_raises(self) -> None: + with pytest.raises(ValueError, match="Invalid log level"): + AgentHost(log_level="TRACE") + + +# ------------------------------------------------------------------ # +# resolve_log_level unit tests +# ------------------------------------------------------------------ # + + +class TestResolveLogLevel: + """Unit tests for resolve_log_level().""" + + def test_explicit_debug(self) -> None: + assert resolve_log_level("DEBUG") == "DEBUG" + + def test_explicit_info(self) -> None: + assert resolve_log_level("INFO") == "INFO" + + def test_explicit_warning(self) -> None: + assert resolve_log_level("WARNING") == "WARNING" + + def test_explicit_error(self) -> None: + assert resolve_log_level("ERROR") == "ERROR" + + def test_explicit_critical(self) -> None: + assert resolve_log_level("CRITICAL") == "CRITICAL" + + def test_case_insensitive(self) -> 
None: + assert resolve_log_level("debug") == "DEBUG" + + def test_invalid_raises(self) -> None: + with pytest.raises(ValueError, match="Invalid log level"): + resolve_log_level("TRACE") + + def test_default_info(self) -> None: + assert resolve_log_level(None) == "INFO" diff --git a/sdk/agentserver/azure-ai-agentserver-core/tests/test_graceful_shutdown.py b/sdk/agentserver/azure-ai-agentserver-core/tests/test_graceful_shutdown.py new file mode 100644 index 000000000000..3a68d444790d --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/tests/test_graceful_shutdown.py @@ -0,0 +1,305 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Tests for graceful-shutdown configuration, lifecycle, and handler dispatch.""" +import asyncio +import logging +import os +from unittest import mock + +import pytest + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.core._config import resolve_graceful_shutdown_timeout, _DEFAULT_GRACEFUL_SHUTDOWN_TIMEOUT + + +# ------------------------------------------------------------------ # +# Timeout resolution: explicit > env > default (30s) +# ------------------------------------------------------------------ # + + +class TestResolveGracefulShutdownTimeout: + """Tests for resolve_graceful_shutdown_timeout().""" + + def test_explicit_wins(self) -> None: + assert resolve_graceful_shutdown_timeout(10) == 10 + + def test_default(self) -> None: + assert resolve_graceful_shutdown_timeout(None) == _DEFAULT_GRACEFUL_SHUTDOWN_TIMEOUT + + def test_non_int_explicit_raises(self) -> None: + with pytest.raises(ValueError, match="expected an integer"): + resolve_graceful_shutdown_timeout("ten") # type: ignore[arg-type] + + def test_negative_explicit_clamps_to_zero(self) -> None: + assert resolve_graceful_shutdown_timeout(-5) == 0 + + def test_zero_explicit(self) -> None: + assert 
resolve_graceful_shutdown_timeout(0) == 0 + + +# ------------------------------------------------------------------ # +# Hypercorn config receives graceful_timeout +# ------------------------------------------------------------------ # + + +class TestHypercornConfig: + """Verify _build_hypercorn_config passes the resolved timeout to Hypercorn.""" + + def test_sync_run_passes_timeout(self) -> None: + agent = AgentHost(graceful_shutdown_timeout=15) + config = agent._build_hypercorn_config("127.0.0.1", 8000) + assert config.graceful_timeout == 15.0 + + def test_async_run_passes_timeout(self) -> None: + agent = AgentHost(graceful_shutdown_timeout=25) + config = agent._build_hypercorn_config("0.0.0.0", 9000) + assert config.graceful_timeout == 25.0 + + def test_default_timeout_in_config(self) -> None: + env = os.environ.copy() + env.pop("AGENT_GRACEFUL_SHUTDOWN_TIMEOUT", None) + with mock.patch.dict(os.environ, env, clear=True): + agent = AgentHost() + config = agent._build_hypercorn_config("0.0.0.0", 8088) + assert config.graceful_timeout == float(_DEFAULT_GRACEFUL_SHUTDOWN_TIMEOUT) + + +# ------------------------------------------------------------------ # +# Lifespan shutdown logging +# ------------------------------------------------------------------ # + + +@pytest.mark.asyncio +async def test_lifespan_shutdown_logs(caplog: pytest.LogCaptureFixture) -> None: + """The lifespan shutdown phase logs the graceful timeout.""" + agent = AgentHost(graceful_shutdown_timeout=7) + + # Drive the lifespan manually via the ASGI interface. 
+ scope = {"type": "lifespan"} + startup_complete = asyncio.Event() + shutdown_complete = asyncio.Event() + + async def receive(): + if not startup_complete.is_set(): + startup_complete.set() + return {"type": "lifespan.startup"} + await asyncio.sleep(0) + return {"type": "lifespan.shutdown"} + + async def send(message): + if message["type"] == "lifespan.shutdown.complete": + shutdown_complete.set() + + with caplog.at_level(logging.INFO, logger="azure.ai.agentserver"): + await agent.app(scope, receive, send) + + assert any("shutting down" in r.message.lower() for r in caplog.records) + assert any("7" in r.message for r in caplog.records) + + +# ------------------------------------------------------------------ # +# Shutdown handler decorator +# ------------------------------------------------------------------ # + + +@pytest.mark.asyncio +async def test_shutdown_handler_called() -> None: + """The function registered via @shutdown_handler is called during shutdown.""" + agent = AgentHost(graceful_shutdown_timeout=5) + called = False + + @agent.shutdown_handler + async def on_shutdown(): + nonlocal called + called = True + + # Drive lifespan + scope = {"type": "lifespan"} + startup_done = asyncio.Event() + shutdown_done = asyncio.Event() + + async def receive(): + if not startup_done.is_set(): + startup_done.set() + return {"type": "lifespan.startup"} + await asyncio.sleep(0) + return {"type": "lifespan.shutdown"} + + async def send(message): + if message["type"] == "lifespan.shutdown.complete": + shutdown_done.set() + + await agent.app(scope, receive, send) + assert called is True + + +@pytest.mark.asyncio +async def test_default_shutdown_is_noop() -> None: + """When no shutdown handler is registered, shutdown succeeds silently.""" + agent = AgentHost(graceful_shutdown_timeout=5) + + scope = {"type": "lifespan"} + startup_done = asyncio.Event() + shutdown_done = asyncio.Event() + + async def receive(): + if not startup_done.is_set(): + startup_done.set() + return 
{"type": "lifespan.startup"} + await asyncio.sleep(0) + return {"type": "lifespan.shutdown"} + + async def send(message): + if message["type"] == "lifespan.shutdown.complete": + shutdown_done.set() + + # Should not raise + await agent.app(scope, receive, send) + assert shutdown_done.is_set() + + +# ------------------------------------------------------------------ # +# Failing shutdown is logged, not raised +# ------------------------------------------------------------------ # + + +@pytest.mark.asyncio +async def test_failing_shutdown_is_logged(caplog: pytest.LogCaptureFixture) -> None: + """A shutdown handler that raises is logged but does not crash the server.""" + agent = AgentHost(graceful_shutdown_timeout=5) + + @agent.shutdown_handler + async def on_shutdown(): + raise RuntimeError("shutdown kaboom") + + scope = {"type": "lifespan"} + startup_done = asyncio.Event() + + async def receive(): + if not startup_done.is_set(): + startup_done.set() + return {"type": "lifespan.startup"} + await asyncio.sleep(0) + return {"type": "lifespan.shutdown"} + + sent_messages: list[dict] = [] + + async def send(message): + sent_messages.append(message) + + with caplog.at_level(logging.ERROR, logger="azure.ai.agentserver"): + await agent.app(scope, receive, send) + + # The error should be logged + assert any("on_shutdown" in r.message.lower() or "error" in r.message.lower() for r in caplog.records) + # Server should still complete shutdown + assert any(m["type"] == "lifespan.shutdown.complete" for m in sent_messages) + + +# ------------------------------------------------------------------ # +# Slow shutdown is cancelled with warning +# ------------------------------------------------------------------ # + + +@pytest.mark.asyncio +async def test_slow_shutdown_cancelled_with_warning(caplog: pytest.LogCaptureFixture) -> None: + """A shutdown handler exceeding the timeout is cancelled and a warning is logged.""" + agent = AgentHost(graceful_shutdown_timeout=1) + + 
@agent.shutdown_handler + async def on_shutdown(): + await asyncio.sleep(60) # way longer than the 1s timeout + + scope = {"type": "lifespan"} + startup_done = asyncio.Event() + + async def receive(): + if not startup_done.is_set(): + startup_done.set() + return {"type": "lifespan.startup"} + await asyncio.sleep(0) + return {"type": "lifespan.shutdown"} + + sent_messages: list[dict] = [] + + async def send(message): + sent_messages.append(message) + + with caplog.at_level(logging.WARNING, logger="azure.ai.agentserver"): + await agent.app(scope, receive, send) + + assert any("did not complete" in r.message.lower() or "timeout" in r.message.lower() for r in caplog.records) + assert any(m["type"] == "lifespan.shutdown.complete" for m in sent_messages) + + +# ------------------------------------------------------------------ # +# Fast shutdown completes normally +# ------------------------------------------------------------------ # + + +@pytest.mark.asyncio +async def test_fast_shutdown_completes_normally() -> None: + """A shutdown handler that finishes within the timeout completes normally.""" + agent = AgentHost(graceful_shutdown_timeout=10) + completed = False + + @agent.shutdown_handler + async def on_shutdown(): + nonlocal completed + await asyncio.sleep(0.01) + completed = True + + scope = {"type": "lifespan"} + startup_done = asyncio.Event() + + async def receive(): + if not startup_done.is_set(): + startup_done.set() + return {"type": "lifespan.startup"} + await asyncio.sleep(0) + return {"type": "lifespan.shutdown"} + + sent_messages: list[dict] = [] + + async def send(message): + sent_messages.append(message) + + await agent.app(scope, receive, send) + assert completed is True + assert any(m["type"] == "lifespan.shutdown.complete" for m in sent_messages) + + +# ------------------------------------------------------------------ # +# Zero timeout passes None (no timeout) +# ------------------------------------------------------------------ # + + 
+@pytest.mark.asyncio +async def test_zero_timeout_skips_shutdown_handler() -> None: + """When graceful_shutdown_timeout=0, the shutdown handler is skipped.""" + agent = AgentHost(graceful_shutdown_timeout=0) + completed = False + + @agent.shutdown_handler + async def on_shutdown(): + nonlocal completed + completed = True + + scope = {"type": "lifespan"} + startup_done = asyncio.Event() + + async def receive(): + if not startup_done.is_set(): + startup_done.set() + return {"type": "lifespan.startup"} + await asyncio.sleep(0) + return {"type": "lifespan.shutdown"} + + sent_messages: list[dict] = [] + + async def send(message): + sent_messages.append(message) + + await agent.app(scope, receive, send) + assert completed is False # handler was NOT called diff --git a/sdk/agentserver/azure-ai-agentserver-core/tests/test_health.py b/sdk/agentserver/azure-ai-agentserver-core/tests/test_health.py new file mode 100644 index 000000000000..35f1814ea52e --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/tests/test_health.py @@ -0,0 +1,53 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests for the GET /readiness health-check endpoint.""" +import pytest +import httpx + +from azure.ai.agentserver.core import AgentHost + + +@pytest.fixture() +def client() -> httpx.AsyncClient: + agent = AgentHost() + return httpx.AsyncClient( + transport=httpx.ASGITransport(app=agent.app), + base_url="http://testserver", + ) + + +@pytest.mark.asyncio +async def test_readiness_returns_200(client: httpx.AsyncClient) -> None: + """GET /readiness returns 200 with the expected JSON body.""" + resp = await client.get("/readiness") + assert resp.status_code == 200 + assert resp.json() == {"status": "healthy"} + + +@pytest.mark.asyncio +async def test_readiness_content_type(client: httpx.AsyncClient) -> None: + """GET /readiness returns application/json content type.""" + resp = await client.get("/readiness") + assert "application/json" in resp.headers["content-type"] + + +@pytest.mark.asyncio +async def test_readiness_post_returns_405(client: httpx.AsyncClient) -> None: + """POST /readiness is not allowed — only GET is registered.""" + resp = await client.post("/readiness") + assert resp.status_code == 405 + + +@pytest.mark.asyncio +async def test_old_liveness_endpoint_returns_404(client: httpx.AsyncClient) -> None: + """The old /liveness endpoint no longer exists.""" + resp = await client.get("/liveness") + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_old_healthy_endpoint_returns_404(client: httpx.AsyncClient) -> None: + """The old /healthy endpoint no longer exists.""" + resp = await client.get("/healthy") + assert resp.status_code == 404 diff --git a/sdk/agentserver/azure-ai-agentserver-core/tests/test_logger.py b/sdk/agentserver/azure-ai-agentserver-core/tests/test_logger.py new file mode 100644 index 000000000000..a95e4980d530 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/tests/test_logger.py @@ -0,0 +1,19 @@ +# 
--------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Tests for the library-scoped logger.""" +import logging + + +def test_library_logger_exists() -> None: + """The library logger uses the expected dotted name.""" + lib_logger = logging.getLogger("azure.ai.agentserver") + assert lib_logger.name == "azure.ai.agentserver" + + +def test_log_level_preserved_across_imports() -> None: + """Importing internal modules does not reset the log level set by user code.""" + lib_logger = logging.getLogger("azure.ai.agentserver") + lib_logger.setLevel(logging.ERROR) + from azure.ai.agentserver.core import _base # noqa: F401 + assert lib_logger.level == logging.ERROR diff --git a/sdk/agentserver/azure-ai-agentserver-core/tests/test_server_routes.py b/sdk/agentserver/azure-ai-agentserver-core/tests/test_server_routes.py new file mode 100644 index 000000000000..f2a22ba45fa8 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/tests/test_server_routes.py @@ -0,0 +1,85 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests for port resolution and unknown-route handling.""" +import os +from unittest import mock + +import pytest +import httpx + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.core._config import resolve_port +from azure.ai.agentserver.core._constants import Constants + + +# ------------------------------------------------------------------ # +# Port resolution +# ------------------------------------------------------------------ # + + +class TestResolvePort: + """Tests for resolve_port() — explicit > env > default.""" + + def test_explicit_port_wins(self) -> None: + assert resolve_port(9090) == 9090 + + def test_env_var_port(self) -> None: + with mock.patch.dict(os.environ, {"PORT": "7777"}): + assert resolve_port(None) == 7777 + + def test_default_port(self) -> None: + with mock.patch.dict(os.environ, {}, clear=True): + # Remove PORT if set so the default is used. + env = os.environ.copy() + env.pop("PORT", None) + with mock.patch.dict(os.environ, env, clear=True): + assert resolve_port(None) == Constants.DEFAULT_PORT + + def test_invalid_env_var_raises(self) -> None: + with mock.patch.dict(os.environ, {"PORT": "not-a-number"}): + with pytest.raises(ValueError, match="Invalid value for PORT"): + resolve_port(None) + + def test_non_int_explicit_raises(self) -> None: + with pytest.raises(ValueError, match="expected an integer"): + resolve_port("8080") # type: ignore[arg-type] + + def test_port_out_of_range_explicit(self) -> None: + with pytest.raises(ValueError, match="expected 1-65535"): + resolve_port(0) + + def test_port_above_range_explicit(self) -> None: + with pytest.raises(ValueError, match="expected 1-65535"): + resolve_port(70000) + + def test_env_var_port_out_of_range(self) -> None: + with mock.patch.dict(os.environ, {"PORT": "0"}): + with pytest.raises(ValueError, match="expected 1-65535"): + resolve_port(None) + + def test_env_var_port_above_range(self) -> None: 
+ with mock.patch.dict(os.environ, {"PORT": "99999"}): + with pytest.raises(ValueError, match="expected 1-65535"): + resolve_port(None) + + +# ------------------------------------------------------------------ # +# Unknown route +# ------------------------------------------------------------------ # + + +@pytest.fixture() +def client() -> httpx.AsyncClient: + agent = AgentHost() + return httpx.AsyncClient( + transport=httpx.ASGITransport(app=agent.app), + base_url="http://testserver", + ) + + +@pytest.mark.asyncio +async def test_unknown_route_returns_404(client: httpx.AsyncClient) -> None: + """A request to an unregistered path returns 404.""" + resp = await client.get("/no-such-endpoint") + assert resp.status_code == 404 diff --git a/sdk/agentserver/azure-ai-agentserver-core/tests/test_tracing.py b/sdk/agentserver/azure-ai-agentserver-core/tests/test_tracing.py new file mode 100644 index 000000000000..1c1b7348a7d4 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/tests/test_tracing.py @@ -0,0 +1,185 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests for tracing configuration — not invocation spans (those live in the invocations package).""" +import contextlib +import os +from unittest import mock + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.core._config import ( + resolve_agent_name, + resolve_agent_version, + resolve_appinsights_connection_string, +) +from azure.ai.agentserver.core._constants import Constants + + +# ------------------------------------------------------------------ # +# Tracing enabled / disabled +# ------------------------------------------------------------------ # + + +class TestTracingToggle: + """Tracing is enabled when App Insights or OTLP endpoint is configured.""" + + def test_tracing_disabled_when_no_endpoints(self) -> None: + env = os.environ.copy() + env.pop(Constants.APPLICATIONINSIGHTS_CONNECTION_STRING, None) + env.pop(Constants.OTEL_EXPORTER_OTLP_ENDPOINT, None) + with mock.patch.dict(os.environ, env, clear=True): + agent = AgentHost() + assert agent.tracing is None + + def test_tracing_enabled_via_appinsights_env_var(self) -> None: + with mock.patch.dict(os.environ, {Constants.APPLICATIONINSIGHTS_CONNECTION_STRING: "InstrumentationKey=test"}): + with mock.patch( + "azure.ai.agentserver.core._tracing.TracingHelper.__init__", + return_value=None, + ): + agent = AgentHost() + assert agent.tracing is not None + + def test_tracing_enabled_via_otlp_env_var(self) -> None: + with mock.patch.dict(os.environ, {Constants.OTEL_EXPORTER_OTLP_ENDPOINT: "http://localhost:4318"}): + with mock.patch( + "azure.ai.agentserver.core._tracing.TracingHelper.__init__", + return_value=None, + ): + agent = AgentHost() + assert agent.tracing is not None + + def test_tracing_enabled_via_constructor_connection_string(self) -> None: + with mock.patch( + "azure.ai.agentserver.core._tracing.TracingHelper.__init__", + return_value=None, + ): + agent = 
AgentHost(application_insights_connection_string="InstrumentationKey=ctor") + assert agent.tracing is not None + + +# ------------------------------------------------------------------ # +# Application Insights connection string resolution +# ------------------------------------------------------------------ # + + +class TestAppInsightsConnectionString: + """Tests for resolve_appinsights_connection_string().""" + + def test_explicit_wins(self) -> None: + assert resolve_appinsights_connection_string("InstrumentationKey=abc") == "InstrumentationKey=abc" + + def test_env_var(self) -> None: + with mock.patch.dict( + os.environ, + {Constants.APPLICATIONINSIGHTS_CONNECTION_STRING: "InstrumentationKey=env"}, + ): + assert resolve_appinsights_connection_string(None) == "InstrumentationKey=env" + + def test_none_when_unset(self) -> None: + env = os.environ.copy() + env.pop(Constants.APPLICATIONINSIGHTS_CONNECTION_STRING, None) + with mock.patch.dict(os.environ, env, clear=True): + assert resolve_appinsights_connection_string(None) is None + + def test_explicit_overrides_env_var(self) -> None: + with mock.patch.dict( + os.environ, + {Constants.APPLICATIONINSIGHTS_CONNECTION_STRING: "InstrumentationKey=env"}, + ): + result = resolve_appinsights_connection_string("InstrumentationKey=explicit") + assert result == "InstrumentationKey=explicit" + + +# ------------------------------------------------------------------ # +# _setup_azure_monitor (mocked) +# ------------------------------------------------------------------ # + + +class TestSetupAzureMonitor: + """Verify _setup_azure_monitor calls the right helpers.""" + + @staticmethod + def _tracing_mocks() -> contextlib.ExitStack: + """Enter the common set of mocks needed to instantiate TracingHelper.""" + stack = contextlib.ExitStack() + stack.enter_context(mock.patch("azure.ai.agentserver.core._tracing._HAS_OTEL", True)) + stack.enter_context(mock.patch("azure.ai.agentserver.core._tracing.trace", create=True)) + 
stack.enter_context( + mock.patch("azure.ai.agentserver.core._tracing.TraceContextTextMapPropagator", create=True) + ) + stack.enter_context( + mock.patch("azure.ai.agentserver.core._tracing._ensure_trace_provider", return_value=mock.MagicMock()) + ) + return stack + + def test_setup_azure_monitor_called_when_conn_str_provided(self) -> None: + with self._tracing_mocks(): + with mock.patch( + "azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor" + ) as mock_setup: + with mock.patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_otlp_export"): + from azure.ai.agentserver.core._tracing import TracingHelper + TracingHelper(connection_string="InstrumentationKey=test") + # _setup_azure_monitor receives (connection_string, resource, trace_provider) + mock_setup.assert_called_once() + args = mock_setup.call_args[0] + assert args[0] == "InstrumentationKey=test" + + def test_setup_azure_monitor_not_called_when_no_conn_str(self) -> None: + with self._tracing_mocks(): + with mock.patch( + "azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor" + ) as mock_setup: + with mock.patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_otlp_export"): + from azure.ai.agentserver.core._tracing import TracingHelper + TracingHelper(connection_string=None) + mock_setup.assert_not_called() + + +# ------------------------------------------------------------------ # +# Constructor passes / skips connection string +# ------------------------------------------------------------------ # + + +class TestConstructorConnectionString: + """Verify AgentHost forwards the connection string to TracingHelper.""" + + def test_constructor_passes_connection_string(self) -> None: + with mock.patch( + "azure.ai.agentserver.core._tracing.TracingHelper.__init__", + return_value=None, + ) as mock_init: + AgentHost( + application_insights_connection_string="InstrumentationKey=ctor", + ) + 
mock_init.assert_called_once_with(connection_string="InstrumentationKey=ctor") + + +# ------------------------------------------------------------------ # +# Agent name / version resolution with new env vars +# ------------------------------------------------------------------ # + + +class TestAgentIdentityResolution: + """Tests for resolve_agent_name() and resolve_agent_version().""" + + def test_agent_name_from_env(self) -> None: + with mock.patch.dict(os.environ, {Constants.FOUNDRY_AGENT_NAME: "my-agent"}): + assert resolve_agent_name() == "my-agent" + + def test_agent_name_default_empty(self) -> None: + env = os.environ.copy() + env.pop(Constants.FOUNDRY_AGENT_NAME, None) + with mock.patch.dict(os.environ, env, clear=True): + assert resolve_agent_name() == "" + + def test_agent_version_from_env(self) -> None: + with mock.patch.dict(os.environ, {Constants.FOUNDRY_AGENT_VERSION: "2.0"}): + assert resolve_agent_version() == "2.0" + + def test_agent_version_default_empty(self) -> None: + env = os.environ.copy() + env.pop(Constants.FOUNDRY_AGENT_VERSION, None) + with mock.patch.dict(os.environ, env, clear=True): + assert resolve_agent_version() == "" diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/CHANGELOG.md b/sdk/agentserver/azure-ai-agentserver-invocations/CHANGELOG.md new file mode 100644 index 000000000000..1cb00d1154d0 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/CHANGELOG.md @@ -0,0 +1,15 @@ +# Release History + +## 1.0.0b1 (Unreleased) + +### Features Added + +- Initial release of `azure-ai-agentserver-invocations`. +- `InvocationHandler` for wiring invocation protocol endpoints to an `AgentHost`. +- Decorator-based handler registration (`@invocations.invoke_handler`). +- Optional `GET /invocations/{id}` and `POST /invocations/{id}/cancel` endpoints. +- `GET /invocations/docs/openapi.json` for OpenAPI spec serving. +- Invocation ID tracking and session correlation via `agent_session_id` query parameter. 
+- Distributed tracing with GenAI semantic convention span attributes. +- W3C Baggage propagation for cross-service correlation. +- Streaming response support with span lifecycle management. diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/LICENSE b/sdk/agentserver/azure-ai-agentserver-invocations/LICENSE new file mode 100644 index 000000000000..4c3581d3b052 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/MANIFEST.in b/sdk/agentserver/azure-ai-agentserver-invocations/MANIFEST.in new file mode 100644 index 000000000000..cd83a6c13bfa --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/MANIFEST.in @@ -0,0 +1,8 @@ +include *.md +include LICENSE +recursive-include tests *.py +recursive-include samples *.py *.md +include azure/__init__.py +include azure/ai/__init__.py +include azure/ai/agentserver/__init__.py +include azure/ai/agentserver/invocations/py.typed diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/README.md b/sdk/agentserver/azure-ai-agentserver-invocations/README.md new file mode 100644 index 000000000000..0ab1bf64f5d6 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/README.md @@ -0,0 +1,221 @@ +# Azure AI AgentHost Invocations for Python + +The `azure-ai-agentserver-invocations` package provides the invocation protocol endpoints for Azure AI Hosted Agent containers. It plugs into the [`azure-ai-agentserver-core`](https://pypi.org/project/azure-ai-agentserver-core/) host framework and adds the full invocation lifecycle: `POST /invocations`, `GET /invocations/{id}`, `POST /invocations/{id}/cancel`, and `GET /invocations/docs/openapi.json`. + +## Getting started + +### Install the package + +```bash +pip install azure-ai-agentserver-invocations +``` + +This automatically installs `azure-ai-agentserver-core` as a dependency. + +### Prerequisites + +- Python 3.10 or later + +## Key concepts + +### InvocationHandler + +`InvocationHandler` is the composable protocol handler that mounts invocation endpoints onto an `AgentHost`. It provides decorator methods for registering handler functions: + +- `@invocations.invoke_handler` — **Required.** Handles `POST /invocations`. +- `@invocations.get_invocation_handler` — Optional. Handles `GET /invocations/{id}`. +- `@invocations.cancel_invocation_handler` — Optional. Handles `POST /invocations/{id}/cancel`. 
+ +### Protocol endpoints + +| Method | Route | Required | Description | +|---|---|---|---| +| `POST` | `/invocations` | Yes | Execute the agent | +| `GET` | `/invocations/{invocation_id}` | No | Retrieve invocation status or result | +| `POST` | `/invocations/{invocation_id}/cancel` | No | Cancel a running invocation | +| `GET` | `/invocations/docs/openapi.json` | No | Serve the agent's OpenAPI 3.x spec | + +### Request and response headers + +The SDK automatically manages these headers on every invocation: + +| Header | Direction | Description | +|---|---|---| +| `x-agent-invocation-id` | Request & Response | Echoed if provided, otherwise a UUID is generated | +| `x-agent-session-id` | Response (POST only) | Resolved from `agent_session_id` query param, `FOUNDRY_AGENT_SESSION_ID` env var, or generated UUID | + +### Session ID resolution + +Session IDs group related invocations into a conversation. The SDK resolves the session ID in order: + +1. `agent_session_id` query parameter on `POST /invocations` +2. `FOUNDRY_AGENT_SESSION_ID` environment variable +3. Auto-generated UUID + +The resolved session ID is available in handler functions via `request.state.session_id`. + +### Handler access to SDK state + +Inside handler functions, the SDK sets these attributes on `request.state`: + +- `request.state.invocation_id` — The invocation ID (echoed or generated). +- `request.state.session_id` — The resolved session ID (POST /invocations only). 
+ +### Distributed tracing + +When tracing is enabled on the `AgentHost`, invocation spans are automatically created with GenAI semantic conventions: + +- **Span name**: `invoke_agent {FOUNDRY_AGENT_NAME}:{FOUNDRY_AGENT_VERSION}` +- **Span attributes**: `gen_ai.system`, `gen_ai.operation.name`, `gen_ai.response.id`, `gen_ai.conversation.id`, `gen_ai.agent.id`, `gen_ai.agent.name`, `gen_ai.agent.version` +- **Error tags**: `azure.ai.agentserver.invocations.error.code`, `.error.message` +- **Baggage keys**: `azure.ai.agentserver.invocation_id`, `.session_id` + +## Examples + +### Simple synchronous agent + +```python +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler +from starlette.requests import Request +from starlette.responses import JSONResponse, Response + +server = AgentHost() +invocations = InvocationHandler(server) + +@invocations.invoke_handler +async def handle(request: Request) -> Response: + data = await request.json() + return JSONResponse({"greeting": f"Hello, {data['name']}!"}) + +server.run() +``` + +### Long-running operations with polling + +```python +import asyncio +import json + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler +from starlette.requests import Request +from starlette.responses import JSONResponse, Response + +_tasks: dict[str, asyncio.Task] = {} +_results: dict[str, bytes] = {} + +server = AgentHost() +invocations = InvocationHandler(server) + +@invocations.invoke_handler +async def handle(request: Request) -> Response: + data = await request.json() + invocation_id = request.state.invocation_id + task = asyncio.create_task(do_work(invocation_id, data)) + _tasks[invocation_id] = task + return JSONResponse({"invocation_id": invocation_id, "status": "running"}) + +@invocations.get_invocation_handler +async def get_invocation(request: Request) -> Response: + invocation_id = request.state.invocation_id + if 
invocation_id in _results:
+        return Response(content=_results[invocation_id], media_type="application/json")
+    return JSONResponse({"invocation_id": invocation_id, "status": "running"})
+
+@invocations.cancel_invocation_handler
+async def cancel_invocation(request: Request) -> Response:
+    invocation_id = request.state.invocation_id
+    if invocation_id in _tasks:
+        _tasks[invocation_id].cancel()
+        del _tasks[invocation_id]
+        return JSONResponse({"invocation_id": invocation_id, "status": "cancelled"})
+    return JSONResponse({"error": "not found"}, status_code=404)
+```
+
+### Streaming (Server-Sent Events)
+
+```python
+import json
+
+from azure.ai.agentserver.core import AgentHost
+from azure.ai.agentserver.invocations import InvocationHandler
+from starlette.requests import Request
+from starlette.responses import Response, StreamingResponse
+
+server = AgentHost()
+invocations = InvocationHandler(server)
+
+@invocations.invoke_handler
+async def handle(request: Request) -> Response:
+    async def generate():
+        for word in ["Hello", " ", "world", "!"]:
+            # SSE frames are "data: <payload>\n\n", matching the
+            # text/event-stream media type declared below.
+            yield f"data: {json.dumps({'delta': word})}\n\n".encode()
+
+    return StreamingResponse(generate(), media_type="text/event-stream")
+```
+
+### Multi-turn conversation
+
+Use the `agent_session_id` query parameter to group invocations into a conversation:
+
+```bash
+# First turn
+curl -X POST "http://localhost:8088/invocations?agent_session_id=session-abc" \
+  -H "Content-Type: application/json" \
+  -d '{"message": "My name is Alice"}'
+
+# Second turn (same session)
+curl -X POST "http://localhost:8088/invocations?agent_session_id=session-abc" \
+  -H "Content-Type: application/json" \
+  -d '{"message": "What is my name?"}'
+```
+
+The session ID is available in the handler via `request.state.session_id`.
+ +### Serving an OpenAPI spec + +Pass an OpenAPI spec dict to enable the discovery endpoint at `GET /invocations/docs/openapi.json`: + +```python +server = AgentHost() +invocations = InvocationHandler(server, openapi_spec={ + "openapi": "3.0.3", + "info": {"title": "My Agent", "version": "1.0.0"}, + "paths": { ... }, +}) +``` + +## Troubleshooting + +### Reporting issues + +To report an issue with the client library, or request additional features, please open a GitHub issue [here](https://github.com/Azure/azure-sdk-for-python/issues). + +## Next steps + +Visit the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-invocations/samples) folder for complete working examples: + +| Sample | Description | +|---|---| +| [simple_invoke_agent](samples/simple_invoke_agent/) | Minimal synchronous request-response | +| [async_invoke_agent](samples/async_invoke_agent/) | Long-running operations with polling and cancellation | + +## Contributing + +This project welcomes contributions and suggestions. Most contributions require +you to agree to a Contributor License Agreement (CLA) declaring that you have +the right to, and actually do, grant us the rights to use your contribution. +For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether +you need to provide a CLA and decorate the PR appropriately (e.g., label, +comment). Simply follow the instructions provided by the bot. You will only +need to do this once across all repos using our CLA. + +This project has adopted the +[Microsoft Open Source Code of Conduct][code_of_conduct]. For more information, +see the Code of Conduct FAQ or contact opencode@microsoft.com with any +additional questions or comments. 
+ +[code_of_conduct]: https://opensource.microsoft.com/codeofconduct/ diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/__init__.py b/sdk/agentserver/azure-ai-agentserver-invocations/azure/__init__.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/__init__.py rename to sdk/agentserver/azure-ai-agentserver-invocations/azure/__init__.py diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/__init__.py b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/__init__.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/__init__.py rename to sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/__init__.py diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/__init__.py b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/__init__.py new file mode 100644 index 000000000000..8db66d3d0f0f --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/__init__.py b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/__init__.py new file mode 100644 index 000000000000..e8cdb4179622 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/__init__.py @@ -0,0 +1,30 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Invocations protocol for Azure AI Hosted Agents. + +This package provides the invocation protocol endpoints and handler +wiring for :class:`~azure.ai.agentserver.core.AgentHost`. 
+
+Quick start::
+
+    from azure.ai.agentserver.core import AgentHost
+    from azure.ai.agentserver.invocations import InvocationHandler
+    from starlette.responses import JSONResponse
+
+    server = AgentHost()
+    invocations = InvocationHandler(server)
+
+    @invocations.invoke_handler
+    async def handle(request):
+        return JSONResponse({"ok": True})
+
+    server.run()
+"""
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
+
+from ._invocation import InvocationHandler
+from ._version import VERSION
+
+__all__ = ["InvocationHandler"]
+__version__ = VERSION
diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_constants.py b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_constants.py
new file mode 100644
index 000000000000..2dd8e9f91ce7
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_constants.py
@@ -0,0 +1,25 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
+class InvocationConstants:
+    """Invocation protocol constants.
+
+    Protocol-specific request/response headers, span attribute keys, and
+    baggage keys for the invocation endpoints.
+ """ + + # Request / response headers + INVOCATION_ID_HEADER = "x-agent-invocation-id" + SESSION_ID_HEADER = "x-agent-session-id" + + # Span attribute keys + ATTR_SPAN_INVOCATION_ID = "azure.ai.agentserver.invocations.invocation_id" + ATTR_SPAN_SESSION_ID = "azure.ai.agentserver.invocations.session_id" + ATTR_SPAN_ERROR_CODE = "azure.ai.agentserver.invocations.error.code" + ATTR_SPAN_ERROR_MESSAGE = "azure.ai.agentserver.invocations.error.message" + + # Baggage keys + ATTR_BAGGAGE_INVOCATION_ID = "azure.ai.agentserver.invocation_id" + ATTR_BAGGAGE_SESSION_ID = "azure.ai.agentserver.session_id" diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation.py b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation.py new file mode 100644 index 000000000000..2c2a56554615 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation.py @@ -0,0 +1,467 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Invocation protocol handler for AgentHost. + +Provides the invocation protocol endpoints and handler decorators. +Registers routes with the ``AgentHost`` on construction. 
+""" +import contextlib +import inspect +import os +import re +import uuid +from collections.abc import Awaitable, Callable # pylint: disable=import-error +from typing import TYPE_CHECKING, Any, Optional + +from starlette.requests import Request +from starlette.responses import JSONResponse, Response, StreamingResponse +from starlette.routing import Route + +from azure.ai.agentserver.core import ( # pylint: disable=no-name-in-module + get_logger, + Constants, + create_error_response, +) + +if TYPE_CHECKING: + from azure.ai.agentserver.core import AgentHost, TracingHelper + +from ._constants import InvocationConstants + +logger = get_logger() + +# Maximum length and allowed characters for user-provided IDs (defense in depth). +_MAX_ID_LENGTH = 256 +_VALID_ID_RE = re.compile(r"^[a-zA-Z0-9\-_.:]+$") + + +def _sanitize_id(value: str, fallback: str) -> str: + """Validate a user-provided ID string. + + Returns *value* unchanged when it passes validation, otherwise returns + *fallback*. This prevents excessively long or malformed IDs from + propagating into headers, span attributes, and log messages. + + :param value: The raw ID from a header or query parameter. + :type value: str + :param fallback: A safe fallback value (typically a generated UUID). + :type fallback: str + :return: The validated ID or the fallback. + :rtype: str + """ + if not value or len(value) > _MAX_ID_LENGTH or not _VALID_ID_RE.match(value): + return fallback + return value + + +class InvocationHandler: + """Invocation protocol handler that plugs into an ``AgentHost``. + + Creates the invocation protocol endpoints and registers them with + the server. Use the decorator methods to wire handler functions + to the endpoints. + + This design supports multi-protocol composition — multiple protocol + handlers (e.g. ``InvocationHandler``, ``ResponseHandler``) can be + mounted onto the same ``AgentHost``. 
+ + Usage:: + + from azure.ai.agentserver.core import AgentHost + from azure.ai.agentserver.invocations import InvocationHandler + + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request): + return JSONResponse({"ok": True}) + + server.run() + + :param server: The ``AgentHost`` to register invocation protocol + routes with. + :type server: AgentHost + :param openapi_spec: Optional OpenAPI spec dict. When provided, the spec + is served at ``GET /invocations/docs/openapi.json``. + :type openapi_spec: Optional[dict[str, Any]] + """ + + def __init__( + self, + server: "AgentHost", + *, + openapi_spec: Optional[dict[str, Any]] = None, + ) -> None: + self._tracing: Optional["TracingHelper"] = server.tracing + self._invoke_fn: Optional[Callable] = None + self._get_invocation_fn: Optional[Callable] = None + self._cancel_invocation_fn: Optional[Callable] = None + self._openapi_spec = openapi_spec + + # Build and cache routes once + self._routes: list[Route] = [ + Route( + "/invocations/docs/openapi.json", + self._get_openapi_spec_endpoint, + methods=["GET"], + name="get_openapi_spec", + ), + Route( + "/invocations", + self._create_invocation_endpoint, + methods=["POST"], + name="create_invocation", + ), + Route( + "/invocations/{invocation_id}", + self._get_invocation_endpoint, + methods=["GET"], + name="get_invocation", + ), + Route( + "/invocations/{invocation_id}/cancel", + self._cancel_invocation_endpoint, + methods=["POST"], + name="cancel_invocation", + ), + ] + + # Register routes with the server + server.register_routes(self._routes) + + # ------------------------------------------------------------------ + # Routes + # ------------------------------------------------------------------ + + @property + def routes(self) -> list[Route]: + """Starlette routes for the invocation protocol. + + :return: A list of Route objects for the invocation endpoints. 
+ :rtype: list[Route] + """ + return self._routes + + # ------------------------------------------------------------------ + # Handler decorators + # ------------------------------------------------------------------ + + def invoke_handler( + self, fn: Callable[[Request], Awaitable[Response]] + ) -> Callable[[Request], Awaitable[Response]]: + """Register a function as the invoke handler. + + Usage:: + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + ... + + :param fn: Async function accepting a Starlette Request and returning a Response. + :type fn: Callable[[Request], Awaitable[Response]] + :return: The original function (unmodified). + :rtype: Callable[[Request], Awaitable[Response]] + :raises TypeError: If *fn* is not an async function. + """ + if not inspect.iscoroutinefunction(fn): + raise TypeError( + f"invoke_handler expects an async function, got {type(fn).__name__}. " + "Use 'async def' to define your handler." + ) + self._invoke_fn = fn + return fn + + def get_invocation_handler( + self, fn: Callable[[Request], Awaitable[Response]] + ) -> Callable[[Request], Awaitable[Response]]: + """Register a function as the get-invocation handler. + + :param fn: Async function accepting a Starlette Request and returning a Response. + :type fn: Callable[[Request], Awaitable[Response]] + :return: The original function (unmodified). + :rtype: Callable[[Request], Awaitable[Response]] + :raises TypeError: If *fn* is not an async function. + """ + if not inspect.iscoroutinefunction(fn): + raise TypeError( + f"get_invocation_handler expects an async function, got {type(fn).__name__}. " + "Use 'async def' to define your handler." + ) + self._get_invocation_fn = fn + return fn + + def cancel_invocation_handler( + self, fn: Callable[[Request], Awaitable[Response]] + ) -> Callable[[Request], Awaitable[Response]]: + """Register a function as the cancel-invocation handler. 
+ + :param fn: Async function accepting a Starlette Request and returning a Response. + :type fn: Callable[[Request], Awaitable[Response]] + :return: The original function (unmodified). + :rtype: Callable[[Request], Awaitable[Response]] + :raises TypeError: If *fn* is not an async function. + """ + if not inspect.iscoroutinefunction(fn): + raise TypeError( + f"cancel_invocation_handler expects an async function, got {type(fn).__name__}. " + "Use 'async def' to define your handler." + ) + self._cancel_invocation_fn = fn + return fn + + # ------------------------------------------------------------------ + # Dispatch methods (internal) + # ------------------------------------------------------------------ + + async def _dispatch_invoke(self, request: Request) -> Response: + if self._invoke_fn is not None: + return await self._invoke_fn(request) + raise NotImplementedError( + "No invoke handler registered. Use the @invocations.invoke_handler decorator." + ) + + async def _dispatch_get_invocation(self, request: Request) -> Response: + if self._get_invocation_fn is not None: + return await self._get_invocation_fn(request) + return create_error_response("not_found", "get_invocation not implemented", status_code=404) + + async def _dispatch_cancel_invocation(self, request: Request) -> Response: + if self._cancel_invocation_fn is not None: + return await self._cancel_invocation_fn(request) + return create_error_response("not_found", "cancel_invocation not implemented", status_code=404) + + def get_openapi_spec(self) -> Optional[dict[str, Any]]: + """Return the stored OpenAPI spec, or None.""" + return self._openapi_spec + + # ------------------------------------------------------------------ + # Span attribute helper + # ------------------------------------------------------------------ + + @staticmethod + def _safe_set_attrs(span: Any, attrs: dict[str, str]) -> None: + if span is None: + return + try: + for key, value in attrs.items(): + span.set_attribute(key, value) + 
except Exception: # pylint: disable=broad-exception-caught + logger.debug("Failed to set span attributes: %s", list(attrs.keys()), exc_info=True) + + # ------------------------------------------------------------------ + # Streaming response helpers + # ------------------------------------------------------------------ + + def _wrap_streaming_response( + self, + response: StreamingResponse, + otel_span: Any, + baggage_token: Any, + span_token: Any, + ) -> StreamingResponse: + """Wrap a streaming response's body iterator with tracing and context cleanup. + + Two layers of wrapping are applied in order: + + 1. **Inner (tracing):** ``trace_stream`` wraps the body iterator so + the OTel span covers the full streaming duration and records any + errors that occur while yielding chunks. + 2. **Outer (context cleanup):** A second async generator detaches the + span context and W3C Baggage context *after* all chunks have been + sent (or an error occurs). This ordering ensures the span is + ended before the contexts are detached. + + :param response: The ``StreamingResponse`` returned by the user handler. + :param otel_span: The OTel span (or *None* when tracing is disabled). + :param baggage_token: Token from ``set_baggage`` (or *None*). + :param span_token: Token from ``set_current_span`` (or *None*). + :return: The same response object, with its body_iterator replaced. + """ + # When tracing is disabled there is nothing to wrap — skip the + # extra async-generator layer to avoid unnecessary overhead on + # every streaming chunk. + if self._tracing is None: + return response + + # Inner wrap: trace_stream ends the span when iteration completes. + response.body_iterator = self._tracing.trace_stream(response.body_iterator, otel_span) + + # Outer wrap: detach span context and baggage after all chunks are sent. 
+ original_iterator = response.body_iterator + tracing = self._tracing # capture for the closure + + async def _cleanup_iter(): # type: ignore[return-value] + try: + async for chunk in original_iterator: + yield chunk + finally: + tracing.detach_context(span_token) + tracing.detach_baggage(baggage_token) + + response.body_iterator = _cleanup_iter() + return response + + # ------------------------------------------------------------------ + # Endpoint handlers + # ------------------------------------------------------------------ + + async def _get_openapi_spec_endpoint(self, request: Request) -> Response: # pylint: disable=unused-argument + spec = self.get_openapi_spec() + if spec is None: + return create_error_response("not_found", "No OpenAPI spec registered", status_code=404) + return JSONResponse(spec) + + async def _create_invocation_endpoint(self, request: Request) -> Response: + generated_id = str(uuid.uuid4()) + raw_invocation_id = request.headers.get(InvocationConstants.INVOCATION_ID_HEADER) or "" + invocation_id = _sanitize_id(raw_invocation_id, generated_id) + request.state.invocation_id = invocation_id + + # Session ID: query param overrides env var / generated UUID + raw_session_id = ( + request.query_params.get("agent_session_id") + or os.environ.get(Constants.FOUNDRY_AGENT_SESSION_ID) + or "" + ) + session_id = _sanitize_id(raw_session_id, str(uuid.uuid4())) + request.state.session_id = session_id + + baggage_token = None + span_token = None + response: Optional[Response] = None + streaming_wrapped = False + + try: + otel_span = None + if self._tracing is not None: + otel_span = self._tracing.start_request_span( + request.headers, + invocation_id, + span_operation="invoke_agent", + operation_name="invoke_agent", + session_id=session_id, + ) + # Make the span the current span in context so that + # child spans created by framework handlers are correctly + # parented under this span instead of appearing as siblings. 
+ span_token = self._tracing.set_current_span(otel_span) + self._safe_set_attrs(otel_span, { + InvocationConstants.ATTR_SPAN_INVOCATION_ID: invocation_id, + InvocationConstants.ATTR_SPAN_SESSION_ID: session_id, + }) + baggage_token = self._tracing.set_baggage({ + InvocationConstants.ATTR_BAGGAGE_INVOCATION_ID: invocation_id, + InvocationConstants.ATTR_BAGGAGE_SESSION_ID: session_id, + }) + + try: + response = await self._dispatch_invoke(request) + response.headers[InvocationConstants.INVOCATION_ID_HEADER] = invocation_id + response.headers[InvocationConstants.SESSION_ID_HEADER] = session_id + except NotImplementedError as exc: + self._safe_set_attrs(otel_span, { + InvocationConstants.ATTR_SPAN_ERROR_CODE: "not_implemented", + InvocationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + }) + if self._tracing is not None: + self._tracing.end_span(otel_span, exc=exc) + logger.error("Invocation %s failed: %s", invocation_id, exc) + return create_error_response( + "not_implemented", + str(exc), + status_code=501, + headers={ + InvocationConstants.INVOCATION_ID_HEADER: invocation_id, + InvocationConstants.SESSION_ID_HEADER: session_id, + }, + ) + except Exception as exc: # pylint: disable=broad-exception-caught + self._safe_set_attrs(otel_span, { + InvocationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + InvocationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + }) + if self._tracing is not None: + self._tracing.end_span(otel_span, exc=exc) + logger.error("Error processing invocation %s: %s", invocation_id, exc, exc_info=True) + return create_error_response( + "internal_error", + "Internal server error", + status_code=500, + headers={ + InvocationConstants.INVOCATION_ID_HEADER: invocation_id, + InvocationConstants.SESSION_ID_HEADER: session_id, + }, + ) + + if isinstance(response, StreamingResponse): + wrapped = self._wrap_streaming_response(response, otel_span, baggage_token, span_token) + streaming_wrapped = True + return wrapped + + # Non-streaming: end the span 
immediately. + if self._tracing is not None: + self._tracing.end_span(otel_span) + + return response + finally: + # For non-streaming responses (or error paths that returned + # before reaching _wrap_streaming_response), detach context + # and baggage immediately. Streaming responses handle this in + # _wrap_streaming_response's cleanup iterator instead. + if not streaming_wrapped: + if self._tracing is not None: + self._tracing.detach_context(span_token) + self._tracing.detach_baggage(baggage_token) + + async def _traced_invocation_endpoint( + self, + request: Request, + span_operation: str, + dispatch: Callable[[Request], Awaitable[Response]], + ) -> Response: + invocation_id = request.path_params["invocation_id"] + request.state.invocation_id = invocation_id + + span_cm: Any = contextlib.nullcontext(None) + if self._tracing is not None: + span_cm = self._tracing.request_span( + request.headers, invocation_id, span_operation, + session_id=request.query_params.get("agent_session_id", ""), + ) + with span_cm as _otel_span: + self._safe_set_attrs(_otel_span, { + InvocationConstants.ATTR_SPAN_INVOCATION_ID: invocation_id, + InvocationConstants.ATTR_SPAN_SESSION_ID: request.query_params.get("agent_session_id", ""), + }) + try: + response = await dispatch(request) + response.headers[InvocationConstants.INVOCATION_ID_HEADER] = invocation_id + return response + except Exception as exc: # pylint: disable=broad-exception-caught + self._safe_set_attrs(_otel_span, { + InvocationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + InvocationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + }) + # The exception is caught here (not re-raised), so OTel's + # start_as_current_span won't see it. Record it explicitly. 
+ if self._tracing is not None: + self._tracing.record_error(_otel_span, exc) + logger.error("Error in %s %s: %s", span_operation, invocation_id, exc, exc_info=True) + return create_error_response( + "internal_error", + "Internal server error", + status_code=500, + headers={InvocationConstants.INVOCATION_ID_HEADER: invocation_id}, + ) + + async def _get_invocation_endpoint(self, request: Request) -> Response: + return await self._traced_invocation_endpoint( + request, "get_invocation", self._dispatch_get_invocation + ) + + async def _cancel_invocation_endpoint(self, request: Request) -> Response: + return await self._traced_invocation_endpoint( + request, "cancel_invocation", self._dispatch_cancel_invocation + ) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/__init__.py b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_version.py similarity index 73% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/__init__.py rename to sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_version.py index fdf8caba9ef5..67d209a8cafd 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_version.py @@ -2,4 +2,4 @@ # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +VERSION = "1.0.0b1" diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/py.typed b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/cspell.json b/sdk/agentserver/azure-ai-agentserver-invocations/cspell.json new file mode 100644 index 000000000000..5858cd8e195b --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/cspell.json @@ -0,0 +1,26 @@ +{ + "ignoreWords": [ + "agentserver", + "appinsights", + "ASGI", + "autouse", + "caplog", + "genai", + "hypercorn", + "invocations", + "openapi", + "paramtype", + "pytestmark", + "rtype", + "starlette", + "traceparent", + "tracestate", + "tracecontext" + ], + "ignorePaths": [ + "*.csv", + "*.json", + "*.rst", + "samples/**" + ] +} diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/dev_requirements.txt b/sdk/agentserver/azure-ai-agentserver-invocations/dev_requirements.txt new file mode 100644 index 000000000000..e7af80133df7 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/dev_requirements.txt @@ -0,0 +1,7 @@ +-e ../../../eng/tools/azure-sdk-tools +../azure-ai-agentserver-core +pytest +httpx +pytest-asyncio +opentelemetry-api>=1.20.0 +opentelemetry-sdk>=1.20.0 diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-invocations/pyproject.toml new file mode 100644 index 000000000000..2427a5757164 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/pyproject.toml @@ -0,0 +1,68 @@ +[project] +name = "azure-ai-agentserver-invocations" +dynamic = ["version", "readme"] +description = "Invocations protocol for Azure AI Hosted Agents" +requires-python = ">=3.10" +authors = [ + { name = 
"Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +license = "MIT" +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", +] +keywords = ["azure", "azure sdk", "agent", "agentserver", "invocations"] + +dependencies = [ + "azure-ai-agentserver-core>=2.0.0b1", +] + +[build-system] +requires = ["setuptools>=69", "wheel"] +build-backend = "setuptools.build_meta" + +[project.urls] +repository = "https://github.com/Azure/azure-sdk-for-python" + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "samples*", + "doc*", + "azure", + "azure.ai", + "azure.ai.agentserver", +] + +[tool.setuptools.dynamic] +version = { attr = "azure.ai.agentserver.invocations._version.VERSION" } +readme = { file = ["README.md"], content-type = "text/markdown" } + +[tool.setuptools.package-data] +"azure.ai.agentserver.invocations" = ["py.typed"] + +[tool.ruff] +line-length = 120 +target-version = "py310" +lint.select = ["E", "F", "B", "I"] +lint.ignore = [] +fix = false + +[tool.ruff.lint.isort] +known-first-party = ["azure.ai.agentserver.invocations"] +combine-as-imports = true + +[tool.azure-sdk-build] +breaking = false +mypy = true +pyright = true +verifytypes = true +pylint = true +type_check_samples = false diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/pyrightconfig.json b/sdk/agentserver/azure-ai-agentserver-invocations/pyrightconfig.json new file mode 100644 index 000000000000..f36c5a7fe0d3 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/pyrightconfig.json @@ -0,0 +1,11 @@ +{ + "reportOptionalMemberAccess": "warning", + "reportArgumentType": "warning", + "reportAttributeAccessIssue": 
"warning", + "reportMissingImports": "warning", + "reportGeneralTypeIssues": "warning", + "reportReturnType": "warning", + "exclude": [ + "**/samples/**" + ] +} diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/samples/async_invoke_agent/async_invoke_agent.py b/sdk/agentserver/azure-ai-agentserver-invocations/samples/async_invoke_agent/async_invoke_agent.py new file mode 100644 index 000000000000..40d0a4be8d7f --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/async_invoke_agent/async_invoke_agent.py @@ -0,0 +1,170 @@ +"""Async invoke agent example. + +Demonstrates get_invocation and cancel_invocation for long-running work. +Invocations run in background tasks; callers poll or cancel by ID. + +.. warning:: + + **In-memory demo only.** This sample stores all invocation state + (``self._tasks``, ``self._results``) in process memory. Both in-flight + ``asyncio.Task`` objects and completed results are lost on process restart + — which *will* happen during platform rolling updates, health-check + failures, and scaling events. + + For production long-running invocations: + + * Persist results to durable storage (Redis, Cosmos DB, etc.) inside + ``_do_work`` **before** the method returns. + * On startup, rehydrate any incomplete work or mark it as failed. + * Consider an external task queue (Celery, Azure Queue, etc.) instead + of ``asyncio.create_task`` for work that must survive restarts. 
+ +Usage:: + + # Start the agent + python async_invoke_agent.py + + # Start a long-running invocation + curl -X POST http://localhost:8088/invocations -H "Content-Type: application/json" -d '{"query": "analyze dataset"}' + # -> x-agent-invocation-id: abc-123 + # -> {"invocation_id": "abc-123", "status": "running"} + + # Poll for result + curl http://localhost:8088/invocations/abc-123 + # -> {"invocation_id": "abc-123", "status": "running"} (still working) + # -> {"invocation_id": "abc-123", "status": "completed"} (done) + + # Or cancel + curl -X POST http://localhost:8088/invocations/abc-123/cancel + # -> {"invocation_id": "abc-123", "status": "cancelled"} +""" +import asyncio +import json + +from starlette.requests import Request +from starlette.responses import JSONResponse, Response + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler + + +# In-memory state for demo purposes (see module docstring for production caveats) +_tasks: dict[str, asyncio.Task] = {} +_results: dict[str, bytes] = {} + +server = AgentHost() +invocations = InvocationHandler(server) + + +async def _do_work(invocation_id: str, data: dict) -> bytes: + """Simulate long-running work. + + :param invocation_id: The invocation ID for this task. + :type invocation_id: str + :param data: The parsed request data. + :type data: dict + :return: JSON result bytes. + :rtype: bytes + """ + await asyncio.sleep(10) + result = json.dumps({ + "invocation_id": invocation_id, + "status": "completed", + "output": f"Processed: {data}", + }).encode() + _results[invocation_id] = result + return result + + +@invocations.invoke_handler +async def handle_invoke(request: Request) -> Response: + """Start a long-running invocation in a background task. + + :param request: The raw Starlette request. + :type request: starlette.requests.Request + :return: JSON status indicating the task is running. 
+ :rtype: starlette.responses.JSONResponse + """ + data = await request.json() + invocation_id = request.state.invocation_id + + task = asyncio.create_task(_do_work(invocation_id, data)) + _tasks[invocation_id] = task + + return JSONResponse({ + "invocation_id": invocation_id, + "status": "running", + }) + + +@invocations.get_invocation_handler +async def handle_get_invocation(request: Request) -> Response: + """Retrieve a previous invocation result. + + :param request: The raw Starlette request. + :type request: starlette.requests.Request + :return: JSON status or result. + :rtype: starlette.responses.JSONResponse + """ + invocation_id = request.state.invocation_id + + if invocation_id in _results: + return Response(content=_results[invocation_id], media_type="application/json") + + if invocation_id in _tasks: + task = _tasks[invocation_id] + if not task.done(): + return JSONResponse({ + "invocation_id": invocation_id, + "status": "running", + }) + result = task.result() + _results[invocation_id] = result + del _tasks[invocation_id] + return Response(content=result, media_type="application/json") + + return JSONResponse({"error": "not found"}, status_code=404) + + +@invocations.cancel_invocation_handler +async def handle_cancel_invocation(request: Request) -> Response: + """Cancel a running invocation. + + :param request: The raw Starlette request. + :type request: starlette.requests.Request + :return: JSON cancellation status. 
+ :rtype: starlette.responses.JSONResponse + """ + invocation_id = request.state.invocation_id + + # Already completed — cannot cancel + if invocation_id in _results: + return JSONResponse({ + "invocation_id": invocation_id, + "status": "completed", + "error": "invocation already completed", + }) + + if invocation_id in _tasks: + task = _tasks[invocation_id] + if task.done(): + # Task finished between check — treat as completed + _results[invocation_id] = task.result() + del _tasks[invocation_id] + return JSONResponse({ + "invocation_id": invocation_id, + "status": "completed", + "error": "invocation already completed", + }) + task.cancel() + del _tasks[invocation_id] + return JSONResponse({ + "invocation_id": invocation_id, + "status": "cancelled", + }) + + return JSONResponse({"error": "not found"}, status_code=404) + + +if __name__ == "__main__": + server.run() diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/samples/async_invoke_agent/requirements.txt b/sdk/agentserver/azure-ai-agentserver-invocations/samples/async_invoke_agent/requirements.txt new file mode 100644 index 000000000000..bc5cf4644e14 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/async_invoke_agent/requirements.txt @@ -0,0 +1 @@ +azure-ai-agentserver-invocations diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/samples/multiturn_invoke_agent/multiturn_invoke_agent.py b/sdk/agentserver/azure-ai-agentserver-invocations/samples/multiturn_invoke_agent/multiturn_invoke_agent.py new file mode 100644 index 000000000000..fde138d4c3c8 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/multiturn_invoke_agent/multiturn_invoke_agent.py @@ -0,0 +1,104 @@ +"""Multi-turn session invoke agent example. + +Demonstrates session-based conversations where context accumulates +across multiple invocations via the ``agent_session_id`` query parameter. + +.. 
warning:: + + **In-memory demo only.** Session history is stored in process memory + and is lost on restart. For production use, persist history to + durable storage (Redis, Cosmos DB, etc.). + +Usage:: + + # Start the agent + python multiturn_invoke_agent.py + + # Turn 1 — start planning + curl -X POST "http://localhost:8088/invocations?agent_session_id=trip-001" \ + -H "Content-Type: application/json" \ + -d '{"message": "I want to plan a vacation"}' + # -> {"reply": "Welcome! Where would you like to go, and for how long?", ...} + + # Turn 2 — provide details + curl -X POST "http://localhost:8088/invocations?agent_session_id=trip-001" \ + -H "Content-Type: application/json" \ + -d '{"message": "Japan for 2 weeks, interested in culture and food"}' + # -> {"reply": "Great choice! What is your budget ...?", ...} + + # Turn 3 — add constraints + curl -X POST "http://localhost:8088/invocations?agent_session_id=trip-001" \ + -H "Content-Type: application/json" \ + -d '{"message": "Budget is $5000, prefer direct flights"}' + # -> {"reply": "Here is a suggested itinerary ...", ...} +""" +from starlette.requests import Request +from starlette.responses import JSONResponse, Response + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler + + +server = AgentHost() +invocations = InvocationHandler(server) + +# In-memory session store — keyed by session ID. +_sessions: dict[str, list[dict[str, str]]] = {} + + +def _build_reply(history: list[dict[str, str]]) -> str: + """Generate a contextual reply based on conversation history. + + In production this would call a language model with the full history. + + :param history: List of message dicts with ``role`` and ``content`` keys. + :type history: list[dict[str, str]] + :return: The assistant reply text. + :rtype: str + """ + turn = len([m for m in history if m["role"] == "user"]) + if turn == 1: + return "Welcome! Where would you like to go, and for how long?" 
+ if turn == 2: + return ( + "Great choice! Could you share your budget range " + "and any travel preferences (direct flights, accommodation type)?" + ) + return ( + f"Thanks for all the details! Based on our {turn}-turn conversation, " + "here is a suggested itinerary. Let me know if you'd like to adjust anything." + ) + + +@invocations.invoke_handler +async def handle_invoke(request: Request) -> Response: + """Process a conversational turn, accumulating session context. + + The session ID comes from the ``agent_session_id`` query parameter + (set automatically on ``request.state.session_id`` by the framework). + + :param request: The raw Starlette request. + :type request: starlette.requests.Request + :return: JSON reply with session metadata. + :rtype: starlette.responses.JSONResponse + """ + data = await request.json() + session_id = request.state.session_id + user_message = data.get("message", "") + + # Retrieve or create session history + history = _sessions.setdefault(session_id, []) + history.append({"role": "user", "content": user_message}) + + reply = _build_reply(history) + history.append({"role": "assistant", "content": reply}) + + return JSONResponse({ + "reply": reply, + "session_id": session_id, + "turn": len([m for m in history if m["role"] == "user"]), + }) + + +if __name__ == "__main__": + server.run() diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/samples/multiturn_invoke_agent/requirements.txt b/sdk/agentserver/azure-ai-agentserver-invocations/samples/multiturn_invoke_agent/requirements.txt new file mode 100644 index 000000000000..bc5cf4644e14 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/multiturn_invoke_agent/requirements.txt @@ -0,0 +1 @@ +azure-ai-agentserver-invocations diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/samples/simple_invoke_agent/requirements.txt b/sdk/agentserver/azure-ai-agentserver-invocations/samples/simple_invoke_agent/requirements.txt new file mode 100644 index 
000000000000..bc5cf4644e14 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/simple_invoke_agent/requirements.txt @@ -0,0 +1 @@ +azure-ai-agentserver-invocations diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/samples/simple_invoke_agent/simple_invoke_agent.py b/sdk/agentserver/azure-ai-agentserver-invocations/samples/simple_invoke_agent/simple_invoke_agent.py new file mode 100644 index 000000000000..212585120132 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/simple_invoke_agent/simple_invoke_agent.py @@ -0,0 +1,40 @@ +"""Simple invoke agent example. + +Accepts JSON requests, echoes back with a greeting. + +Usage:: + + # Start the agent + python simple_invoke_agent.py + + # Send a greeting request + curl -X POST http://localhost:8088/invocations -H "Content-Type: application/json" -d '{"name": "Alice"}' + # -> {"greeting": "Hello, Alice!"} +""" +from starlette.requests import Request +from starlette.responses import JSONResponse, Response + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler + + +server = AgentHost() +invocations = InvocationHandler(server) + + +@invocations.invoke_handler +async def handle_invoke(request: Request) -> Response: + """Process the invocation by echoing a greeting. + + :param request: The raw Starlette request. + :type request: starlette.requests.Request + :return: JSON greeting response. + :rtype: starlette.responses.JSONResponse + """ + data = await request.json() + greeting = f"Hello, {data['name']}!" 
+ return JSONResponse({"greeting": greeting}) + + +if __name__ == "__main__": + server.run() diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_invoke_agent/requirements.txt b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_invoke_agent/requirements.txt new file mode 100644 index 000000000000..bc5cf4644e14 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_invoke_agent/requirements.txt @@ -0,0 +1 @@ +azure-ai-agentserver-invocations diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_invoke_agent/streaming_invoke_agent.py b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_invoke_agent/streaming_invoke_agent.py new file mode 100644 index 000000000000..16a86121fef9 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_invoke_agent/streaming_invoke_agent.py @@ -0,0 +1,87 @@ +"""Streaming invoke agent example (SSE). + +Demonstrates returning results incrementally via Server-Sent Events. +Callers receive real-time partial output as tokens are generated. + +Usage:: + + # Start the agent + python streaming_invoke_agent.py + + # Send a streaming request + curl -N -X POST http://localhost:8088/invocations \ + -H "Content-Type: application/json" \ + -d '{"prompt": "Write a Calculator class with an Add method"}' + # -> data: {"token": "class"} + # -> data: {"token": " Calculator"} + # -> ... + # -> event: done + # -> data: {"invocation_id": "..."} +""" +import asyncio +import json +from collections.abc import AsyncGenerator # pylint: disable=import-error + +from starlette.requests import Request +from starlette.responses import Response, StreamingResponse + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler + + +server = AgentHost() +invocations = InvocationHandler(server) + +# Simulated tokens — in production these would come from a model. 
+_SIMULATED_TOKENS = [ + "class", " Calculator", ":", "\n", + " ", "def", " add", "(", "self", ",", " a", ",", " b", ")", ":", "\n", + " ", "return", " a", " +", " b", "\n", +] + + +async def _generate_tokens( + invocation_id: str, prompt: str # pylint: disable=unused-argument +) -> AsyncGenerator[bytes, None]: + """Yield SSE-formatted token events with simulated latency. + + Each token is sent as a ``data:`` line per the SSE specification. + A final ``event: done`` signals stream completion. + + :param invocation_id: The invocation ID for this request. + :type invocation_id: str + :param prompt: The user prompt (unused in this demo). + :type prompt: str + """ + for token in _SIMULATED_TOKENS: + payload = json.dumps({"token": token}) + yield f"data: {payload}\n\n".encode() + await asyncio.sleep(0.15) # simulate model latency + + # Signal completion + done_payload = json.dumps({"invocation_id": invocation_id}) + yield f"event: done\ndata: {done_payload}\n\n".encode() + + +@invocations.invoke_handler +async def handle_invoke(request: Request) -> Response: + """Stream code-generation tokens back to the caller via SSE. + + :param request: The raw Starlette request. + :type request: starlette.requests.Request + :return: Streaming SSE response. 
+ :rtype: starlette.responses.StreamingResponse + """ + data = await request.json() + invocation_id = request.state.invocation_id + prompt = data.get("prompt", "") + + return StreamingResponse( + _generate_tokens(invocation_id, prompt), + media_type="text/event-stream", + headers={"Cache-Control": "no-cache", "Connection": "keep-alive"}, + ) + + +if __name__ == "__main__": + server.run() diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest.py new file mode 100644 index 000000000000..603bbcb45cda --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest.py @@ -0,0 +1,208 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Shared fixtures and factory functions for invocations tests.""" +import json +from typing import Any + +import pytest +from httpx import ASGITransport, AsyncClient +from starlette.requests import Request +from starlette.responses import JSONResponse, Response, StreamingResponse + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler + +# --------------------------------------------------------------------------- +# Sample OpenAPI spec used by several tests +# --------------------------------------------------------------------------- + +SAMPLE_OPENAPI_SPEC: dict[str, Any] = { + "openapi": "3.0.0", + "info": {"title": "Echo Agent", "version": "1.0.0"}, + "paths": { + "/invocations": { + "post": { + "requestBody": { + "required": True, + "content": { + "application/json": { + "schema": { + "type": "object", + "required": ["message"], + "properties": { + "message": {"type": "string"}, + }, + } + } + }, + }, + "responses": { + "200": { + "description": "OK", + "content": { + "application/json": { + "schema": { + "type": "object", + 
"properties": { + "reply": {"type": "string"}, + }, + } + } + }, + } + }, + } + } + }, +} + + +# --------------------------------------------------------------------------- +# Factory functions +# --------------------------------------------------------------------------- + + +def _make_echo_agent(**kwargs: Any) -> AgentHost: + """Create an AgentHost whose invoke handler echoes the request body.""" + server = AgentHost(**kwargs) + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + body = await request.body() + return Response(content=body, media_type="application/octet-stream") + + return server + + +def _make_streaming_agent(**kwargs: Any) -> AgentHost: + """Create an AgentHost whose invoke handler returns 3 JSON chunks.""" + server = AgentHost(**kwargs) + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> StreamingResponse: + async def generate(): + for i in range(3): + yield json.dumps({"chunk": i}) + "\n" + + return StreamingResponse(generate(), media_type="application/x-ndjson") + + return server + + +def _make_async_storage_agent(**kwargs: Any) -> AgentHost: + """Create an AgentHost with get/cancel handlers and in-memory store.""" + server = AgentHost(**kwargs) + invocations = InvocationHandler(server) + store: dict[str, Any] = {} + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + inv_id = request.state.invocation_id + body = await request.body() + store[inv_id] = body + return Response(content=body, media_type="application/octet-stream") + + @invocations.get_invocation_handler + async def get_handler(request: Request) -> Response: + inv_id = request.path_params["invocation_id"] + if inv_id not in store: + return JSONResponse( + {"error": {"code": "not_found", "message": "Not found"}}, + status_code=404, + ) + return Response(content=store[inv_id], media_type="application/octet-stream") + + 
@invocations.cancel_invocation_handler + async def cancel_handler(request: Request) -> Response: + inv_id = request.path_params["invocation_id"] + if inv_id not in store: + return JSONResponse( + {"error": {"code": "not_found", "message": "Not found"}}, + status_code=404, + ) + del store[inv_id] + return JSONResponse({"status": "cancelled"}) + + return server + + +def _make_validated_agent() -> AgentHost: + """Create an AgentHost with OpenAPI spec.""" + server = AgentHost() + invocations = InvocationHandler( + server, + openapi_spec=SAMPLE_OPENAPI_SPEC, + ) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + data = await request.json() + return JSONResponse({"reply": f"echo: {data['message']}"}) + + return server + + +def _make_failing_agent(**kwargs: Any) -> AgentHost: + """Create an AgentHost whose handler raises ValueError.""" + server = AgentHost(**kwargs) + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + raise ValueError("something went wrong") + + return server + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture() +def echo_client(): + server = _make_echo_agent() + transport = ASGITransport(app=server.app) + return AsyncClient(transport=transport, base_url="http://testserver") + + +@pytest.fixture() +def streaming_client(): + server = _make_streaming_agent() + transport = ASGITransport(app=server.app) + return AsyncClient(transport=transport, base_url="http://testserver") + + +@pytest.fixture() +def async_storage_server(): + return _make_async_storage_agent() + + +@pytest.fixture() +def async_storage_client(async_storage_server): + transport = ASGITransport(app=async_storage_server.app) + return AsyncClient(transport=transport, base_url="http://testserver") + + +@pytest.fixture() +def validated_client(): + server 
= _make_validated_agent() + transport = ASGITransport(app=server.app) + return AsyncClient(transport=transport, base_url="http://testserver") + + +@pytest.fixture() +def no_spec_client(): + server = _make_echo_agent() + transport = ASGITransport(app=server.app) + return AsyncClient(transport=transport, base_url="http://testserver") + + +@pytest.fixture() +def failing_client(): + server = _make_failing_agent() + transport = ASGITransport(app=server.app) + return AsyncClient(transport=transport, base_url="http://testserver") diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_decorator_pattern.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_decorator_pattern.py new file mode 100644 index 000000000000..e8ff084d5358 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_decorator_pattern.py @@ -0,0 +1,252 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Tests for decorator-based handler registration on AgentHost + InvocationHandler.""" +import pytest +from httpx import ASGITransport, AsyncClient +from starlette.requests import Request +from starlette.responses import JSONResponse, Response + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler + + +# --------------------------------------------------------------------------- +# invoke_handler stores function +# --------------------------------------------------------------------------- + +def test_invoke_handler_stores_function(): + """@invocations.invoke_handler stores the function on the protocol object.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + assert invocations._invoke_fn is handle + + +# 
--------------------------------------------------------------------------- +# invoke_handler returns original function +# --------------------------------------------------------------------------- + +def test_invoke_handler_returns_original_function(): + """@invocations.invoke_handler returns the original function.""" + server = AgentHost() + invocations = InvocationHandler(server) + + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + result = invocations.invoke_handler(handle) + assert result is handle + + +# --------------------------------------------------------------------------- +# get_invocation_handler stores function +# --------------------------------------------------------------------------- + +def test_get_invocation_handler_stores_function(): + """@invocations.get_invocation_handler stores the function.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.get_invocation_handler + async def get_handler(request: Request) -> Response: + return Response(content=b"ok") + + assert invocations._get_invocation_fn is get_handler + + +# --------------------------------------------------------------------------- +# cancel_invocation_handler stores function +# --------------------------------------------------------------------------- + +def test_cancel_invocation_handler_stores_function(): + """@invocations.cancel_invocation_handler stores the function.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.cancel_invocation_handler + async def cancel_handler(request: Request) -> Response: + return Response(content=b"ok") + + assert invocations._cancel_invocation_fn is cancel_handler + + +# --------------------------------------------------------------------------- +# shutdown_handler stores function +# --------------------------------------------------------------------------- + +def test_shutdown_handler_stores_function(): + """@server.shutdown_handler stores the 
function on the server.""" + server = AgentHost() + + @server.shutdown_handler + async def on_shutdown(): + pass + + assert server._shutdown_fn is on_shutdown + + +# --------------------------------------------------------------------------- +# Full request flow +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_full_request_flow(): + """Full lifecycle: invoke → get → cancel → get (404).""" + server = AgentHost() + invocations = InvocationHandler(server) + store: dict[str, bytes] = {} + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + body = await request.body() + store[request.state.invocation_id] = body + return Response(content=body, media_type="application/octet-stream") + + @invocations.get_invocation_handler + async def get_handler(request: Request) -> Response: + inv_id = request.path_params["invocation_id"] + if inv_id in store: + return Response(content=store[inv_id]) + return JSONResponse({"error": {"code": "not_found", "message": "Not found"}}, status_code=404) + + @invocations.cancel_invocation_handler + async def cancel_handler(request: Request) -> Response: + inv_id = request.path_params["invocation_id"] + if inv_id in store: + del store[inv_id] + return JSONResponse({"status": "cancelled"}) + return JSONResponse({"error": {"code": "not_found", "message": "Not found"}}, status_code=404) + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + # Invoke + resp = await client.post("/invocations", content=b"lifecycle-test") + assert resp.status_code == 200 + inv_id = resp.headers["x-agent-invocation-id"] + + # Get + resp = await client.get(f"/invocations/{inv_id}") + assert resp.status_code == 200 + assert resp.content == b"lifecycle-test" + + # Cancel + resp = await client.post(f"/invocations/{inv_id}/cancel") + assert resp.status_code == 200 + + # Get after cancel + resp = 
await client.get(f"/invocations/{inv_id}") + assert resp.status_code == 404 + + +# --------------------------------------------------------------------------- +# Missing optional handlers return 404 +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_missing_invoke_handler_returns_501(): + """POST /invocations without registered handler returns 501.""" + server = AgentHost() + invocations = InvocationHandler(server) + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"test") + assert resp.status_code == 501 + + +@pytest.mark.asyncio +async def test_missing_get_handler_returns_404(): + """GET /invocations/{id} without registered handler returns 404.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.get("/invocations/some-id") + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_missing_cancel_handler_returns_404(): + """POST /invocations/{id}/cancel without registered handler returns 404.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations/some-id/cancel") + assert resp.status_code == 404 + + +# --------------------------------------------------------------------------- +# Optional handler defaults and overrides +# 
--------------------------------------------------------------------------- + +def test_optional_handlers_default_none(): + """Get and cancel handlers default to None.""" + server = AgentHost() + invocations = InvocationHandler(server) + assert invocations._get_invocation_fn is None + assert invocations._cancel_invocation_fn is None + + +def test_optional_handler_override(): + """Setting an optional handler replaces None.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.get_invocation_handler + async def get_handler(request: Request) -> Response: + return Response(content=b"ok") + + assert invocations._get_invocation_fn is not None + + +# --------------------------------------------------------------------------- +# Shutdown handler called during lifespan +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_shutdown_handler_called_during_lifespan(): + """Shutdown handler is called when the app lifespan ends.""" + called = [] + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + @server.shutdown_handler + async def on_shutdown(): + called.append(True) + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"test") + assert resp.status_code == 200 + # The lifespan exit runs when the ASGI app scope ends + # With ASGITransport, the lifespan is managed by the transport + # The shutdown handler should be called on transport cleanup + + +# --------------------------------------------------------------------------- +# Config passthrough +# --------------------------------------------------------------------------- + +def test_graceful_shutdown_timeout_passthrough(): + """graceful_shutdown_timeout is passed through to 
the base class.""" + server = AgentHost(graceful_shutdown_timeout=15) + assert server._graceful_shutdown_timeout == 15 diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_edge_cases.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_edge_cases.py new file mode 100644 index 000000000000..4054f3c4c71c --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_edge_cases.py @@ -0,0 +1,310 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Edge-case tests for AgentHost + InvocationHandler.""" +import asyncio +import uuid + +import pytest +from httpx import ASGITransport, AsyncClient +from starlette.requests import Request +from starlette.responses import Response, StreamingResponse + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler +from conftest import SAMPLE_OPENAPI_SPEC + + +# --------------------------------------------------------------------------- +# Factory helpers for edge cases +# --------------------------------------------------------------------------- + + +def _make_custom_header_agent() -> AgentHost: + """Agent whose handler sets its own x-agent-invocation-id (should be overwritten).""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + resp = Response(content=b"ok") + resp.headers["x-agent-invocation-id"] = "custom-id-from-handler" + return resp + + return server + + +def _make_empty_streaming_agent() -> AgentHost: + """Agent that returns an empty streaming response.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> StreamingResponse: + async def generate(): + return + yield # noqa: E501 — make it a generator + + 
return StreamingResponse(generate(), media_type="text/plain") + + return server + + +def _make_large_payload_agent() -> AgentHost: + """Agent that echoes large payloads.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + body = await request.body() + return Response(content=body, media_type="application/octet-stream") + + return server + + +# --------------------------------------------------------------------------- +# Method not allowed tests +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_get_invocations_returns_405(): + """GET /invocations returns 405 Method Not Allowed.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.get("/invocations") + assert resp.status_code == 405 + + +@pytest.mark.asyncio +async def test_put_invocations_returns_405(): + """PUT /invocations returns 405 Method Not Allowed.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.put("/invocations", content=b"test") + assert resp.status_code == 405 + + +@pytest.mark.asyncio +async def test_delete_invocation_returns_405(): + """DELETE /invocations/{id} returns 405 Method Not Allowed.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") 
+ + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.delete("/invocations/some-id") + assert resp.status_code == 405 + + +@pytest.mark.asyncio +async def test_post_openapi_json_returns_405(): + """POST /invocations/docs/openapi.json returns 405.""" + server = AgentHost() + invocations = InvocationHandler(server, openapi_spec=SAMPLE_OPENAPI_SPEC) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations/docs/openapi.json", content=b"{}") + assert resp.status_code == 405 + + +# --------------------------------------------------------------------------- +# Response header tests +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_custom_invocation_id_overwritten(): + """Handler-set x-agent-invocation-id is overwritten by the server.""" + server = _make_custom_header_agent() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"test") + # Server overwrites handler's value with a generated UUID + inv_id = resp.headers["x-agent-invocation-id"] + assert inv_id != "custom-id-from-handler" + uuid.UUID(inv_id) # Should be a valid UUID + + +@pytest.mark.asyncio +async def test_invocation_id_auto_injected(echo_client): + """Invocation ID is auto-injected when not provided.""" + resp = await echo_client.post("/invocations", content=b"test") + assert "x-agent-invocation-id" in resp.headers + + +@pytest.mark.asyncio +async def test_invocation_id_accepted_from_request(echo_client): + """Server accepts invocation ID from request header.""" + custom_id = 
str(uuid.uuid4()) + resp = await echo_client.post( + "/invocations", + content=b"test", + headers={"x-agent-invocation-id": custom_id}, + ) + assert resp.headers["x-agent-invocation-id"] == custom_id + + +@pytest.mark.asyncio +async def test_invocation_id_generated_when_empty(echo_client): + """When empty invocation ID is sent, server generates one.""" + resp = await echo_client.post( + "/invocations", + content=b"test", + headers={"x-agent-invocation-id": ""}, + ) + inv_id = resp.headers["x-agent-invocation-id"] + uuid.UUID(inv_id) # Should be a valid UUID + + +# --------------------------------------------------------------------------- +# Payload edge cases +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_large_payload(): + """Large payload (1MB) is handled correctly.""" + server = _make_large_payload_agent() + transport = ASGITransport(app=server.app) + payload = b"x" * (1024 * 1024) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=payload) + assert resp.status_code == 200 + assert len(resp.content) == 1024 * 1024 + + +@pytest.mark.asyncio +async def test_unicode_payload(echo_client): + """Unicode payload is preserved.""" + text = "Hello, 世界! 
🌍" + resp = await echo_client.post("/invocations", content=text.encode("utf-8")) + assert resp.status_code == 200 + assert resp.content.decode("utf-8") == text + + +@pytest.mark.asyncio +async def test_binary_payload(echo_client): + """Binary payload with non-UTF-8 bytes is handled.""" + binary = bytes(range(256)) + resp = await echo_client.post("/invocations", content=binary) + assert resp.status_code == 200 + assert resp.content == binary + + +# --------------------------------------------------------------------------- +# Streaming edge cases +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_empty_streaming(): + """Empty streaming response doesn't crash.""" + server = _make_empty_streaming_agent() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"test") + assert resp.status_code == 200 + assert resp.content == b"" + + +@pytest.mark.asyncio +async def test_streaming_has_invocation_id(): + """Streaming response has x-agent-invocation-id header.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> StreamingResponse: + async def generate(): + yield b"chunk1" + + return StreamingResponse(generate(), media_type="text/plain") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"test") + assert "x-agent-invocation-id" in resp.headers + + +# --------------------------------------------------------------------------- +# Invocation lifecycle +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_multiple_gets(async_storage_client): + """Multiple GETs for the same invocation return the same 
result.""" + resp = await async_storage_client.post("/invocations", content=b"multi-get") + inv_id = resp.headers["x-agent-invocation-id"] + + for _ in range(3): + get_resp = await async_storage_client.get(f"/invocations/{inv_id}") + assert get_resp.status_code == 200 + assert get_resp.content == b"multi-get" + + +@pytest.mark.asyncio +async def test_double_cancel(async_storage_client): + """Cancelling twice: second cancel returns 404.""" + resp = await async_storage_client.post("/invocations", content=b"cancel-twice") + inv_id = resp.headers["x-agent-invocation-id"] + + cancel1 = await async_storage_client.post(f"/invocations/{inv_id}/cancel") + assert cancel1.status_code == 200 + + cancel2 = await async_storage_client.post(f"/invocations/{inv_id}/cancel") + assert cancel2.status_code == 404 + + +@pytest.mark.asyncio +async def test_invoke_cancel_get(async_storage_client): + """Invoke → cancel → get returns 404.""" + resp = await async_storage_client.post("/invocations", content=b"icg") + inv_id = resp.headers["x-agent-invocation-id"] + + await async_storage_client.post(f"/invocations/{inv_id}/cancel") + get_resp = await async_storage_client.get(f"/invocations/{inv_id}") + assert get_resp.status_code == 404 + + +# --------------------------------------------------------------------------- +# Concurrency +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_concurrent_invocations_get_unique_ids(): + """10 concurrent POSTs each get unique invocation IDs.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + tasks = [client.post("/invocations", content=b"test") for _ in range(10)] + responses = await asyncio.gather(*tasks) + + ids = 
{r.headers["x-agent-invocation-id"] for r in responses} + assert len(ids) == 10 diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_get_cancel.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_get_cancel.py new file mode 100644 index 000000000000..07e92f1cc0ac --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_get_cancel.py @@ -0,0 +1,129 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Tests for GET /invocations/{id} and POST /invocations/{id}/cancel.""" +import pytest +from httpx import ASGITransport, AsyncClient +from starlette.requests import Request +from starlette.responses import JSONResponse, Response + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler + + +# --------------------------------------------------------------------------- +# GET after invoke +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_get_after_invoke_returns_stored_result(async_storage_client): + """GET /invocations/{id} after invoke returns the stored result.""" + resp = await async_storage_client.post("/invocations", content=b"stored-data") + assert resp.status_code == 200 + inv_id = resp.headers["x-agent-invocation-id"] + + get_resp = await async_storage_client.get(f"/invocations/{inv_id}") + assert get_resp.status_code == 200 + assert get_resp.content == b"stored-data" + + +# --------------------------------------------------------------------------- +# GET unknown ID +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_get_unknown_id_returns_404(async_storage_client): + """GET /invocations/{unknown} returns 404.""" + resp = await async_storage_client.get("/invocations/unknown-id-12345") 
+ assert resp.status_code == 404 + + +# --------------------------------------------------------------------------- +# Cancel after invoke +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_cancel_after_invoke_returns_cancelled(async_storage_client): + """POST /invocations/{id}/cancel after invoke returns cancelled status.""" + resp = await async_storage_client.post("/invocations", content=b"cancel-me") + inv_id = resp.headers["x-agent-invocation-id"] + + cancel_resp = await async_storage_client.post(f"/invocations/{inv_id}/cancel") + assert cancel_resp.status_code == 200 + assert cancel_resp.json()["status"] == "cancelled" + + +# --------------------------------------------------------------------------- +# Cancel unknown ID +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_cancel_unknown_id_returns_404(async_storage_client): + """POST /invocations/{unknown}/cancel returns 404.""" + resp = await async_storage_client.post("/invocations/unknown-id-12345/cancel") + assert resp.status_code == 404 + + +# --------------------------------------------------------------------------- +# GET after cancel +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_get_after_cancel_returns_404(async_storage_client): + """GET after cancel returns 404 (data has been removed).""" + resp = await async_storage_client.post("/invocations", content=b"temp") + inv_id = resp.headers["x-agent-invocation-id"] + await async_storage_client.post(f"/invocations/{inv_id}/cancel") + + get_resp = await async_storage_client.get(f"/invocations/{inv_id}") + assert get_resp.status_code == 404 + + +# --------------------------------------------------------------------------- +# GET error returns 500 (inline AgentHost) +# --------------------------------------------------------------------------- + 
+@pytest.mark.asyncio +async def test_get_invocation_error_returns_500(): + """GET handler raising an exception returns 500.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + @invocations.get_invocation_handler + async def get_handler(request: Request) -> Response: + raise RuntimeError("get failed") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.get("/invocations/some-id") + assert resp.status_code == 500 + assert resp.json()["error"]["code"] == "internal_error" + + +# --------------------------------------------------------------------------- +# Cancel error returns 500 (inline AgentHost) +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_cancel_invocation_error_returns_500(): + """Cancel handler raising an exception returns 500.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + @invocations.cancel_invocation_handler + async def cancel_handler(request: Request) -> Response: + raise RuntimeError("cancel failed") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations/some-id/cancel") + assert resp.status_code == 500 + assert resp.json()["error"]["code"] == "internal_error" diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_graceful_shutdown.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_graceful_shutdown.py new file mode 100644 index 000000000000..6e6e3296a745 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_graceful_shutdown.py @@ -0,0 
+1,229 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Tests for graceful shutdown with AgentHost.""" +import asyncio +import logging + +import pytest +from httpx import ASGITransport, AsyncClient +from starlette.requests import Request +from starlette.responses import Response + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +def _make_server_with_shutdown(**kwargs) -> tuple[AgentHost, list]: + """Create AgentHost with a tracked shutdown handler.""" + server = AgentHost(**kwargs) + invocations = InvocationHandler(server) + calls: list[str] = [] + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + @server.shutdown_handler + async def on_shutdown(): + calls.append("shutdown") + + return server, calls + + +# --------------------------------------------------------------------------- +# Shutdown handler registration +# --------------------------------------------------------------------------- + +def test_shutdown_handler_registered(): + """Shutdown handler is stored on the server.""" + server, _ = _make_server_with_shutdown() + assert server._shutdown_fn is not None + + +def test_shutdown_handler_not_registered(): + """Without @shutdown_handler, _shutdown_fn is None.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + assert server._shutdown_fn is None + + +# --------------------------------------------------------------------------- +# ASGI lifespan helper +# 
--------------------------------------------------------------------------- + +async def _drive_lifespan(app): + """Drive a full ASGI lifespan startup+shutdown cycle.""" + scope = {"type": "lifespan"} + startup_done = asyncio.Event() + shutdown_done = asyncio.Event() + + async def receive(): + if not startup_done.is_set(): + startup_done.set() + return {"type": "lifespan.startup"} + await asyncio.sleep(0) + return {"type": "lifespan.shutdown"} + + async def send(message): + if message["type"] == "lifespan.shutdown.complete": + shutdown_done.set() + + await app(scope, receive, send) + return shutdown_done.is_set() + + +# --------------------------------------------------------------------------- +# Shutdown handler called during lifespan +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_shutdown_handler_called_on_lifespan_exit(): + """Shutdown handler runs when the ASGI lifespan exits.""" + server, calls = _make_server_with_shutdown() + + # Drive the lifespan via raw ASGI protocol + completed = await _drive_lifespan(server.app) + assert completed + assert "shutdown" in calls + + +# --------------------------------------------------------------------------- +# Shutdown handler timeout +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_shutdown_handler_timeout(caplog): + """Shutdown handler that exceeds timeout is warned about.""" + server = AgentHost(graceful_shutdown_timeout=1) + invocations = InvocationHandler(server) + calls: list[str] = [] + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + @server.shutdown_handler + async def on_shutdown(): + await asyncio.sleep(10) + calls.append("completed") + + with caplog.at_level(logging.WARNING, logger="azure.ai.agentserver"): + await _drive_lifespan(server.app) + + # Shutdown should have been interrupted + assert 
"completed" not in calls + # Logger should have warned about timeout + assert any("did not complete" in r.message.lower() or "timeout" in r.message.lower() for r in caplog.records) + + +# --------------------------------------------------------------------------- +# Shutdown handler exception +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_shutdown_handler_exception(caplog): + """Shutdown handler that raises is caught and logged.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + @server.shutdown_handler + async def on_shutdown(): + raise RuntimeError("shutdown exploded") + + with caplog.at_level(logging.ERROR, logger="azure.ai.agentserver"): + await _drive_lifespan(server.app) + + # Should have logged the exception + assert any("on_shutdown" in r.message.lower() or "error" in r.message.lower() for r in caplog.records) + + +# --------------------------------------------------------------------------- +# Graceful shutdown timeout config +# --------------------------------------------------------------------------- + +def test_default_graceful_shutdown_timeout(): + """Default graceful shutdown timeout is 30 seconds.""" + server = AgentHost() + assert server._graceful_shutdown_timeout == 30 + + +def test_custom_graceful_shutdown_timeout(): + """Custom graceful_shutdown_timeout is stored.""" + server = AgentHost(graceful_shutdown_timeout=60) + assert server._graceful_shutdown_timeout == 60 + + +def test_zero_graceful_shutdown_timeout(): + """Zero timeout disables the drain period.""" + server = AgentHost(graceful_shutdown_timeout=0) + assert server._graceful_shutdown_timeout == 0 + + +# --------------------------------------------------------------------------- +# Health endpoint accessible during normal operation +# 
--------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_health_endpoint_during_operation(): + """GET /readiness returns 200 during normal operation.""" + server, _ = _make_server_with_shutdown() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.get("/readiness") + assert resp.status_code == 200 + assert resp.json() == {"status": "healthy"} + + +# --------------------------------------------------------------------------- +# No shutdown handler is no-op +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_no_shutdown_handler_is_noop(): + """Without a shutdown handler, lifespan exit succeeds silently.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"test") + assert resp.status_code == 200 + # No exception means success + + +# --------------------------------------------------------------------------- +# Multiple requests before shutdown +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_multiple_requests_before_shutdown(): + """Multiple requests can be served, then shutdown handler runs.""" + server, calls = _make_server_with_shutdown() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + for i in range(5): + resp = await client.post("/invocations", content=f"request-{i}".encode()) + assert resp.status_code == 200 + + # Drive the lifespan to trigger shutdown + completed = await 
_drive_lifespan(server.app) + assert completed + assert "shutdown" in calls diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_invoke.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_invoke.py new file mode 100644 index 000000000000..5de15efd63cc --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_invoke.py @@ -0,0 +1,132 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Tests for the POST /invocations invoke dispatch.""" +import json +import uuid + +import pytest + + +# --------------------------------------------------------------------------- +# Echo body +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_invoke_echo_body(echo_client): + """POST /invocations echoes the request body.""" + resp = await echo_client.post("/invocations", content=b"hello world") + assert resp.status_code == 200 + assert resp.content == b"hello world" + + +# --------------------------------------------------------------------------- +# Headers +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_invoke_returns_invocation_id_header(echo_client): + """Response includes x-agent-invocation-id header.""" + resp = await echo_client.post("/invocations", content=b"test") + assert "x-agent-invocation-id" in resp.headers + # Should be a valid UUID + uuid.UUID(resp.headers["x-agent-invocation-id"]) + + +@pytest.mark.asyncio +async def test_invoke_returns_session_id_header(echo_client): + """Response includes x-agent-session-id header on POST /invocations.""" + resp = await echo_client.post("/invocations", content=b"test") + assert "x-agent-session-id" in resp.headers + # Should be a valid UUID (auto-generated) + uuid.UUID(resp.headers["x-agent-session-id"]) + + 
+@pytest.mark.asyncio +async def test_invoke_unique_invocation_ids(echo_client): + """Each invoke gets a unique invocation ID.""" + ids = set() + for _ in range(5): + resp = await echo_client.post("/invocations", content=b"test") + ids.add(resp.headers["x-agent-invocation-id"]) + assert len(ids) == 5 + + +@pytest.mark.asyncio +async def test_invoke_accepts_custom_invocation_id(echo_client): + """If the request sends x-agent-invocation-id, the server echoes it.""" + custom_id = str(uuid.uuid4()) + resp = await echo_client.post( + "/invocations", + content=b"test", + headers={"x-agent-invocation-id": custom_id}, + ) + assert resp.headers["x-agent-invocation-id"] == custom_id + + +# --------------------------------------------------------------------------- +# Streaming +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_streaming_returns_chunks(streaming_client): + """Streaming handler returns 3 JSON chunks.""" + resp = await streaming_client.post("/invocations", content=b"") + assert resp.status_code == 200 + lines = resp.text.strip().split("\n") + assert len(lines) == 3 + for i, line in enumerate(lines): + assert json.loads(line) == {"chunk": i} + + +@pytest.mark.asyncio +async def test_streaming_has_invocation_id_header(streaming_client): + """Streaming response includes invocation ID header.""" + resp = await streaming_client.post("/invocations", content=b"") + assert "x-agent-invocation-id" in resp.headers + uuid.UUID(resp.headers["x-agent-invocation-id"]) + + +# --------------------------------------------------------------------------- +# Empty body +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_invoke_empty_body(echo_client): + """Empty body doesn't crash the server.""" + resp = await echo_client.post("/invocations", content=b"") + assert resp.status_code == 200 + assert resp.content == b"" + + +# 
--------------------------------------------------------------------------- +# Error handling +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_invoke_error_returns_500(failing_client): + """Handler exception returns 500 with generic message.""" + resp = await failing_client.post("/invocations", content=b"test") + assert resp.status_code == 500 + body = resp.json() + assert body["error"]["code"] == "internal_error" + assert body["error"]["message"] == "Internal server error" + + +@pytest.mark.asyncio +async def test_invoke_error_has_invocation_id(failing_client): + """Error response still includes invocation ID header.""" + resp = await failing_client.post("/invocations", content=b"test") + assert "x-agent-invocation-id" in resp.headers + + +# --------------------------------------------------------------------------- +# Error handling +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_error_hides_details_by_default(failing_client): + """Exception message is hidden in error responses.""" + resp = await failing_client.post("/invocations", content=b"") + body = resp.json() + assert "something went wrong" not in body["error"]["message"] diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_multimodal_protocol.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_multimodal_protocol.py new file mode 100644 index 000000000000..f69cf5ca8de0 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_multimodal_protocol.py @@ -0,0 +1,279 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests for multi-modality payloads with AgentHost + InvocationHandler.""" +import json + +import pytest +from httpx import ASGITransport, AsyncClient +from starlette.requests import Request +from starlette.responses import JSONResponse, Response, StreamingResponse + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler + + +# --------------------------------------------------------------------------- +# Helper: content-type echo agent +# --------------------------------------------------------------------------- + +def _make_content_type_echo_agent() -> AgentHost: + """Agent that echoes body and returns the content-type it received.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + body = await request.body() + ct = request.headers.get("content-type", "unknown") + return Response( + content=body, + media_type=ct, + headers={"x-received-content-type": ct}, + ) + + return server + + +def _make_status_code_agent() -> AgentHost: + """Agent that returns a custom HTTP status code from query param.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + status = int(request.query_params.get("status", "200")) + body = await request.body() + return Response(content=body, status_code=status) + + return server + + +def _make_sse_agent() -> AgentHost: + """Agent that returns SSE-formatted streaming response.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> StreamingResponse: + async def generate(): + for i in range(3): + yield f"data: {json.dumps({'event': i})}\n\n" + + return StreamingResponse(generate(), media_type="text/event-stream") + + return server + + +# 
--------------------------------------------------------------------------- +# Various content types +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_png_content_type(): + """PNG content type is accepted and echoed.""" + server = _make_content_type_echo_agent() + transport = ASGITransport(app=server.app) + fake_png = b"\x89PNG\r\n\x1a\n" + b"\x00" * 100 + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post( + "/invocations", + content=fake_png, + headers={"content-type": "image/png"}, + ) + assert resp.status_code == 200 + assert resp.headers["x-received-content-type"] == "image/png" + assert resp.content == fake_png + + +@pytest.mark.asyncio +async def test_jpeg_content_type(): + """JPEG content type is accepted.""" + server = _make_content_type_echo_agent() + transport = ASGITransport(app=server.app) + fake_jpeg = b"\xff\xd8\xff\xe0" + b"\x00" * 100 + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post( + "/invocations", + content=fake_jpeg, + headers={"content-type": "image/jpeg"}, + ) + assert resp.status_code == 200 + assert resp.headers["x-received-content-type"] == "image/jpeg" + + +@pytest.mark.asyncio +async def test_wav_content_type(): + """WAV audio content type is accepted.""" + server = _make_content_type_echo_agent() + transport = ASGITransport(app=server.app) + fake_wav = b"RIFF" + b"\x00" * 100 + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post( + "/invocations", + content=fake_wav, + headers={"content-type": "audio/wav"}, + ) + assert resp.status_code == 200 + assert resp.headers["x-received-content-type"] == "audio/wav" + + +@pytest.mark.asyncio +async def test_pdf_content_type(): + """PDF content type is accepted.""" + server = _make_content_type_echo_agent() + transport = 
ASGITransport(app=server.app) + fake_pdf = b"%PDF-1.4" + b"\x00" * 100 + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post( + "/invocations", + content=fake_pdf, + headers={"content-type": "application/pdf"}, + ) + assert resp.status_code == 200 + assert resp.headers["x-received-content-type"] == "application/pdf" + + +@pytest.mark.asyncio +async def test_octet_stream_content_type(): + """application/octet-stream is accepted.""" + server = _make_content_type_echo_agent() + transport = ASGITransport(app=server.app) + binary = bytes(range(256)) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post( + "/invocations", + content=binary, + headers={"content-type": "application/octet-stream"}, + ) + assert resp.status_code == 200 + assert resp.content == binary + + +@pytest.mark.asyncio +async def test_text_plain_content_type(): + """text/plain content type is accepted.""" + server = _make_content_type_echo_agent() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post( + "/invocations", + content=b"Hello, world!", + headers={"content-type": "text/plain"}, + ) + assert resp.status_code == 200 + assert resp.content == b"Hello, world!" 
+ + +# --------------------------------------------------------------------------- +# Custom HTTP status codes +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_custom_status_200(): + """Handler returning 200.""" + server = _make_status_code_agent() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations?status=200", content=b"ok") + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_custom_status_201(): + """Handler returning 201.""" + server = _make_status_code_agent() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations?status=201", content=b"created") + assert resp.status_code == 201 + + +@pytest.mark.asyncio +async def test_custom_status_202(): + """Handler returning 202.""" + server = _make_status_code_agent() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations?status=202", content=b"accepted") + assert resp.status_code == 202 + + +# --------------------------------------------------------------------------- +# Query strings +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_query_string_passed_to_handler(): + """Query string params are accessible in the handler.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + name = request.query_params.get("name", "unknown") + return JSONResponse({"name": name}) + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await 
client.post("/invocations?name=Alice", content=b"") + assert resp.status_code == 200 + assert resp.json()["name"] == "Alice" + + +# --------------------------------------------------------------------------- +# SSE streaming +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_sse_streaming(): + """SSE-formatted streaming response works.""" + server = _make_sse_agent() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"") + assert resp.status_code == 200 + assert "text/event-stream" in resp.headers.get("content-type", "") + lines = [line for line in resp.text.split("\n") if line.startswith("data:")] + assert len(lines) == 3 + + +# --------------------------------------------------------------------------- +# Large binary payloads +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_large_binary_payload(): + """Large binary payload (512KB) is handled correctly.""" + server = _make_content_type_echo_agent() + transport = ASGITransport(app=server.app) + payload = bytes(range(256)) * 2048 # 512KB + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post( + "/invocations", + content=payload, + headers={"content-type": "application/octet-stream"}, + ) + assert resp.status_code == 200 + assert len(resp.content) == len(payload) + + +# --------------------------------------------------------------------------- +# Health endpoint (updated from /healthy to /readiness) +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_health_endpoint_returns_200(): + """GET /readiness returns 200 with healthy status.""" + server = AgentHost() + invocations = InvocationHandler(server) + + 
@invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.get("/readiness") + assert resp.status_code == 200 + assert resp.json() == {"status": "healthy"} diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_request_limits.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_request_limits.py new file mode 100644 index 000000000000..1625cdb84e07 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_request_limits.py @@ -0,0 +1,45 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Tests for request processing (timeout feature removed per spec alignment).""" +import pytest +from httpx import ASGITransport, AsyncClient +from starlette.requests import Request +from starlette.responses import Response + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler + + +# --------------------------------------------------------------------------- +# AgentHost no longer accepts request_timeout +# --------------------------------------------------------------------------- + +def test_no_request_timeout_parameter(): + """AgentHost no longer accepts request_timeout.""" + with pytest.raises(TypeError): + AgentHost(request_timeout=10) + + +# --------------------------------------------------------------------------- +# Slow invoke completes without timeout +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_slow_invoke_completes(): + """Without timeout, handler runs to completion.""" + import asyncio + + server = AgentHost() + invocations = 
InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + await asyncio.sleep(0.1) + return Response(content=b"done") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"test") + assert resp.status_code == 200 + assert resp.content == b"done" diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_server_routes.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_server_routes.py new file mode 100644 index 000000000000..405735f10164 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_server_routes.py @@ -0,0 +1,103 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Tests for basic server route registration with AgentHost + InvocationHandler.""" +import uuid + +import pytest +from httpx import ASGITransport, AsyncClient +from starlette.requests import Request +from starlette.responses import Response + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler +from conftest import SAMPLE_OPENAPI_SPEC + + +# --------------------------------------------------------------------------- +# POST /invocations returns 200 +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_post_invocations_returns_200(echo_client): + """POST /invocations returns 200 OK.""" + resp = await echo_client.post("/invocations", content=b"test") + assert resp.status_code == 200 + + +# --------------------------------------------------------------------------- +# POST /invocations returns invocation-id header (UUID) +# --------------------------------------------------------------------------- + 
+@pytest.mark.asyncio +async def test_post_invocations_returns_uuid_invocation_id(echo_client): + """POST /invocations returns a valid UUID in x-agent-invocation-id.""" + resp = await echo_client.post("/invocations", content=b"test") + inv_id = resp.headers["x-agent-invocation-id"] + parsed = uuid.UUID(inv_id) + assert str(parsed) == inv_id + + +# --------------------------------------------------------------------------- +# GET openapi spec returns 404 when not set +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_get_openapi_spec_returns_404_when_not_set(no_spec_client): + """GET /invocations/docs/openapi.json returns 404 when no spec registered.""" + resp = await no_spec_client.get("/invocations/docs/openapi.json") + assert resp.status_code == 404 + + +# --------------------------------------------------------------------------- +# GET openapi spec returns spec when registered +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_get_openapi_spec_returns_spec_when_registered(): + """GET /invocations/docs/openapi.json returns the spec when registered.""" + server = AgentHost() + invocations = InvocationHandler(server, openapi_spec=SAMPLE_OPENAPI_SPEC) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.get("/invocations/docs/openapi.json") + assert resp.status_code == 200 + assert resp.json() == SAMPLE_OPENAPI_SPEC + + +# --------------------------------------------------------------------------- +# GET /invocations/{id} returns 404 default +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_get_invocation_returns_404_default(echo_client): + """GET 
/invocations/{id} returns 404 when no get handler registered.""" + resp = await echo_client.get("/invocations/some-id") + assert resp.status_code == 404 + + +# --------------------------------------------------------------------------- +# POST /invocations/{id}/cancel returns 404 default +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_cancel_invocation_returns_404_default(echo_client): + """POST /invocations/{id}/cancel returns 404 when no cancel handler.""" + resp = await echo_client.post("/invocations/some-id/cancel") + assert resp.status_code == 404 + + +# --------------------------------------------------------------------------- +# Unknown route returns 404 +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_unknown_route_returns_404(echo_client): + """Unknown route returns 404.""" + resp = await echo_client.get("/nonexistent") + assert resp.status_code == 404 diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_session_id.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_session_id.py new file mode 100644 index 000000000000..23609ef1ecc9 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_session_id.py @@ -0,0 +1,112 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests for session ID resolution and x-agent-session-id header.""" +import os +import uuid +from unittest.mock import patch + +import pytest +from httpx import ASGITransport, AsyncClient +from starlette.requests import Request +from starlette.responses import Response + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler +from azure.ai.agentserver.invocations._constants import InvocationConstants + + +# --------------------------------------------------------------------------- +# Constants +# --------------------------------------------------------------------------- + +def test_session_id_header_constant(): + """SESSION_ID_HEADER constant is correct.""" + assert InvocationConstants.SESSION_ID_HEADER == "x-agent-session-id" + + +# --------------------------------------------------------------------------- +# POST /invocations response has x-agent-session-id header +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_post_invocations_has_session_id_header(echo_client): + """POST /invocations response includes x-agent-session-id header.""" + resp = await echo_client.post("/invocations", content=b"test") + assert "x-agent-session-id" in resp.headers + # Auto-generated should be a valid UUID + uuid.UUID(resp.headers["x-agent-session-id"]) + + +# --------------------------------------------------------------------------- +# POST /invocations with query param uses that value +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_post_invocations_with_query_param(): + """POST /invocations with agent_session_id query param uses that value.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + 
+ transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post( + "/invocations?agent_session_id=my-custom-session", + content=b"test", + ) + assert resp.headers["x-agent-session-id"] == "my-custom-session" + + +# --------------------------------------------------------------------------- +# POST /invocations with env var +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_post_invocations_uses_env_var(): + """POST /invocations uses FOUNDRY_AGENT_SESSION_ID env var when no query param.""" + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + with patch.dict(os.environ, {"FOUNDRY_AGENT_SESSION_ID": "env-session"}): + resp = await client.post("/invocations", content=b"test") + assert resp.headers["x-agent-session-id"] == "env-session" + + +# --------------------------------------------------------------------------- +# GET /invocations/{id} does NOT have x-agent-session-id header +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_get_invocation_no_session_id_header(async_storage_client): + """GET /invocations/{id} does NOT include x-agent-session-id.""" + resp = await async_storage_client.post("/invocations", content=b"data") + inv_id = resp.headers["x-agent-invocation-id"] + + get_resp = await async_storage_client.get(f"/invocations/{inv_id}") + assert get_resp.status_code == 200 + assert "x-agent-session-id" not in get_resp.headers + + +# --------------------------------------------------------------------------- +# POST /invocations/{id}/cancel does NOT have x-agent-session-id 
header +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_cancel_invocation_no_session_id_header(async_storage_client): + """POST /invocations/{id}/cancel does NOT include x-agent-session-id.""" + resp = await async_storage_client.post("/invocations", content=b"data") + inv_id = resp.headers["x-agent-invocation-id"] + + cancel_resp = await async_storage_client.post(f"/invocations/{inv_id}/cancel") + assert cancel_resp.status_code == 200 + assert "x-agent-session-id" not in cancel_resp.headers diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_span_parenting.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_span_parenting.py new file mode 100644 index 000000000000..d874db2b741f --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_span_parenting.py @@ -0,0 +1,129 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests that the invoke_agent span is set as the current span in context, +so that child spans created by framework handlers are correctly parented.""" +import os +from unittest.mock import patch + +import pytest +from httpx import ASGITransport, AsyncClient +from starlette.requests import Request +from starlette.responses import Response, StreamingResponse + +from azure.ai.agentserver.core import AgentHost +from azure.ai.agentserver.invocations import InvocationHandler + +try: + from opentelemetry import trace + from opentelemetry.sdk.trace import TracerProvider as SdkTracerProvider + from opentelemetry.sdk.trace.export import SimpleSpanProcessor + from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter + + _HAS_OTEL = True +except ImportError: + _HAS_OTEL = False + +if _HAS_OTEL: + _EXPORTER = InMemorySpanExporter() + _PROVIDER = SdkTracerProvider() + _PROVIDER.add_span_processor(SimpleSpanProcessor(_EXPORTER)) + trace.set_tracer_provider(_PROVIDER) +else: + _EXPORTER = None + +pytestmark = pytest.mark.skipif(not _HAS_OTEL, reason="opentelemetry not installed") + + +@pytest.fixture(autouse=True) +def _clear(): + if _EXPORTER: + _EXPORTER.clear() + + +def _get_spans(): + return list(_EXPORTER.get_finished_spans()) if _EXPORTER else [] + + +def _make_server_with_child_span(): + """Server whose handler creates a child span (simulating a framework).""" + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + server = AgentHost() + invocations = InvocationHandler(server) + child_tracer = trace.get_tracer("test.framework") + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + # Simulate a framework creating its own invoke_agent span + with child_tracer.start_as_current_span("framework_invoke_agent") as _span: + return Response(content=b"ok") + + return server + + +def _make_streaming_server_with_child_span(): + 
"""Server with streaming response whose handler creates a child span.""" + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + server = AgentHost() + invocations = InvocationHandler(server) + child_tracer = trace.get_tracer("test.framework") + + @invocations.invoke_handler + async def handle(request: Request) -> StreamingResponse: + with child_tracer.start_as_current_span("framework_invoke_agent"): + async def generate(): + yield b"chunk\n" + return StreamingResponse(generate(), media_type="text/plain") + + return server + + +@pytest.mark.asyncio +async def test_framework_span_is_child_of_invoke_span(): + """A span created inside the handler should be a child of the + agentserver invoke_agent span, not a sibling.""" + server = _make_server_with_child_span() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + await client.post("/invocations", content=b"test") + + spans = _get_spans() + parent_spans = [s for s in spans if "invoke_agent" in s.name] + child_spans = [s for s in spans if s.name == "framework_invoke_agent"] + + assert len(parent_spans) >= 1, f"Expected invoke_agent span, got: {[s.name for s in spans]}" + assert len(child_spans) == 1, f"Expected framework span, got: {[s.name for s in spans]}" + + parent = parent_spans[0] + child = child_spans[0] + + # The child span's parent should be the agentserver invoke_agent span + assert child.parent is not None, "Framework span has no parent — it's a root span (sibling)" + assert child.parent.span_id == parent.context.span_id, ( + f"Framework span parent ({format(child.parent.span_id, '016x')}) " + f"!= invoke_agent span ({format(parent.context.span_id, '016x')}). " + "Spans are siblings, not parent-child." 
+ ) + + +@pytest.mark.asyncio +async def test_framework_span_is_child_streaming(): + """Same parent-child relationship holds for streaming responses.""" + server = _make_streaming_server_with_child_span() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"test") + assert resp.status_code == 200 + + spans = _get_spans() + parent_spans = [s for s in spans if "invoke_agent" in s.name] + child_spans = [s for s in spans if s.name == "framework_invoke_agent"] + + assert len(parent_spans) >= 1 + assert len(child_spans) == 1 + + parent = parent_spans[0] + child = child_spans[0] + + assert child.parent is not None, "Framework span has no parent in streaming case" + assert child.parent.span_id == parent.context.span_id diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_tracing.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_tracing.py new file mode 100644 index 000000000000..49a1219c4ff0 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_tracing.py @@ -0,0 +1,483 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# ---------------------------------------------------------
+"""Tests for OpenTelemetry tracing in the invocations protocol."""
+import os
+import uuid
+from unittest.mock import patch
+
+import pytest
+from httpx import ASGITransport, AsyncClient
+from starlette.requests import Request
+from starlette.responses import JSONResponse, Response, StreamingResponse
+
+from azure.ai.agentserver.core import AgentHost
+from azure.ai.agentserver.invocations import InvocationHandler
+
+# ---------------------------------------------------------------------------
+# Module-level OTel setup with in-memory exporter
+# ---------------------------------------------------------------------------
+# We use the real OTel SDK to capture spans in memory.
+
+try:
+    from opentelemetry import trace
+    from opentelemetry.sdk.trace import TracerProvider as SdkTracerProvider
+    from opentelemetry.sdk.trace.export import SimpleSpanProcessor
+    from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
+
+    _HAS_OTEL = True
+except ImportError:
+    _HAS_OTEL = False
+
+# Module-level provider so all tests share the same exporter
+if _HAS_OTEL:
+    _MODULE_EXPORTER = InMemorySpanExporter()
+    _MODULE_PROVIDER = SdkTracerProvider()
+    _MODULE_PROVIDER.add_span_processor(SimpleSpanProcessor(_MODULE_EXPORTER))
+    trace.set_tracer_provider(_MODULE_PROVIDER)
+else:
+    _MODULE_EXPORTER = None
+    _MODULE_PROVIDER = None
+
+pytestmark = pytest.mark.skipif(not _HAS_OTEL, reason="opentelemetry not installed")
+
+
+@pytest.fixture(autouse=True)
+def _clear_spans():
+    """Clear exported spans before each test."""
+    if _MODULE_EXPORTER:
+        _MODULE_EXPORTER.clear()
+
+
+def _get_spans():
+    """Return all captured spans."""
+    if _MODULE_EXPORTER:
+        return _MODULE_EXPORTER.get_finished_spans()
+    return []
+
+
+# ---------------------------------------------------------------------------
+# Helper: create tracing-enabled server
+# ---------------------------------------------------------------------------
+
+def _make_tracing_server(**kwargs): + """Create an AgentHost with tracing enabled.""" + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + server = AgentHost(**kwargs) + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + body = await request.body() + return Response(content=body, media_type="application/octet-stream") + + return server + + +def _make_tracing_server_with_get_cancel(**kwargs): + """Create a tracing-enabled server with get/cancel handlers.""" + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + server = AgentHost(**kwargs) + invocations = InvocationHandler(server) + + store: dict[str, bytes] = {} + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + body = await request.body() + store[request.state.invocation_id] = body + return Response(content=body, media_type="application/octet-stream") + + @invocations.get_invocation_handler + async def get_handler(request: Request) -> Response: + inv_id = request.path_params["invocation_id"] + if inv_id in store: + return Response(content=store[inv_id]) + return JSONResponse({"error": {"code": "not_found", "message": "Not found"}}, status_code=404) + + @invocations.cancel_invocation_handler + async def cancel_handler(request: Request) -> Response: + inv_id = request.path_params["invocation_id"] + if inv_id in store: + del store[inv_id] + return JSONResponse({"status": "cancelled"}) + return JSONResponse({"error": {"code": "not_found", "message": "Not found"}}, status_code=404) + + return server + + +def _make_failing_tracing_server(**kwargs): + """Create a tracing-enabled server whose handler raises.""" + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + server = AgentHost(**kwargs) + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: 
+ raise ValueError("tracing error test") + + return server + + +def _make_streaming_tracing_server(**kwargs): + """Create a tracing-enabled server with streaming response.""" + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + server = AgentHost(**kwargs) + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> StreamingResponse: + async def generate(): + yield b"chunk1\n" + yield b"chunk2\n" + + return StreamingResponse(generate(), media_type="text/plain") + + return server + + +# --------------------------------------------------------------------------- +# Tracing disabled by default +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_tracing_disabled_by_default(): + """No spans are created when tracing is not enabled.""" + if _MODULE_EXPORTER: + _MODULE_EXPORTER.clear() + + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + await client.post("/invocations", content=b"test") + + # No spans should be created (server has no tracing helper) + # The module-level provider may capture unrelated spans, + # but none should be from our server + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) == 0 + + +# --------------------------------------------------------------------------- +# Tracing enabled creates invoke span with correct name +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_tracing_enabled_creates_invoke_span(): + """Tracing enabled creates a span named 'invoke_agent'.""" + server = _make_tracing_server() + transport 
= ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + await client.post("/invocations", content=b"test") + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + assert invoke_spans[0].name.startswith("invoke_agent") + + +# --------------------------------------------------------------------------- +# Invoke error records exception +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_invoke_error_records_exception(): + """When handler raises, the span records the exception.""" + server = _make_failing_tracing_server() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"test") + assert resp.status_code == 500 + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + span = invoke_spans[0] + # Should have error status + assert span.status.status_code.name == "ERROR" + + +# --------------------------------------------------------------------------- +# GET/cancel create spans +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_get_invocation_creates_span(): + """GET /invocations/{id} creates a span.""" + server = _make_tracing_server_with_get_cancel() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"data") + inv_id = resp.headers["x-agent-invocation-id"] + await client.get(f"/invocations/{inv_id}") + + spans = _get_spans() + get_spans = [s for s in spans if "get_invocation" in s.name] + assert len(get_spans) >= 1 + + +@pytest.mark.asyncio +async def 
test_cancel_invocation_creates_span(): + """POST /invocations/{id}/cancel creates a span.""" + server = _make_tracing_server_with_get_cancel() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"data") + inv_id = resp.headers["x-agent-invocation-id"] + await client.post(f"/invocations/{inv_id}/cancel") + + spans = _get_spans() + cancel_spans = [s for s in spans if "cancel_invocation" in s.name] + assert len(cancel_spans) >= 1 + + +# --------------------------------------------------------------------------- +# Tracing via env var +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_tracing_via_appinsights_env_var(): + """Tracing is enabled when APPLICATIONINSIGHTS_CONNECTION_STRING is set.""" + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + await client.post("/invocations", content=b"test") + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + + +# --------------------------------------------------------------------------- +# No tracing when no endpoints configured +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_no_tracing_when_no_endpoints(): + """Tracing is disabled when no connection string or OTLP endpoint is set.""" + env = os.environ.copy() + 
env.pop("APPLICATIONINSIGHTS_CONNECTION_STRING", None) + env.pop("OTEL_EXPORTER_OTLP_ENDPOINT", None) + with patch.dict(os.environ, env, clear=True): + server = AgentHost() + invocations = InvocationHandler(server) + + @invocations.invoke_handler + async def handle(request: Request) -> Response: + return Response(content=b"ok") + + if _MODULE_EXPORTER: + _MODULE_EXPORTER.clear() + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + await client.post("/invocations", content=b"test") + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) == 0 + + +# --------------------------------------------------------------------------- +# Traceparent propagation +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_traceparent_propagation(): + """Server propagates traceparent header into span context.""" + server = _make_tracing_server() + transport = ASGITransport(app=server.app) + + # Create a traceparent + trace_id_hex = uuid.uuid4().hex + span_id_hex = uuid.uuid4().hex[:16] + traceparent = f"00-{trace_id_hex}-{span_id_hex}-01" + + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + await client.post( + "/invocations", + content=b"test", + headers={"traceparent": traceparent}, + ) + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + span = invoke_spans[0] + # The span should have the same trace ID as the traceparent + actual_trace_id = format(span.context.trace_id, "032x") + assert actual_trace_id == trace_id_hex + + +# --------------------------------------------------------------------------- +# Streaming spans +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_streaming_creates_span(): + """Streaming 
response creates and completes a span.""" + server = _make_streaming_tracing_server() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"test") + assert resp.status_code == 200 + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + + +# --------------------------------------------------------------------------- +# GenAI attributes on invoke span +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_genai_attributes_on_invoke_span(): + """Invoke span has GenAI semantic convention attributes.""" + server = _make_tracing_server() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + await client.post("/invocations", content=b"test") + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + attrs = dict(invoke_spans[0].attributes) + + assert attrs.get("gen_ai.provider.name") == "AzureAI Hosted Agents" + assert attrs.get("gen_ai.system") == "azure.ai.agentserver" + assert attrs.get("service.name") == "azure.ai.agentserver" + + +# --------------------------------------------------------------------------- +# Session ID in gen_ai.conversation.id +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_session_id_in_conversation_id(): + """Session ID is set as gen_ai.conversation.id on invoke span.""" + server = _make_tracing_server() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + await client.post( + "/invocations?agent_session_id=test-session", + content=b"test", + ) + + spans = _get_spans() + invoke_spans = [s for s in 
spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + attrs = dict(invoke_spans[0].attributes) + assert attrs.get("gen_ai.conversation.id") == "test-session" + + +# --------------------------------------------------------------------------- +# GenAI attributes on get_invocation span +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_genai_attributes_on_get_span(): + """GET invocation span has GenAI attributes.""" + server = _make_tracing_server_with_get_cancel() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"data") + inv_id = resp.headers["x-agent-invocation-id"] + await client.get(f"/invocations/{inv_id}") + + spans = _get_spans() + get_spans = [s for s in spans if "get_invocation" in s.name] + assert len(get_spans) >= 1 + attrs = dict(get_spans[0].attributes) + assert attrs.get("gen_ai.system") == "azure.ai.agentserver" + assert attrs.get("gen_ai.provider.name") == "AzureAI Hosted Agents" + + +# --------------------------------------------------------------------------- +# Namespaced invocation_id attribute +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_namespaced_invocation_id_attribute(): + """Invoke span has azure.ai.agentserver.invocations.invocation_id.""" + server = _make_tracing_server() + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + resp = await client.post("/invocations", content=b"test") + inv_id = resp.headers["x-agent-invocation-id"] + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + attrs = dict(invoke_spans[0].attributes) + assert attrs.get("azure.ai.agentserver.invocations.invocation_id") == inv_id + + +# 
--------------------------------------------------------------------------- +# Agent name/version in span names +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_agent_name_in_span_name(): + """Agent name from env var appears in span name.""" + with patch.dict(os.environ, { + "FOUNDRY_AGENT_NAME": "my-agent", + "FOUNDRY_AGENT_VERSION": "2.0", + }): + server = _make_tracing_server() + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + await client.post("/invocations", content=b"test") + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + assert "my-agent" in invoke_spans[0].name + assert "2.0" in invoke_spans[0].name + + +@pytest.mark.asyncio +async def test_agent_name_only_in_span_name(): + """Agent name without version in span name.""" + env_override = {"FOUNDRY_AGENT_NAME": "solo-agent"} + env_copy = os.environ.copy() + env_copy.pop("FOUNDRY_AGENT_VERSION", None) + env_copy.update(env_override) + with patch.dict(os.environ, env_copy, clear=True): + server = _make_tracing_server() + + transport = ASGITransport(app=server.app) + async with AsyncClient(transport=transport, base_url="http://testserver") as client: + await client.post("/invocations", content=b"test") + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + assert "solo-agent" in invoke_spans[0].name + + +# --------------------------------------------------------------------------- +# Project endpoint attribute +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_project_endpoint_env_var(): + """FOUNDRY_PROJECT_ENDPOINT constant matches the expected env var name.""" + from azure.ai.agentserver.core import Constants + assert 
Constants.FOUNDRY_PROJECT_ENDPOINT == "FOUNDRY_PROJECT_ENDPOINT" diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml index 54bce651f401..56eea835f958 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml @@ -8,6 +8,7 @@ authors = [ ] license = "MIT" classifiers = [ + "Development Status :: 7 - Inactive", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3", diff --git a/sdk/agentserver/ci.yml b/sdk/agentserver/ci.yml index 8d3a31fdaf21..777c6d8a4485 100644 --- a/sdk/agentserver/ci.yml +++ b/sdk/agentserver/ci.yml @@ -41,9 +41,9 @@ extends: Selection: sparse GenerateVMJobs: true Artifacts: + - name: azure-ai-agentserver-invocations + safeName: azureaiagentserverinvocations - name: azure-ai-agentserver-core safeName: azureaiagentservercore - - name: azure-ai-agentserver-agentframework - safeName: azureaiagentserveragentframework - name: azure-ai-agentserver-github safeName: azureaiagentservergithub