diff --git a/pyproject.toml b/pyproject.toml
index 5e508c6c..346c47cf 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,10 +1,10 @@
[project]
name = "zep-cloud"
-version = "3.19.0"
+version = "3.22.0"
[tool.poetry]
name = "zep-cloud"
-version = "3.20.0"
+version = "3.22.0"
description = ""
readme = "README.md"
authors = []
diff --git a/reference.md b/reference.md
index a3b9b3eb..ddd0baa3 100644
--- a/reference.md
+++ b/reference.md
@@ -1,4 +1,551 @@
# Reference
+## Batch
+client.batch.list(...)
+
+-
+
+#### 📝 Description
+
+
+-
+
+
+-
+
+List batches for the current project, optionally filtered by batch status.
+
+
+
+
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from zep_cloud import Zep
+
+client = Zep(
+ api_key="YOUR_API_KEY",
+)
+client.batch.list(
+ limit=1,
+ cursor=1,
+ status="status",
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**limit:** `typing.Optional[int]` — Maximum number of batches to return.
+
+
+
+
+
+-
+
+**cursor:** `typing.Optional[int]` — Pagination cursor from a previous response.
+
+
+
+
+
+-
+
+**status:** `typing.Optional[str]` — Batch status filter.
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
+
+
+
+
+client.batch.create(...)
+
+-
+
+#### 📝 Description
+
+
+-
+
+
+-
+
+Create a draft batch that can be filled with graph episodes and thread messages.
+
+
+
+
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from zep_cloud import Zep
+
+client = Zep(
+ api_key="YOUR_API_KEY",
+)
+client.batch.create()
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**metadata:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]`
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
+
+
+
+
+client.batch.get(...)
+
+-
+
+#### 📝 Description
+
+
+-
+
+
+-
+
+Get a batch summary, including runtime progress when the batch has been processed.
+
+
+
+
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from zep_cloud import Zep
+
+client = Zep(
+ api_key="YOUR_API_KEY",
+)
+client.batch.get(
+ batch_id="batchId",
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**batch_id:** `str` — The batch ID.
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
+
+
+
+
+client.batch.delete(...)
+
+-
+
+#### 📝 Description
+
+
+-
+
+
+-
+
+Delete a draft or invalid unprocessed batch. Processed batches cannot be deleted.
+
+
+
+
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from zep_cloud import Zep
+
+client = Zep(
+ api_key="YOUR_API_KEY",
+)
+client.batch.delete(
+ batch_id="batchId",
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**batch_id:** `str` — The batch ID.
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
+
+
+
+
+client.batch.list_items(...)
+
+-
+
+#### 📝 Description
+
+
+-
+
+
+-
+
+List items in a batch, including derived runtime status when the batch has been processed.
+
+
+
+
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from zep_cloud import Zep
+
+client = Zep(
+ api_key="YOUR_API_KEY",
+)
+client.batch.list_items(
+ batch_id="batchId",
+ limit=1,
+ cursor=1,
+ status="status",
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**batch_id:** `str` — The batch ID.
+
+
+
+
+
+-
+
+**limit:** `typing.Optional[int]` — Maximum number of batch items to return.
+
+
+
+
+
+-
+
+**cursor:** `typing.Optional[int]` — Pagination cursor from a previous response.
+
+
+
+
+
+-
+
+**status:** `typing.Optional[str]` — Batch item status filter.
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
+
+
+
+
+client.batch.add(...)
+
+-
+
+#### 📝 Description
+
+
+-
+
+
+-
+
+Add graph episodes and thread messages to a draft batch. Items are appended in request order.
+
+
+
+
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from zep_cloud import BatchAddItem, Zep
+
+client = Zep(
+ api_key="YOUR_API_KEY",
+)
+client.batch.add(
+ batch_id="batchId",
+ items=[
+ BatchAddItem(
+ type="graph_episode",
+ )
+ ],
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**batch_id:** `str` — The batch ID.
+
+
+
+
+
+-
+
+**items:** `typing.Sequence[BatchAddItem]`
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
+
+
+
+
+client.batch.process(...)
+
+-
+
+#### 📝 Description
+
+
+-
+
+
+-
+
+Start processing a filled batch. Repeated calls return the existing batch run.
+
+
+
+
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from zep_cloud import Zep
+
+client = Zep(
+ api_key="YOUR_API_KEY",
+)
+client.batch.process(
+ batch_id="batchId",
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**batch_id:** `str` — The batch ID.
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
+
+
+
+
## Context
client.context.list_context_templates()
@@ -1775,7 +2322,7 @@ client.graph.search(
-
-**max_characters:** `typing.Optional[int]` — Maximum total characters across all selected results when scope=auto. Defaults to 2000. Limited to 50000.
+**max_characters:** `typing.Optional[int]` — Maximum total characters across all selected results when scope=auto. Defaults to 2500. Limited to 50000.
@@ -1791,7 +2338,11 @@ client.graph.search(
-
-**reranker:** `typing.Optional[Reranker]` — Defaults to RRF
+**reranker:** `typing.Optional[Reranker]`
+
+Defaults to RRF. When scope=auto, this only affects graph-service retrieval
+shape for graph facts, observations, and thread summaries; source-episode
+retrieval uses RRF, and auto search applies its own internal rerank after retrieval.
@@ -1799,7 +2350,11 @@ client.graph.search(
-
-**return_raw_results:** `typing.Optional[bool]` — When scope=auto, include the selected raw graph results alongside the materialized context block.
+**return_raw_results:** `typing.Optional[bool]`
+
+When scope=auto, include the selected raw graph results alongside the materialized context block.
+For graph-service-backed auto mode, selected raw results may include episodes,
+edges, nodes, observations, and thread_summaries.
diff --git a/src/zep_cloud/__init__.py b/src/zep_cloud/__init__.py
index b69f9ad4..81b1e881 100644
--- a/src/zep_cloud/__init__.py
+++ b/src/zep_cloud/__init__.py
@@ -7,6 +7,17 @@
AddThreadMessagesResponse,
AddTripleResponse,
ApiError,
+ ApidataBatchAddItemRole,
+ ApidataBatchAddItemType,
+ BatchAddItem,
+ BatchItemDetail,
+ BatchItemKind,
+ BatchItemListResponse,
+ BatchItemStatus,
+ BatchListResponse,
+ BatchProgress,
+ BatchStatus,
+ BatchSummary,
CloneGraphResponse,
ClusterDetectConfig,
CoOccurrenceDetectConfig,
@@ -73,8 +84,8 @@
UserListResponse,
UserNodeResponse,
)
-from .errors import BadRequestError, ForbiddenError, InternalServerError, NotFoundError
-from . import context, graph, project, task, thread, user
+from .errors import BadRequestError, ConflictError, ForbiddenError, InternalServerError, NotFoundError
+from . import batch, context, graph, project, task, thread, user
from .client import AsyncZep, Zep
from .environment import ZepEnvironment
from .version import __version__
@@ -84,12 +95,24 @@
"AddThreadMessagesResponse",
"AddTripleResponse",
"ApiError",
+ "ApidataBatchAddItemRole",
+ "ApidataBatchAddItemType",
"AsyncZep",
"BadRequestError",
+ "BatchAddItem",
+ "BatchItemDetail",
+ "BatchItemKind",
+ "BatchItemListResponse",
+ "BatchItemStatus",
+ "BatchListResponse",
+ "BatchProgress",
+ "BatchStatus",
+ "BatchSummary",
"CloneGraphResponse",
"ClusterDetectConfig",
"CoOccurrenceDetectConfig",
"ComparisonOperator",
+ "ConflictError",
"ContextTemplateResponse",
"CustomInstruction",
"DateFilter",
@@ -157,6 +180,7 @@
"Zep",
"ZepEnvironment",
"__version__",
+ "batch",
"context",
"graph",
"project",
diff --git a/src/zep_cloud/base_client.py b/src/zep_cloud/base_client.py
index ae0b8c9d..e59bb3cd 100644
--- a/src/zep_cloud/base_client.py
+++ b/src/zep_cloud/base_client.py
@@ -4,6 +4,7 @@
import typing
import httpx
+from .batch.client import AsyncBatchClient, BatchClient
from .context.client import AsyncContextClient, ContextClient
from .core.api_error import ApiError
from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
@@ -82,6 +83,7 @@ def __init__(
else httpx.Client(timeout=_defaulted_timeout),
timeout=_defaulted_timeout,
)
+ self.batch = BatchClient(client_wrapper=self._client_wrapper)
self.context = ContextClient(client_wrapper=self._client_wrapper)
self.graph = GraphClient(client_wrapper=self._client_wrapper)
self.project = ProjectClient(client_wrapper=self._client_wrapper)
@@ -157,6 +159,7 @@ def __init__(
else httpx.AsyncClient(timeout=_defaulted_timeout),
timeout=_defaulted_timeout,
)
+ self.batch = AsyncBatchClient(client_wrapper=self._client_wrapper)
self.context = AsyncContextClient(client_wrapper=self._client_wrapper)
self.graph = AsyncGraphClient(client_wrapper=self._client_wrapper)
self.project = AsyncProjectClient(client_wrapper=self._client_wrapper)
diff --git a/src/zep_cloud/batch/__init__.py b/src/zep_cloud/batch/__init__.py
new file mode 100644
index 00000000..5cde0202
--- /dev/null
+++ b/src/zep_cloud/batch/__init__.py
@@ -0,0 +1,4 @@
+# This file was auto-generated by Fern from our API Definition.
+
+# isort: skip_file
+
diff --git a/src/zep_cloud/batch/client.py b/src/zep_cloud/batch/client.py
new file mode 100644
index 00000000..6a810052
--- /dev/null
+++ b/src/zep_cloud/batch/client.py
@@ -0,0 +1,646 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
+from ..core.request_options import RequestOptions
+from ..types.batch_add_item import BatchAddItem
+from ..types.batch_item_detail import BatchItemDetail
+from ..types.batch_item_list_response import BatchItemListResponse
+from ..types.batch_list_response import BatchListResponse
+from ..types.batch_summary import BatchSummary
+from ..types.success_response import SuccessResponse
+from .raw_client import AsyncRawBatchClient, RawBatchClient
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
+
+class BatchClient:
+ def __init__(self, *, client_wrapper: SyncClientWrapper):
+ self._raw_client = RawBatchClient(client_wrapper=client_wrapper)
+
+ @property
+ def with_raw_response(self) -> RawBatchClient:
+ """
+ Retrieves a raw implementation of this client that returns raw responses.
+
+ Returns
+ -------
+ RawBatchClient
+ """
+ return self._raw_client
+
+ def list(
+ self,
+ *,
+ limit: typing.Optional[int] = None,
+ cursor: typing.Optional[int] = None,
+ status: typing.Optional[str] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> BatchListResponse:
+ """
+ List batches for the current project, optionally filtered by batch status.
+
+ Parameters
+ ----------
+ limit : typing.Optional[int]
+ Maximum number of batches to return.
+
+ cursor : typing.Optional[int]
+ Pagination cursor from a previous response.
+
+ status : typing.Optional[str]
+ Batch status filter.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ BatchListResponse
+ Batch list
+
+ Examples
+ --------
+ from zep_cloud import Zep
+
+ client = Zep(
+ api_key="YOUR_API_KEY",
+ )
+ client.batch.list(
+ limit=1,
+ cursor=1,
+ status="status",
+ )
+ """
+ _response = self._raw_client.list(limit=limit, cursor=cursor, status=status, request_options=request_options)
+ return _response.data
+
+ def create(
+ self,
+ *,
+ metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> BatchSummary:
+ """
+ Create a draft batch that can be filled with graph episodes and thread messages.
+
+ Parameters
+ ----------
+ metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ BatchSummary
+ Created batch
+
+ Examples
+ --------
+ from zep_cloud import Zep
+
+ client = Zep(
+ api_key="YOUR_API_KEY",
+ )
+ client.batch.create()
+ """
+ _response = self._raw_client.create(metadata=metadata, request_options=request_options)
+ return _response.data
+
+ def get(self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> BatchSummary:
+ """
+ Get a batch summary, including runtime progress when the batch has been processed.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ BatchSummary
+ Batch summary
+
+ Examples
+ --------
+ from zep_cloud import Zep
+
+ client = Zep(
+ api_key="YOUR_API_KEY",
+ )
+ client.batch.get(
+ batch_id="batchId",
+ )
+ """
+ _response = self._raw_client.get(batch_id, request_options=request_options)
+ return _response.data
+
+ def delete(self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> SuccessResponse:
+ """
+ Delete a draft or invalid unprocessed batch. Processed batches cannot be deleted.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SuccessResponse
+ Deleted batch
+
+ Examples
+ --------
+ from zep_cloud import Zep
+
+ client = Zep(
+ api_key="YOUR_API_KEY",
+ )
+ client.batch.delete(
+ batch_id="batchId",
+ )
+ """
+ _response = self._raw_client.delete(batch_id, request_options=request_options)
+ return _response.data
+
+ def list_items(
+ self,
+ batch_id: str,
+ *,
+ limit: typing.Optional[int] = None,
+ cursor: typing.Optional[int] = None,
+ status: typing.Optional[str] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> BatchItemListResponse:
+ """
+ List items in a batch, including derived runtime status when the batch has been processed.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ limit : typing.Optional[int]
+ Maximum number of batch items to return.
+
+ cursor : typing.Optional[int]
+ Pagination cursor from a previous response.
+
+ status : typing.Optional[str]
+ Batch item status filter.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ BatchItemListResponse
+ Batch item list
+
+ Examples
+ --------
+ from zep_cloud import Zep
+
+ client = Zep(
+ api_key="YOUR_API_KEY",
+ )
+ client.batch.list_items(
+ batch_id="batchId",
+ limit=1,
+ cursor=1,
+ status="status",
+ )
+ """
+ _response = self._raw_client.list_items(
+ batch_id, limit=limit, cursor=cursor, status=status, request_options=request_options
+ )
+ return _response.data
+
+ def add(
+ self,
+ batch_id: str,
+ *,
+ items: typing.Sequence[BatchAddItem],
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> typing.List[BatchItemDetail]:
+ """
+ Add graph episodes and thread messages to a draft batch. Items are appended in request order.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ items : typing.Sequence[BatchAddItem]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ typing.List[BatchItemDetail]
+ Added batch items
+
+ Examples
+ --------
+ from zep_cloud import BatchAddItem, Zep
+
+ client = Zep(
+ api_key="YOUR_API_KEY",
+ )
+ client.batch.add(
+ batch_id="batchId",
+ items=[
+ BatchAddItem(
+ type="graph_episode",
+ )
+ ],
+ )
+ """
+ _response = self._raw_client.add(batch_id, items=items, request_options=request_options)
+ return _response.data
+
+ def process(self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> BatchSummary:
+ """
+ Start processing a filled batch. Repeated calls return the existing batch run.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ BatchSummary
+ Batch processing state
+
+ Examples
+ --------
+ from zep_cloud import Zep
+
+ client = Zep(
+ api_key="YOUR_API_KEY",
+ )
+ client.batch.process(
+ batch_id="batchId",
+ )
+ """
+ _response = self._raw_client.process(batch_id, request_options=request_options)
+ return _response.data
+
+
+class AsyncBatchClient:
+ def __init__(self, *, client_wrapper: AsyncClientWrapper):
+ self._raw_client = AsyncRawBatchClient(client_wrapper=client_wrapper)
+
+ @property
+ def with_raw_response(self) -> AsyncRawBatchClient:
+ """
+ Retrieves a raw implementation of this client that returns raw responses.
+
+ Returns
+ -------
+ AsyncRawBatchClient
+ """
+ return self._raw_client
+
+ async def list(
+ self,
+ *,
+ limit: typing.Optional[int] = None,
+ cursor: typing.Optional[int] = None,
+ status: typing.Optional[str] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> BatchListResponse:
+ """
+ List batches for the current project, optionally filtered by batch status.
+
+ Parameters
+ ----------
+ limit : typing.Optional[int]
+ Maximum number of batches to return.
+
+ cursor : typing.Optional[int]
+ Pagination cursor from a previous response.
+
+ status : typing.Optional[str]
+ Batch status filter.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ BatchListResponse
+ Batch list
+
+ Examples
+ --------
+ import asyncio
+
+ from zep_cloud import AsyncZep
+
+ client = AsyncZep(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.batch.list(
+ limit=1,
+ cursor=1,
+ status="status",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._raw_client.list(
+ limit=limit, cursor=cursor, status=status, request_options=request_options
+ )
+ return _response.data
+
+ async def create(
+ self,
+ *,
+ metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> BatchSummary:
+ """
+ Create a draft batch that can be filled with graph episodes and thread messages.
+
+ Parameters
+ ----------
+ metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ BatchSummary
+ Created batch
+
+ Examples
+ --------
+ import asyncio
+
+ from zep_cloud import AsyncZep
+
+ client = AsyncZep(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.batch.create()
+
+
+ asyncio.run(main())
+ """
+ _response = await self._raw_client.create(metadata=metadata, request_options=request_options)
+ return _response.data
+
+ async def get(self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> BatchSummary:
+ """
+ Get a batch summary, including runtime progress when the batch has been processed.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ BatchSummary
+ Batch summary
+
+ Examples
+ --------
+ import asyncio
+
+ from zep_cloud import AsyncZep
+
+ client = AsyncZep(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.batch.get(
+ batch_id="batchId",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._raw_client.get(batch_id, request_options=request_options)
+ return _response.data
+
+ async def delete(
+ self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> SuccessResponse:
+ """
+ Delete a draft or invalid unprocessed batch. Processed batches cannot be deleted.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SuccessResponse
+ Deleted batch
+
+ Examples
+ --------
+ import asyncio
+
+ from zep_cloud import AsyncZep
+
+ client = AsyncZep(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.batch.delete(
+ batch_id="batchId",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._raw_client.delete(batch_id, request_options=request_options)
+ return _response.data
+
+ async def list_items(
+ self,
+ batch_id: str,
+ *,
+ limit: typing.Optional[int] = None,
+ cursor: typing.Optional[int] = None,
+ status: typing.Optional[str] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> BatchItemListResponse:
+ """
+ List items in a batch, including derived runtime status when the batch has been processed.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ limit : typing.Optional[int]
+ Maximum number of batch items to return.
+
+ cursor : typing.Optional[int]
+ Pagination cursor from a previous response.
+
+ status : typing.Optional[str]
+ Batch item status filter.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ BatchItemListResponse
+ Batch item list
+
+ Examples
+ --------
+ import asyncio
+
+ from zep_cloud import AsyncZep
+
+ client = AsyncZep(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.batch.list_items(
+ batch_id="batchId",
+ limit=1,
+ cursor=1,
+ status="status",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._raw_client.list_items(
+ batch_id, limit=limit, cursor=cursor, status=status, request_options=request_options
+ )
+ return _response.data
+
+ async def add(
+ self,
+ batch_id: str,
+ *,
+ items: typing.Sequence[BatchAddItem],
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> typing.List[BatchItemDetail]:
+ """
+ Add graph episodes and thread messages to a draft batch. Items are appended in request order.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ items : typing.Sequence[BatchAddItem]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ typing.List[BatchItemDetail]
+ Added batch items
+
+ Examples
+ --------
+ import asyncio
+
+ from zep_cloud import AsyncZep, BatchAddItem
+
+ client = AsyncZep(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.batch.add(
+ batch_id="batchId",
+ items=[
+ BatchAddItem(
+ type="graph_episode",
+ )
+ ],
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._raw_client.add(batch_id, items=items, request_options=request_options)
+ return _response.data
+
+ async def process(self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> BatchSummary:
+ """
+ Start processing a filled batch. Repeated calls return the existing batch run.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ BatchSummary
+ Batch processing state
+
+ Examples
+ --------
+ import asyncio
+
+ from zep_cloud import AsyncZep
+
+ client = AsyncZep(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.batch.process(
+ batch_id="batchId",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._raw_client.process(batch_id, request_options=request_options)
+ return _response.data
diff --git a/src/zep_cloud/batch/raw_client.py b/src/zep_cloud/batch/raw_client.py
new file mode 100644
index 00000000..e13a90df
--- /dev/null
+++ b/src/zep_cloud/batch/raw_client.py
@@ -0,0 +1,1397 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+from json.decoder import JSONDecodeError
+
+from ..core.api_error import ApiError as core_api_error_ApiError
+from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
+from ..core.http_response import AsyncHttpResponse, HttpResponse
+from ..core.jsonable_encoder import jsonable_encoder
+from ..core.pydantic_utilities import parse_obj_as
+from ..core.request_options import RequestOptions
+from ..core.serialization import convert_and_respect_annotation_metadata
+from ..errors.bad_request_error import BadRequestError
+from ..errors.conflict_error import ConflictError
+from ..errors.forbidden_error import ForbiddenError
+from ..errors.internal_server_error import InternalServerError
+from ..errors.not_found_error import NotFoundError
+from ..types.api_error import ApiError as types_api_error_ApiError
+from ..types.batch_add_item import BatchAddItem
+from ..types.batch_item_detail import BatchItemDetail
+from ..types.batch_item_list_response import BatchItemListResponse
+from ..types.batch_list_response import BatchListResponse
+from ..types.batch_summary import BatchSummary
+from ..types.success_response import SuccessResponse
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
+
+class RawBatchClient:
+ def __init__(self, *, client_wrapper: SyncClientWrapper):
+ self._client_wrapper = client_wrapper
+
+ def list(
+ self,
+ *,
+ limit: typing.Optional[int] = None,
+ cursor: typing.Optional[int] = None,
+ status: typing.Optional[str] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> HttpResponse[BatchListResponse]:
+ """
+ List batches for the current project, optionally filtered by batch status.
+
+ Parameters
+ ----------
+ limit : typing.Optional[int]
+ Maximum number of batches to return.
+
+ cursor : typing.Optional[int]
+ Pagination cursor from a previous response.
+
+ status : typing.Optional[str]
+ Batch status filter.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ HttpResponse[BatchListResponse]
+ Batch list
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ "batches",
+ method="GET",
+ params={
+ "limit": limit,
+ "cursor": cursor,
+ "status": status,
+ },
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ BatchListResponse,
+ parse_obj_as(
+ type_=BatchListResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return HttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+ def create(
+ self,
+ *,
+ metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> HttpResponse[BatchSummary]:
+ """
+ Create a draft batch that can be filled with graph episodes and thread messages.
+
+ Parameters
+ ----------
+ metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ HttpResponse[BatchSummary]
+ Created batch
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ "batches",
+ method="POST",
+ json={
+ "metadata": metadata,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ BatchSummary,
+ parse_obj_as(
+ type_=BatchSummary, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return HttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+ def get(
+ self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> HttpResponse[BatchSummary]:
+ """
+ Get a batch summary, including runtime progress when the batch has been processed.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ HttpResponse[BatchSummary]
+ Batch summary
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"batches/{jsonable_encoder(batch_id)}",
+ method="GET",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ BatchSummary,
+ parse_obj_as(
+ type_=BatchSummary, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return HttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+ def delete(
+ self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> HttpResponse[SuccessResponse]:
+ """
+ Delete a draft or invalid unprocessed batch. Processed batches cannot be deleted.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ HttpResponse[SuccessResponse]
+ Deleted batch
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"batches/{jsonable_encoder(batch_id)}",
+ method="DELETE",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ SuccessResponse,
+ parse_obj_as(
+ type_=SuccessResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return HttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 409:
+ raise ConflictError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+ def list_items(
+ self,
+ batch_id: str,
+ *,
+ limit: typing.Optional[int] = None,
+ cursor: typing.Optional[int] = None,
+ status: typing.Optional[str] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> HttpResponse[BatchItemListResponse]:
+ """
+ List items in a batch, including derived runtime status when the batch has been processed.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ limit : typing.Optional[int]
+ Maximum number of batch items to return.
+
+ cursor : typing.Optional[int]
+ Pagination cursor from a previous response.
+
+ status : typing.Optional[str]
+ Batch item status filter.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ HttpResponse[BatchItemListResponse]
+ Batch item list
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"batches/{jsonable_encoder(batch_id)}/items",
+ method="GET",
+ params={
+ "limit": limit,
+ "cursor": cursor,
+ "status": status,
+ },
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ BatchItemListResponse,
+ parse_obj_as(
+ type_=BatchItemListResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return HttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+ def add(
+ self,
+ batch_id: str,
+ *,
+ items: typing.Sequence[BatchAddItem],
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> HttpResponse[typing.List[BatchItemDetail]]:
+ """
+ Add graph episodes and thread messages to a draft batch. Items are appended in request order.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ items : typing.Sequence[BatchAddItem]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ HttpResponse[typing.List[BatchItemDetail]]
+ Added batch items
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"batches/{jsonable_encoder(batch_id)}/items",
+ method="POST",
+ json={
+ "items": convert_and_respect_annotation_metadata(
+ object_=items, annotation=typing.Sequence[BatchAddItem], direction="write"
+ ),
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ typing.List[BatchItemDetail],
+ parse_obj_as(
+ type_=typing.List[BatchItemDetail], # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return HttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 409:
+ raise ConflictError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+ def process(
+ self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> HttpResponse[BatchSummary]:
+ """
+ Start processing a filled batch. Repeated calls return the existing batch run.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ HttpResponse[BatchSummary]
+ Batch processing state
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"batches/{jsonable_encoder(batch_id)}/process",
+ method="POST",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ BatchSummary,
+ parse_obj_as(
+ type_=BatchSummary, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return HttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 409:
+ raise ConflictError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+
+class AsyncRawBatchClient:
+ def __init__(self, *, client_wrapper: AsyncClientWrapper):
+ self._client_wrapper = client_wrapper
+
+ async def list(
+ self,
+ *,
+ limit: typing.Optional[int] = None,
+ cursor: typing.Optional[int] = None,
+ status: typing.Optional[str] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> AsyncHttpResponse[BatchListResponse]:
+ """
+ List batches for the current project, optionally filtered by batch status.
+
+ Parameters
+ ----------
+ limit : typing.Optional[int]
+ Maximum number of batches to return.
+
+ cursor : typing.Optional[int]
+ Pagination cursor from a previous response.
+
+ status : typing.Optional[str]
+ Batch status filter.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[BatchListResponse]
+ Batch list
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "batches",
+ method="GET",
+ params={
+ "limit": limit,
+ "cursor": cursor,
+ "status": status,
+ },
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ BatchListResponse,
+ parse_obj_as(
+ type_=BatchListResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+ async def create(
+ self,
+ *,
+ metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> AsyncHttpResponse[BatchSummary]:
+ """
+ Create a draft batch that can be filled with graph episodes and thread messages.
+
+ Parameters
+ ----------
+ metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[BatchSummary]
+ Created batch
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "batches",
+ method="POST",
+ json={
+ "metadata": metadata,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ BatchSummary,
+ parse_obj_as(
+ type_=BatchSummary, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+ async def get(
+ self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[BatchSummary]:
+ """
+ Get a batch summary, including runtime progress when the batch has been processed.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[BatchSummary]
+ Batch summary
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"batches/{jsonable_encoder(batch_id)}",
+ method="GET",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ BatchSummary,
+ parse_obj_as(
+ type_=BatchSummary, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+ async def delete(
+ self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[SuccessResponse]:
+ """
+ Delete a draft or invalid unprocessed batch. Processed batches cannot be deleted.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[SuccessResponse]
+ Deleted batch
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"batches/{jsonable_encoder(batch_id)}",
+ method="DELETE",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ SuccessResponse,
+ parse_obj_as(
+ type_=SuccessResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 409:
+ raise ConflictError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+ async def list_items(
+ self,
+ batch_id: str,
+ *,
+ limit: typing.Optional[int] = None,
+ cursor: typing.Optional[int] = None,
+ status: typing.Optional[str] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> AsyncHttpResponse[BatchItemListResponse]:
+ """
+ List items in a batch, including derived runtime status when the batch has been processed.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ limit : typing.Optional[int]
+ Maximum number of batch items to return.
+
+ cursor : typing.Optional[int]
+ Pagination cursor from a previous response.
+
+ status : typing.Optional[str]
+ Batch item status filter.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[BatchItemListResponse]
+ Batch item list
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"batches/{jsonable_encoder(batch_id)}/items",
+ method="GET",
+ params={
+ "limit": limit,
+ "cursor": cursor,
+ "status": status,
+ },
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ BatchItemListResponse,
+ parse_obj_as(
+ type_=BatchItemListResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+ async def add(
+ self,
+ batch_id: str,
+ *,
+ items: typing.Sequence[BatchAddItem],
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> AsyncHttpResponse[typing.List[BatchItemDetail]]:
+ """
+ Add graph episodes and thread messages to a draft batch. Items are appended in request order.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ items : typing.Sequence[BatchAddItem]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[typing.List[BatchItemDetail]]
+ Added batch items
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"batches/{jsonable_encoder(batch_id)}/items",
+ method="POST",
+ json={
+ "items": convert_and_respect_annotation_metadata(
+ object_=items, annotation=typing.Sequence[BatchAddItem], direction="write"
+ ),
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ typing.List[BatchItemDetail],
+ parse_obj_as(
+ type_=typing.List[BatchItemDetail], # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 409:
+ raise ConflictError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
+
+ async def process(
+ self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[BatchSummary]:
+ """
+ Start processing a filled batch. Repeated calls return the existing batch run.
+
+ Parameters
+ ----------
+ batch_id : str
+ The batch ID.
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[BatchSummary]
+ Batch processing state
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"batches/{jsonable_encoder(batch_id)}/process",
+ method="POST",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ BatchSummary,
+ parse_obj_as(
+ type_=BatchSummary, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 403:
+ raise ForbiddenError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 409:
+ raise ConflictError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 500:
+ raise InternalServerError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ types_api_error_ApiError,
+ parse_obj_as(
+ type_=types_api_error_ApiError, # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+ )
+ raise core_api_error_ApiError(
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response_json
+ )
diff --git a/src/zep_cloud/context/raw_client.py b/src/zep_cloud/context/raw_client.py
index 800f0ac6..4fe59872 100644
--- a/src/zep_cloud/context/raw_client.py
+++ b/src/zep_cloud/context/raw_client.py
@@ -60,9 +60,9 @@ def list_context_templates(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -136,9 +136,9 @@ def create_context_template(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -201,9 +201,9 @@ def get_context_template(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -287,9 +287,9 @@ def update_context_template(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -363,9 +363,9 @@ def delete_context_template(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -441,9 +441,9 @@ async def list_context_templates(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -517,9 +517,9 @@ async def create_context_template(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -582,9 +582,9 @@ async def get_context_template(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -668,9 +668,9 @@ async def update_context_template(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -744,9 +744,9 @@ async def delete_context_template(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
diff --git a/src/zep_cloud/core/client_wrapper.py b/src/zep_cloud/core/client_wrapper.py
index b75a27ce..55159b5c 100644
--- a/src/zep_cloud/core/client_wrapper.py
+++ b/src/zep_cloud/core/client_wrapper.py
@@ -22,10 +22,10 @@ def __init__(
def get_headers(self) -> typing.Dict[str, str]:
headers: typing.Dict[str, str] = {
- "User-Agent": "zep-cloud/3.21.0",
+ "User-Agent": "zep-cloud/3.22.0",
"X-Fern-Language": "Python",
"X-Fern-SDK-Name": "zep-cloud",
- "X-Fern-SDK-Version": "3.21.0",
+ "X-Fern-SDK-Version": "3.22.0",
**(self.get_custom_headers() or {}),
}
headers["Authorization"] = f"Api-Key {self.api_key}"
diff --git a/src/zep_cloud/errors/__init__.py b/src/zep_cloud/errors/__init__.py
index 3455a818..28fad1dd 100644
--- a/src/zep_cloud/errors/__init__.py
+++ b/src/zep_cloud/errors/__init__.py
@@ -3,8 +3,9 @@
# isort: skip_file
from .bad_request_error import BadRequestError
+from .conflict_error import ConflictError
from .forbidden_error import ForbiddenError
from .internal_server_error import InternalServerError
from .not_found_error import NotFoundError
-__all__ = ["BadRequestError", "ForbiddenError", "InternalServerError", "NotFoundError"]
+__all__ = ["BadRequestError", "ConflictError", "ForbiddenError", "InternalServerError", "NotFoundError"]
diff --git a/src/zep_cloud/errors/bad_request_error.py b/src/zep_cloud/errors/bad_request_error.py
index 3cf1d9ca..baf5be4f 100644
--- a/src/zep_cloud/errors/bad_request_error.py
+++ b/src/zep_cloud/errors/bad_request_error.py
@@ -2,10 +2,9 @@
import typing
-from ..core.api_error import ApiError as core_api_error_ApiError
-from ..types.api_error import ApiError as types_api_error_ApiError
+from ..core.api_error import ApiError
-class BadRequestError(core_api_error_ApiError):
- def __init__(self, body: types_api_error_ApiError, headers: typing.Optional[typing.Dict[str, str]] = None):
+class BadRequestError(ApiError):
+ def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None):
super().__init__(status_code=400, headers=headers, body=body)
diff --git a/src/zep_cloud/errors/conflict_error.py b/src/zep_cloud/errors/conflict_error.py
new file mode 100644
index 00000000..a1954586
--- /dev/null
+++ b/src/zep_cloud/errors/conflict_error.py
@@ -0,0 +1,10 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+from ..core.api_error import ApiError
+
+
+class ConflictError(ApiError):
+    def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None):
+        super().__init__(status_code=409, headers=headers, body=body)
diff --git a/src/zep_cloud/graph/client.py b/src/zep_cloud/graph/client.py
index 3a169c45..6c7c25d5 100644
--- a/src/zep_cloud/graph/client.py
+++ b/src/zep_cloud/graph/client.py
@@ -838,16 +838,20 @@ def search(
The maximum number of facts to retrieve. Defaults to 10. Limited to 50.
max_characters : typing.Optional[int]
- Maximum total characters across all selected results when scope=auto. Defaults to 2000. Limited to 50000.
+ Maximum total characters across all selected results when scope=auto. Defaults to 2500. Limited to 50000.
mmr_lambda : typing.Optional[float]
weighting for maximal marginal relevance
reranker : typing.Optional[Reranker]
- Defaults to RRF
+ Defaults to RRF. When scope=auto, this only affects graph-service retrieval
+ shape for graph facts, observations, and thread summaries; source-episode
+ retrieval uses RRF, and auto search applies its own internal rerank after retrieval.
return_raw_results : typing.Optional[bool]
When scope=auto, include the selected raw graph results alongside the materialized context block.
+ For graph-service-backed auto mode, selected raw results may include episodes,
+ edges, nodes, observations, and thread_summaries.
scope : typing.Optional[GraphSearchScope]
Defaults to Edges.
@@ -1899,16 +1903,20 @@ async def search(
The maximum number of facts to retrieve. Defaults to 10. Limited to 50.
max_characters : typing.Optional[int]
- Maximum total characters across all selected results when scope=auto. Defaults to 2000. Limited to 50000.
+ Maximum total characters across all selected results when scope=auto. Defaults to 2500. Limited to 50000.
mmr_lambda : typing.Optional[float]
weighting for maximal marginal relevance
reranker : typing.Optional[Reranker]
- Defaults to RRF
+ Defaults to RRF. When scope=auto, this only affects graph-service retrieval
+ shape for graph facts, observations, and thread summaries; source-episode
+ retrieval uses RRF, and auto search applies its own internal rerank after retrieval.
return_raw_results : typing.Optional[bool]
When scope=auto, include the selected raw graph results alongside the materialized context block.
+ For graph-service-backed auto mode, selected raw results may include episodes,
+ edges, nodes, observations, and thread_summaries.
scope : typing.Optional[GraphSearchScope]
Defaults to Edges.
diff --git a/src/zep_cloud/graph/edge/raw_client.py b/src/zep_cloud/graph/edge/raw_client.py
index 2fc8e42a..fe651c3c 100644
--- a/src/zep_cloud/graph/edge/raw_client.py
+++ b/src/zep_cloud/graph/edge/raw_client.py
@@ -81,9 +81,9 @@ def get_by_graph_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -165,9 +165,9 @@ def get_by_user_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -228,9 +228,9 @@ def get(self, uuid_: str, *, request_options: typing.Optional[RequestOptions] =
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -304,9 +304,9 @@ def delete(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -419,9 +419,9 @@ def update(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -519,9 +519,9 @@ async def get_by_graph_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -603,9 +603,9 @@ async def get_by_user_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -668,9 +668,9 @@ async def get(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -744,9 +744,9 @@ async def delete(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -859,9 +859,9 @@ async def update(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
diff --git a/src/zep_cloud/graph/episode/raw_client.py b/src/zep_cloud/graph/episode/raw_client.py
index 20c871bc..eba044f8 100644
--- a/src/zep_cloud/graph/episode/raw_client.py
+++ b/src/zep_cloud/graph/episode/raw_client.py
@@ -75,9 +75,9 @@ def get_by_graph_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -150,9 +150,9 @@ def get_by_user_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -213,9 +213,9 @@ def get(self, uuid_: str, *, request_options: typing.Optional[RequestOptions] =
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -278,9 +278,9 @@ def delete(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -368,9 +368,9 @@ def update(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -455,9 +455,9 @@ def get_nodes_and_edges(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -535,9 +535,9 @@ async def get_by_graph_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -610,9 +610,9 @@ async def get_by_user_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -675,9 +675,9 @@ async def get(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -740,9 +740,9 @@ async def delete(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -830,9 +830,9 @@ async def update(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -917,9 +917,9 @@ async def get_nodes_and_edges(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
diff --git a/src/zep_cloud/graph/node/raw_client.py b/src/zep_cloud/graph/node/raw_client.py
index 5467a088..6e524024 100644
--- a/src/zep_cloud/graph/node/raw_client.py
+++ b/src/zep_cloud/graph/node/raw_client.py
@@ -83,9 +83,9 @@ def get_by_graph_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -167,9 +167,9 @@ def get_by_user_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -232,9 +232,9 @@ def get_edges(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -297,9 +297,9 @@ def get_episodes(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -360,9 +360,9 @@ def get(self, uuid_: str, *, request_options: typing.Optional[RequestOptions] =
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -436,9 +436,9 @@ def delete(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -541,9 +541,9 @@ def update(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -641,9 +641,9 @@ async def get_by_graph_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -725,9 +725,9 @@ async def get_by_user_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -790,9 +790,9 @@ async def get_edges(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -855,9 +855,9 @@ async def get_episodes(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -920,9 +920,9 @@ async def get(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -996,9 +996,9 @@ async def delete(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1101,9 +1101,9 @@ async def update(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
diff --git a/src/zep_cloud/graph/observation/raw_client.py b/src/zep_cloud/graph/observation/raw_client.py
index 247868b9..11a9b68d 100644
--- a/src/zep_cloud/graph/observation/raw_client.py
+++ b/src/zep_cloud/graph/observation/raw_client.py
@@ -80,9 +80,9 @@ def get_by_graph_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -175,9 +175,9 @@ def get_by_user_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -249,9 +249,9 @@ def get(self, uuid_: str, *, request_options: typing.Optional[RequestOptions] =
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -349,9 +349,9 @@ async def get_by_graph_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -444,9 +444,9 @@ async def get_by_user_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -520,9 +520,9 @@ async def get(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
diff --git a/src/zep_cloud/graph/raw_client.py b/src/zep_cloud/graph/raw_client.py
index 276af35c..7dc1f0f6 100644
--- a/src/zep_cloud/graph/raw_client.py
+++ b/src/zep_cloud/graph/raw_client.py
@@ -94,9 +94,9 @@ def list_custom_instructions(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -192,9 +192,9 @@ def add_custom_instructions(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -288,9 +288,9 @@ def delete_custom_instructions(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -375,9 +375,9 @@ def list_entity_types(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -476,9 +476,9 @@ def set_entity_types_internal(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -588,9 +588,9 @@ def add(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -674,9 +674,9 @@ def add_batch(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -851,9 +851,9 @@ def add_fact_triple(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -941,9 +941,9 @@ def clone(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1023,9 +1023,9 @@ def create(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1114,9 +1114,9 @@ def list_all(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1253,9 +1253,9 @@ def detect_patterns(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1340,16 +1340,20 @@ def search(
The maximum number of facts to retrieve. Defaults to 10. Limited to 50.
max_characters : typing.Optional[int]
- Maximum total characters across all selected results when scope=auto. Defaults to 2000. Limited to 50000.
+ Maximum total characters across all selected results when scope=auto. Defaults to 2500. Limited to 50000.
mmr_lambda : typing.Optional[float]
weighting for maximal marginal relevance
reranker : typing.Optional[Reranker]
- Defaults to RRF
+ Defaults to RRF. When scope=auto, this only affects graph-service retrieval
+ shape for graph facts, observations, and thread summaries; source-episode
+ retrieval uses RRF, and auto search applies its own internal rerank after retrieval.
return_raw_results : typing.Optional[bool]
When scope=auto, include the selected raw graph results alongside the materialized context block.
+ For graph-service-backed auto mode, selected raw results may include episodes,
+ edges, nodes, observations, and thread_summaries.
scope : typing.Optional[GraphSearchScope]
Defaults to Edges.
@@ -1407,9 +1411,9 @@ def search(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1535,9 +1539,9 @@ def delete(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1628,9 +1632,9 @@ def update(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1720,9 +1724,9 @@ async def list_custom_instructions(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1818,9 +1822,9 @@ async def add_custom_instructions(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1914,9 +1918,9 @@ async def delete_custom_instructions(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -2001,9 +2005,9 @@ async def list_entity_types(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -2102,9 +2106,9 @@ async def set_entity_types_internal(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -2214,9 +2218,9 @@ async def add(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -2300,9 +2304,9 @@ async def add_batch(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -2477,9 +2481,9 @@ async def add_fact_triple(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -2567,9 +2571,9 @@ async def clone(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -2649,9 +2653,9 @@ async def create(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -2740,9 +2744,9 @@ async def list_all(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -2879,9 +2883,9 @@ async def detect_patterns(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -2966,16 +2970,20 @@ async def search(
The maximum number of facts to retrieve. Defaults to 10. Limited to 50.
max_characters : typing.Optional[int]
- Maximum total characters across all selected results when scope=auto. Defaults to 2000. Limited to 50000.
+ Maximum total characters across all selected results when scope=auto. Defaults to 2500. Limited to 50000.
mmr_lambda : typing.Optional[float]
weighting for maximal marginal relevance
reranker : typing.Optional[Reranker]
- Defaults to RRF
+ Defaults to RRF. When scope=auto, this only affects graph-service retrieval
+ shape for graph facts, observations, and thread summaries; source-episode
+ retrieval uses RRF, and auto search applies its own internal rerank after retrieval.
return_raw_results : typing.Optional[bool]
When scope=auto, include the selected raw graph results alongside the materialized context block.
+ For graph-service-backed auto mode, selected raw results may include episodes,
+ edges, nodes, observations, and thread_summaries.
scope : typing.Optional[GraphSearchScope]
Defaults to Edges.
@@ -3033,9 +3041,9 @@ async def search(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -3163,9 +3171,9 @@ async def delete(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -3256,9 +3264,9 @@ async def update(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
diff --git a/src/zep_cloud/graph/thread_summary/raw_client.py b/src/zep_cloud/graph/thread_summary/raw_client.py
index 9e30f8a6..af0483f4 100644
--- a/src/zep_cloud/graph/thread_summary/raw_client.py
+++ b/src/zep_cloud/graph/thread_summary/raw_client.py
@@ -80,9 +80,9 @@ def get_by_graph_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -175,9 +175,9 @@ def get_by_user_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -275,9 +275,9 @@ async def get_by_graph_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -370,9 +370,9 @@ async def get_by_user_id(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
diff --git a/src/zep_cloud/project/raw_client.py b/src/zep_cloud/project/raw_client.py
index 4b74c71b..bd2e85b6 100644
--- a/src/zep_cloud/project/raw_client.py
+++ b/src/zep_cloud/project/raw_client.py
@@ -52,9 +52,9 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> Htt
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -130,9 +130,9 @@ async def get(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
diff --git a/src/zep_cloud/thread/raw_client.py b/src/zep_cloud/thread/raw_client.py
index ac1b34bb..35b17a9d 100644
--- a/src/zep_cloud/thread/raw_client.py
+++ b/src/zep_cloud/thread/raw_client.py
@@ -92,9 +92,9 @@ def list_all(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -168,9 +168,9 @@ def create(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -724,9 +724,9 @@ async def list_all(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -800,9 +800,9 @@ async def create(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
diff --git a/src/zep_cloud/types/__init__.py b/src/zep_cloud/types/__init__.py
index be720e31..d6ea2404 100644
--- a/src/zep_cloud/types/__init__.py
+++ b/src/zep_cloud/types/__init__.py
@@ -6,6 +6,17 @@
from .add_thread_messages_response import AddThreadMessagesResponse
from .add_triple_response import AddTripleResponse
from .api_error import ApiError
+from .apidata_batch_add_item_role import ApidataBatchAddItemRole
+from .apidata_batch_add_item_type import ApidataBatchAddItemType
+from .batch_add_item import BatchAddItem
+from .batch_item_detail import BatchItemDetail
+from .batch_item_kind import BatchItemKind
+from .batch_item_list_response import BatchItemListResponse
+from .batch_item_status import BatchItemStatus
+from .batch_list_response import BatchListResponse
+from .batch_progress import BatchProgress
+from .batch_status import BatchStatus
+from .batch_summary import BatchSummary
from .clone_graph_response import CloneGraphResponse
from .cluster_detect_config import ClusterDetectConfig
from .co_occurrence_detect_config import CoOccurrenceDetectConfig
@@ -77,6 +88,17 @@
"AddThreadMessagesResponse",
"AddTripleResponse",
"ApiError",
+ "ApidataBatchAddItemRole",
+ "ApidataBatchAddItemType",
+ "BatchAddItem",
+ "BatchItemDetail",
+ "BatchItemKind",
+ "BatchItemListResponse",
+ "BatchItemStatus",
+ "BatchListResponse",
+ "BatchProgress",
+ "BatchStatus",
+ "BatchSummary",
"CloneGraphResponse",
"ClusterDetectConfig",
"CoOccurrenceDetectConfig",
diff --git a/src/zep_cloud/types/apidata_batch_add_item_role.py b/src/zep_cloud/types/apidata_batch_add_item_role.py
new file mode 100644
index 00000000..4f51bde1
--- /dev/null
+++ b/src/zep_cloud/types/apidata_batch_add_item_role.py
@@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+ApidataBatchAddItemRole = typing.Union[
+ typing.Literal["norole", "system", "assistant", "user", "function", "tool"], typing.Any
+]
diff --git a/src/zep_cloud/types/apidata_batch_add_item_type.py b/src/zep_cloud/types/apidata_batch_add_item_type.py
new file mode 100644
index 00000000..f659edea
--- /dev/null
+++ b/src/zep_cloud/types/apidata_batch_add_item_type.py
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+ApidataBatchAddItemType = typing.Union[typing.Literal["graph_episode", "thread_message"], typing.Any]
diff --git a/src/zep_cloud/types/batch_add_item.py b/src/zep_cloud/types/batch_add_item.py
new file mode 100644
index 00000000..a95cb4ab
--- /dev/null
+++ b/src/zep_cloud/types/batch_add_item.py
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .apidata_batch_add_item_role import ApidataBatchAddItemRole
+from .apidata_batch_add_item_type import ApidataBatchAddItemType
+from .graph_data_type import GraphDataType
+
+
+class BatchAddItem(UniversalBaseModel):
+ content: typing.Optional[str] = None
+ created_at: typing.Optional[str] = None
+ data: typing.Optional[str] = None
+ data_type: typing.Optional[GraphDataType] = None
+ graph_id: typing.Optional[str] = None
+ metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
+ name: typing.Optional[str] = None
+ role: typing.Optional[ApidataBatchAddItemRole] = None
+ source_description: typing.Optional[str] = None
+ thread_id: typing.Optional[str] = None
+ type: ApidataBatchAddItemType
+ user_id: typing.Optional[str] = None
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/src/zep_cloud/types/batch_item_detail.py b/src/zep_cloud/types/batch_item_detail.py
new file mode 100644
index 00000000..5bc0831e
--- /dev/null
+++ b/src/zep_cloud/types/batch_item_detail.py
@@ -0,0 +1,32 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .batch_item_kind import BatchItemKind
+from .batch_item_status import BatchItemStatus
+
+
+class BatchItemDetail(UniversalBaseModel):
+ created_at: typing.Optional[str] = None
+ episode_uuid: typing.Optional[str] = None
+ error: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
+ graph_id: typing.Optional[str] = None
+ item_id: typing.Optional[str] = None
+ kind: typing.Optional[BatchItemKind] = None
+ sequence_index: typing.Optional[int] = None
+ source_uuid: typing.Optional[str] = None
+ status: typing.Optional[BatchItemStatus] = None
+ thread_id: typing.Optional[str] = None
+ updated_at: typing.Optional[str] = None
+ user_id: typing.Optional[str] = None
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/src/zep_cloud/types/batch_item_kind.py b/src/zep_cloud/types/batch_item_kind.py
new file mode 100644
index 00000000..5d08ec0a
--- /dev/null
+++ b/src/zep_cloud/types/batch_item_kind.py
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+BatchItemKind = typing.Union[typing.Literal["graph_episode", "thread_message"], typing.Any]
diff --git a/src/zep_cloud/types/batch_item_list_response.py b/src/zep_cloud/types/batch_item_list_response.py
new file mode 100644
index 00000000..b082dcea
--- /dev/null
+++ b/src/zep_cloud/types/batch_item_list_response.py
@@ -0,0 +1,21 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .batch_item_detail import BatchItemDetail
+
+
+class BatchItemListResponse(UniversalBaseModel):
+ items: typing.Optional[typing.List[BatchItemDetail]] = None
+ next_cursor: typing.Optional[int] = None
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/src/zep_cloud/types/batch_item_status.py b/src/zep_cloud/types/batch_item_status.py
new file mode 100644
index 00000000..4247df0a
--- /dev/null
+++ b/src/zep_cloud/types/batch_item_status.py
@@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+BatchItemStatus = typing.Union[
+ typing.Literal["pending", "queued", "processing", "succeeded", "failed", "skipped"], typing.Any
+]
diff --git a/src/zep_cloud/types/batch_list_response.py b/src/zep_cloud/types/batch_list_response.py
new file mode 100644
index 00000000..0400a33f
--- /dev/null
+++ b/src/zep_cloud/types/batch_list_response.py
@@ -0,0 +1,21 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .batch_summary import BatchSummary
+
+
+class BatchListResponse(UniversalBaseModel):
+ batches: typing.Optional[typing.List[BatchSummary]] = None
+ next_cursor: typing.Optional[int] = None
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/src/zep_cloud/types/batch_progress.py b/src/zep_cloud/types/batch_progress.py
new file mode 100644
index 00000000..477331e2
--- /dev/null
+++ b/src/zep_cloud/types/batch_progress.py
@@ -0,0 +1,25 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+
+
+class BatchProgress(UniversalBaseModel):
+ failed_items: typing.Optional[int] = None
+ percent_complete: typing.Optional[float] = None
+ processing_items: typing.Optional[int] = None
+ queued_items: typing.Optional[int] = None
+ skipped_items: typing.Optional[int] = None
+ succeeded_items: typing.Optional[int] = None
+ total_items: typing.Optional[int] = None
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/src/zep_cloud/types/batch_status.py b/src/zep_cloud/types/batch_status.py
new file mode 100644
index 00000000..400d3d56
--- /dev/null
+++ b/src/zep_cloud/types/batch_status.py
@@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+BatchStatus = typing.Union[
+ typing.Literal["draft", "invalid", "queued", "processing", "succeeded", "partial", "failed"], typing.Any
+]
diff --git a/src/zep_cloud/types/batch_summary.py b/src/zep_cloud/types/batch_summary.py
new file mode 100644
index 00000000..f0857745
--- /dev/null
+++ b/src/zep_cloud/types/batch_summary.py
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .batch_progress import BatchProgress
+from .batch_status import BatchStatus
+
+
+class BatchSummary(UniversalBaseModel):
+ batch_id: typing.Optional[str] = None
+ completed_at: typing.Optional[str] = None
+ created_at: typing.Optional[str] = None
+ item_count: typing.Optional[int] = None
+ metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
+ processed_at: typing.Optional[str] = None
+ progress: typing.Optional[BatchProgress] = None
+ status: typing.Optional[BatchStatus] = None
+ updated_at: typing.Optional[str] = None
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/src/zep_cloud/user/raw_client.py b/src/zep_cloud/user/raw_client.py
index f9ba4182..5eec6c94 100644
--- a/src/zep_cloud/user/raw_client.py
+++ b/src/zep_cloud/user/raw_client.py
@@ -71,9 +71,9 @@ def list_user_summary_instructions(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -164,9 +164,9 @@ def add_user_summary_instructions(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -255,9 +255,9 @@ def delete_user_summary_instructions(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -366,9 +366,9 @@ def add(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -457,9 +457,9 @@ def list_ordered(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -684,9 +684,9 @@ def update(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -952,9 +952,9 @@ async def list_user_summary_instructions(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1045,9 +1045,9 @@ async def add_user_summary_instructions(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1136,9 +1136,9 @@ async def delete_user_summary_instructions(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1247,9 +1247,9 @@ async def add(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1338,9 +1338,9 @@ async def list_ordered(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
@@ -1567,9 +1567,9 @@ async def update(
raise BadRequestError(
headers=dict(_response.headers),
body=typing.cast(
- types_api_error_ApiError,
+ typing.Optional[typing.Any],
parse_obj_as(
- type_=types_api_error_ApiError, # type: ignore
+ type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),