diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 9c28de3543..3ad9f39206 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,12 +1,14 @@
 name: CI
 on:
   push:
-    branches-ignore:
-      - 'generated'
-      - 'codegen/**'
-      - 'integrated/**'
-      - 'stl-preview-head/**'
-      - 'stl-preview-base/**'
+    branches:
+      - '**'
+      - '!integrated/**'
+      - '!stl-preview-head/**'
+      - '!stl-preview-base/**'
+      - '!generated'
+      - '!codegen/**'
+      - 'codegen/stl/**'
   pull_request:
     branches-ignore:
       - 'stl-preview-head/**'
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index fa4d75253f..3a5058bfce 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "2.28.0"
+  ".": "2.29.0"
 }
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index 3e6307edaf..45b3b3f373 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 152
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-cb3e4451108eed58d59cff25bf77ec0dc960ec9c6f3dba68f90e7a9847c09d21.yml
-openapi_spec_hash: dec6d9be64a5ba8f474a1f2a7a4fafef
-config_hash: e922f01e25accd07d8fd3641c37fbd62
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-5a660d5b485aae5061d55301f7c8522654a533c7d7d9596c4da54f0e481d8880.yml
+openapi_spec_hash: 50297cf7c625ec3c4bb0bc0f5a9d318a
+config_hash: 96fbf82cf74a44ccd513f5acf0956ffd
diff --git a/CHANGELOG.md b/CHANGELOG.md
index dfc7ddca89..bfe71b83eb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,26 @@
 # Changelog
 
+## 2.29.0 (2026-03-17)
+
+Full Changelog: [v2.28.0...v2.29.0](https://github.com/openai/openai-python/compare/v2.28.0...v2.29.0)
+
+### Features
+
+* **api:** add /v1/videos endpoint to batches create method ([c0e7a16](https://github.com/openai/openai-python/commit/c0e7a161a996854021e9eb69ea2a60ca0d08047f))
+* **api:** add defer_loading field to ToolFunction ([3167595](https://github.com/openai/openai-python/commit/3167595432bdda2f90721901d30ad316db49323e))
+* **api:** add in and nin operators to ComparisonFilter type ([664f02b](https://github.com/openai/openai-python/commit/664f02b051af84e1ca3fa313981ec72fdea269b3))
+
+
+### Bug Fixes
+
+* **deps:** bump minimum typing-extensions version ([a2fb2ca](https://github.com/openai/openai-python/commit/a2fb2ca55142c6658a18be7bd1392a01f5a83f35))
+* **pydantic:** do not pass `by_alias` unless set ([8ebe8fb](https://github.com/openai/openai-python/commit/8ebe8fbcb011c6a005a715cae50c6400a8596ee0))
+
+
+### Chores
+
+* **internal:** tweak CI branches ([96ccc3c](https://github.com/openai/openai-python/commit/96ccc3cca35645fd3140f99b0fc8e55545065212))
+
 ## 2.28.0 (2026-03-13)
 
 Full Changelog: [v2.27.0...v2.28.0](https://github.com/openai/openai-python/compare/v2.27.0...v2.28.0)
diff --git a/pyproject.toml b/pyproject.toml
index ea82be70c4..46a72007df 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "openai"
-version = "2.28.0"
+version = "2.29.0"
 description = "The official Python library for the openai API"
 dynamic = ["readme"]
 license = "Apache-2.0"
@@ -11,7 +11,7 @@ authors = [
 dependencies = [
     "httpx>=0.23.0, <1",
     "pydantic>=1.9.0, <3",
-    "typing-extensions>=4.11, <5",
+    "typing-extensions>=4.14, <5",
     "anyio>=3.5.0, <5",
     "distro>=1.7.0, <2",
     "sniffio",
diff --git a/src/openai/_compat.py b/src/openai/_compat.py
index 020ffeb2ca..340c91a6d0 100644
--- a/src/openai/_compat.py
+++ b/src/openai/_compat.py
@@ -2,7 +2,7 @@
 
 from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload
 from datetime import date, datetime
-from typing_extensions import Self, Literal
+from typing_extensions import Self, Literal, TypedDict
 
 import pydantic
 from pydantic.fields import FieldInfo
@@ -131,6 +131,10 @@ def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
     return model.model_dump_json(indent=indent)
 
 
+class _ModelDumpKwargs(TypedDict, total=False):
+    by_alias: bool
+
+
 def model_dump(
     model: pydantic.BaseModel,
     *,
@@ -142,6 +146,9 @@ def model_dump(
     by_alias: bool | None = None,
 ) -> dict[str, Any]:
     if (not PYDANTIC_V1) or hasattr(model, "model_dump"):
+        kwargs: _ModelDumpKwargs = {}
+        if by_alias is not None:
+            kwargs["by_alias"] = by_alias
         return model.model_dump(
             mode=mode,
             exclude=exclude,
@@ -149,7 +156,7 @@ def model_dump(
             exclude_unset=exclude_unset,
             exclude_defaults=exclude_defaults,
             # warnings are not supported in Pydantic v1
             warnings=True if PYDANTIC_V1 else warnings,
-            by_alias=by_alias,
+            **kwargs,
         )
     return cast(
         "dict[str, Any]",
diff --git a/src/openai/_version.py b/src/openai/_version.py
index 45ae8eb37a..b8a1b37e13 100644
--- a/src/openai/_version.py
+++ b/src/openai/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "openai"
-__version__ = "2.28.0"  # x-release-please-version
+__version__ = "2.29.0"  # x-release-please-version
diff --git a/src/openai/resources/batches.py b/src/openai/resources/batches.py
index 005a32870e..c269b93e2c 100644
--- a/src/openai/resources/batches.py
+++ b/src/openai/resources/batches.py
@@ -56,6 +56,7 @@ def create(
             "/v1/moderations",
             "/v1/images/generations",
             "/v1/images/edits",
+            "/v1/videos",
         ],
         input_file_id: str,
         metadata: Optional[Metadata] | Omit = omit,
@@ -76,9 +77,10 @@ def create(
 
           endpoint: The endpoint to be used for all requests in the batch. Currently
               `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, `/v1/completions`,
-              `/v1/moderations`, `/v1/images/generations`, and `/v1/images/edits` are
-              supported. Note that `/v1/embeddings` batches are also restricted to a maximum
-              of 50,000 embedding inputs across all requests in the batch.
+              `/v1/moderations`, `/v1/images/generations`, `/v1/images/edits`, and
+              `/v1/videos` are supported. Note that `/v1/embeddings` batches are also
+              restricted to a maximum of 50,000 embedding inputs across all requests in the
+              batch.
 
           input_file_id: The ID of an uploaded file that contains requests for the new batch.
 
@@ -282,6 +284,7 @@ async def create(
             "/v1/moderations",
             "/v1/images/generations",
             "/v1/images/edits",
+            "/v1/videos",
         ],
         input_file_id: str,
         metadata: Optional[Metadata] | Omit = omit,
@@ -302,9 +305,10 @@ async def create(
 
           endpoint: The endpoint to be used for all requests in the batch. Currently
               `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, `/v1/completions`,
-              `/v1/moderations`, `/v1/images/generations`, and `/v1/images/edits` are
-              supported. Note that `/v1/embeddings` batches are also restricted to a maximum
-              of 50,000 embedding inputs across all requests in the batch.
+              `/v1/moderations`, `/v1/images/generations`, `/v1/images/edits`, and
+              `/v1/videos` are supported. Note that `/v1/embeddings` batches are also
+              restricted to a maximum of 50,000 embedding inputs across all requests in the
+              batch.
 
           input_file_id: The ID of an uploaded file that contains requests for the new batch.
 
diff --git a/src/openai/types/batch_create_params.py b/src/openai/types/batch_create_params.py
index 1bcd48aace..97bd2c67ed 100644
--- a/src/openai/types/batch_create_params.py
+++ b/src/openai/types/batch_create_params.py
@@ -26,15 +26,16 @@ class BatchCreateParams(TypedDict, total=False):
             "/v1/moderations",
             "/v1/images/generations",
             "/v1/images/edits",
+            "/v1/videos",
         ]
     ]
     """The endpoint to be used for all requests in the batch.
 
     Currently `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`,
-    `/v1/completions`, `/v1/moderations`, `/v1/images/generations`, and
-    `/v1/images/edits` are supported. Note that `/v1/embeddings` batches are also
-    restricted to a maximum of 50,000 embedding inputs across all requests in the
-    batch.
+    `/v1/completions`, `/v1/moderations`, `/v1/images/generations`,
+    `/v1/images/edits`, and `/v1/videos` are supported. Note that `/v1/embeddings`
+    batches are also restricted to a maximum of 50,000 embedding inputs across all
+    requests in the batch.
     """
 
     input_file_id: Required[str]
diff --git a/src/openai/types/responses/namespace_tool.py b/src/openai/types/responses/namespace_tool.py
index 2c311dbe17..88f76a9732 100644
--- a/src/openai/types/responses/namespace_tool.py
+++ b/src/openai/types/responses/namespace_tool.py
@@ -15,6 +15,9 @@ class ToolFunction(BaseModel):
 
     type: Literal["function"]
 
+    defer_loading: Optional[bool] = None
+    """Whether this function should be deferred and discovered via tool search."""
+
     description: Optional[str] = None
 
     parameters: Optional[object] = None
diff --git a/src/openai/types/responses/namespace_tool_param.py b/src/openai/types/responses/namespace_tool_param.py
index 4bda2ecd83..cb1e5e17f4 100644
--- a/src/openai/types/responses/namespace_tool_param.py
+++ b/src/openai/types/responses/namespace_tool_param.py
@@ -15,6 +15,9 @@ class ToolFunction(TypedDict, total=False):
 
     type: Required[Literal["function"]]
 
+    defer_loading: bool
+    """Whether this function should be deferred and discovered via tool search."""
+
     description: Optional[str]
 
     parameters: Optional[object]
diff --git a/src/openai/types/shared/comparison_filter.py b/src/openai/types/shared/comparison_filter.py
index 852cac1738..57c26cd016 100644
--- a/src/openai/types/shared/comparison_filter.py
+++ b/src/openai/types/shared/comparison_filter.py
@@ -16,7 +16,7 @@ class ComparisonFilter(BaseModel):
     key: str
     """The key to compare against the value."""
 
-    type: Literal["eq", "ne", "gt", "gte", "lt", "lte"]
+    type: Literal["eq", "ne", "gt", "gte", "lt", "lte", "in", "nin"]
     """
     Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`, `in`,
    `nin`.
diff --git a/src/openai/types/shared_params/comparison_filter.py b/src/openai/types/shared_params/comparison_filter.py
index 363688e467..005f4d1f0d 100644
--- a/src/openai/types/shared_params/comparison_filter.py
+++ b/src/openai/types/shared_params/comparison_filter.py
@@ -18,7 +18,7 @@ class ComparisonFilter(TypedDict, total=False):
     key: Required[str]
     """The key to compare against the value."""
 
-    type: Required[Literal["eq", "ne", "gt", "gte", "lt", "lte"]]
+    type: Required[Literal["eq", "ne", "gt", "gte", "lt", "lte", "in", "nin"]]
     """
     Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`, `in`,
     `nin`.