diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e5dc504..d721fb5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,7 +19,7 @@ jobs: runs-on: ${{ github.repository == 'stainless-sdks/droidrun-cloud-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install Rye run: | @@ -44,7 +44,7 @@ jobs: id-token: write runs-on: ${{ github.repository == 'stainless-sdks/droidrun-cloud-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install Rye run: | @@ -63,7 +63,7 @@ jobs: - name: Get GitHub OIDC Token if: github.repository == 'stainless-sdks/droidrun-cloud-python' id: github-oidc - uses: actions/github-script@v6 + uses: actions/github-script@v8 with: script: core.setOutput('github_token', await core.getIDToken()); @@ -81,7 +81,7 @@ jobs: runs-on: ${{ github.repository == 'stainless-sdks/droidrun-cloud-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install Rye run: | diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 33be2b4..965bfe5 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install Rye run: | diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml index 66b9372..9f5c3c4 100644 --- a/.github/workflows/release-doctor.yml +++ b/.github/workflows/release-doctor.yml @@ -12,7 +12,7 @@ jobs: if: github.repository == 'droidrun/mobilerun-sdk-python' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next') steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Check release environment run: | diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 65f558e..656a2ef 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.0.0" + ".": "2.1.0" } \ No newline at end of file diff --git a/.stats.yml b/.stats.yml index 7a32e45..15d9546 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 49 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/droidrun%2Fdroidrun-cloud-f80ecf1ef8ff0bf85d545b660eeef8677c62d571dc692b47fc044fc82378d330.yml -openapi_spec_hash: 51d80499a2291f8d223276f759392574 -config_hash: 12fc3bd7f141a7f09f5ad38cfa42ba3d +configured_endpoints: 50 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/droidrun%2Fdroidrun-cloud-ae29ab2212af152dcd463c735f0797b4ce99d43a9535aca3da4d406c881332b1.yml +openapi_spec_hash: 46915814e9a50b6958a997551ca790c8 +config_hash: e86cf4289cfec730125313d2222d09e8 diff --git a/CHANGELOG.md b/CHANGELOG.md index ffa3740..2b2a5eb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## 2.1.0 (2026-01-24) + +Full Changelog: [v2.0.0...v2.1.0](https://github.com/droidrun/mobilerun-sdk-python/compare/v2.0.0...v2.1.0) + +### Features + +* **api:** api update 
([678264c](https://github.com/droidrun/mobilerun-sdk-python/commit/678264c95364ad4fff7c70b61bd4794819f28292)) +* **api:** api update ([e42aa53](https://github.com/droidrun/mobilerun-sdk-python/commit/e42aa5309759f931ceab3eee40792e559f21d3a3)) +* **api:** api update ([866c20e](https://github.com/droidrun/mobilerun-sdk-python/commit/866c20ec6ebd177c861777fee900259186347ef0)) +* **api:** api update ([416142e](https://github.com/droidrun/mobilerun-sdk-python/commit/416142eb345a07aefea8404a97231ed0acd74678)) +* **api:** expose device count endpoint ([ab1191d](https://github.com/droidrun/mobilerun-sdk-python/commit/ab1191d28943441844e81c6c1189aebc34f54980)) +* **client:** add support for binary request streaming ([c6668af](https://github.com/droidrun/mobilerun-sdk-python/commit/c6668af5dbd83d7ab1dd1fe4f68253422e055e73)) + + +### Chores + +* **ci:** upgrade `actions/github-script` ([00033ad](https://github.com/droidrun/mobilerun-sdk-python/commit/00033ad7ffac1d3d650a05e841bbdecca964192e)) +* **internal:** update `actions/checkout` version ([75af377](https://github.com/droidrun/mobilerun-sdk-python/commit/75af377b29fca57b52843186af3e9775bfc78c13)) + ## 2.0.0 (2026-01-12) Full Changelog: [v0.1.0...v2.0.0](https://github.com/droidrun/mobilerun-sdk-python/compare/v0.1.0...v2.0.0) diff --git a/api.md b/api.md index 20d08ea..dc69b81 100644 --- a/api.md +++ b/api.md @@ -24,7 +24,7 @@ Methods: - client.tasks.attach(task_id) -> None - client.tasks.get_status(task_id) -> TaskGetStatusResponse - client.tasks.get_trajectory(task_id) -> TaskGetTrajectoryResponse -- client.tasks.run(\*\*params) -> TaskRunResponse +- client.tasks.run() -> TaskRunResponse - client.tasks.run_streamed(\*\*params) -> None - client.tasks.stop(task_id) -> TaskStopResponse @@ -59,7 +59,7 @@ Methods: Types: ```python -from mobilerun.types import Device, DeviceListResponse +from mobilerun.types import Device, DeviceListResponse, DeviceCountResponse ``` Methods: @@ -67,6 +67,7 @@ Methods: - client.devices.create(\*\*params) -> Device - client.devices.retrieve(device_id) -> Device - client.devices.list(\*\*params) -> DeviceListResponse +- client.devices.count() -> DeviceCountResponse - client.devices.terminate(device_id) -> None - client.devices.wait_ready(device_id) -> Device diff --git a/pyproject.toml b/pyproject.toml index 4874197..2ca5b27 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "mobilerun-sdk" -version = "2.0.0" +version = "2.1.0" description = "The official Python library for the mobilerun API" dynamic = ["readme"] license = "Apache-2.0" diff --git a/src/mobilerun/_base_client.py b/src/mobilerun/_base_client.py index f24e0dc..5177829 100644 --- a/src/mobilerun/_base_client.py +++ b/src/mobilerun/_base_client.py @@ -9,6 +9,7 @@ import inspect import logging import platform +import warnings import email.utils from types import TracebackType from random import random @@ -51,9 +52,11 @@ ResponseT, AnyMapping, PostParser, + BinaryTypes, RequestFiles, HttpxSendArgs, RequestOptions, + AsyncBinaryTypes, HttpxRequestFiles, ModelBuilderProtocol, not_given, @@ -477,8 +480,19 @@ def _build_request( retries_taken: int = 0, ) -> httpx.Request: if log.isEnabledFor(logging.DEBUG): - log.debug("Request options: %s", model_dump(options, exclude_unset=True)) - + log.debug( + "Request options: %s", + model_dump( + options, + exclude_unset=True, + # Pydantic v1 can't dump every type we support in content, so we exclude it for now. 
+ exclude={ + "content", + } + if PYDANTIC_V1 + else {}, + ), + ) kwargs: dict[str, Any] = {} json_data = options.json_data @@ -532,7 +546,13 @@ def _build_request( is_body_allowed = options.method.lower() != "get" if is_body_allowed: - if isinstance(json_data, bytes): + if options.content is not None and json_data is not None: + raise TypeError("Passing both `content` and `json_data` is not supported") + if options.content is not None and files is not None: + raise TypeError("Passing both `content` and `files` is not supported") + if options.content is not None: + kwargs["content"] = options.content + elif isinstance(json_data, bytes): kwargs["content"] = json_data else: kwargs["json"] = json_data if is_given(json_data) else None @@ -1194,6 +1214,7 @@ def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, options: RequestOptions = {}, files: RequestFiles | None = None, stream: Literal[False] = False, @@ -1206,6 +1227,7 @@ def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, options: RequestOptions = {}, files: RequestFiles | None = None, stream: Literal[True], @@ -1219,6 +1241,7 @@ def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, options: RequestOptions = {}, files: RequestFiles | None = None, stream: bool, @@ -1231,13 +1254,25 @@ def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, options: RequestOptions = {}, files: RequestFiles | None = None, stream: bool = False, stream_cls: type[_StreamT] | None = None, ) -> ResponseT | _StreamT: + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if files is not None and content is not None: + raise TypeError("Passing both `files` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. " + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) opts = FinalRequestOptions.construct( - method="post", url=path, json_data=body, files=to_httpx_files(files), **options + method="post", url=path, json_data=body, content=content, files=to_httpx_files(files), **options ) return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) @@ -1247,11 +1282,23 @@ def patch( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, ) -> ResponseT: + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if files is not None and content is not None: + raise TypeError("Passing both `files` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. 
" + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) opts = FinalRequestOptions.construct( - method="patch", url=path, json_data=body, files=to_httpx_files(files), **options + method="patch", url=path, json_data=body, content=content, files=to_httpx_files(files), **options ) return self.request(cast_to, opts) @@ -1261,11 +1308,23 @@ def put( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, ) -> ResponseT: + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if files is not None and content is not None: + raise TypeError("Passing both `files` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. " + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) opts = FinalRequestOptions.construct( - method="put", url=path, json_data=body, files=to_httpx_files(files), **options + method="put", url=path, json_data=body, content=content, files=to_httpx_files(files), **options ) return self.request(cast_to, opts) @@ -1275,9 +1334,19 @@ def delete( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, options: RequestOptions = {}, ) -> ResponseT: - opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. " + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) + opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, content=content, **options) return self.request(cast_to, opts) def get_api_list( @@ -1717,6 +1786,7 @@ async def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, stream: Literal[False] = False, @@ -1729,6 +1799,7 @@ async def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, stream: Literal[True], @@ -1742,6 +1813,7 @@ async def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, stream: bool, @@ -1754,13 +1826,25 @@ async def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, stream: bool = False, stream_cls: type[_AsyncStreamT] | None = None, ) -> ResponseT | _AsyncStreamT: + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if files is not None and content is not None: + raise TypeError("Passing both `files` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. 
" + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) opts = FinalRequestOptions.construct( - method="post", url=path, json_data=body, files=await async_to_httpx_files(files), **options + method="post", url=path, json_data=body, content=content, files=await async_to_httpx_files(files), **options ) return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) @@ -1770,11 +1854,28 @@ async def patch( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, ) -> ResponseT: + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if files is not None and content is not None: + raise TypeError("Passing both `files` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. " + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) opts = FinalRequestOptions.construct( - method="patch", url=path, json_data=body, files=await async_to_httpx_files(files), **options + method="patch", + url=path, + json_data=body, + content=content, + files=await async_to_httpx_files(files), + **options, ) return await self.request(cast_to, opts) @@ -1784,11 +1885,23 @@ async def put( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, ) -> ResponseT: + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if files is not None and content is not None: + raise TypeError("Passing both `files` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. " + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) opts = FinalRequestOptions.construct( - method="put", url=path, json_data=body, files=await async_to_httpx_files(files), **options + method="put", url=path, json_data=body, content=content, files=await async_to_httpx_files(files), **options ) return await self.request(cast_to, opts) @@ -1798,9 +1911,19 @@ async def delete( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, options: RequestOptions = {}, ) -> ResponseT: - opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. 
" + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) + opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, content=content, **options) return await self.request(cast_to, opts) def get_api_list( diff --git a/src/mobilerun/_models.py b/src/mobilerun/_models.py index ca9500b..29070e0 100644 --- a/src/mobilerun/_models.py +++ b/src/mobilerun/_models.py @@ -3,7 +3,20 @@ import os import inspect import weakref -from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast +from typing import ( + IO, + TYPE_CHECKING, + Any, + Type, + Union, + Generic, + TypeVar, + Callable, + Iterable, + Optional, + AsyncIterable, + cast, +) from datetime import date, datetime from typing_extensions import ( List, @@ -787,6 +800,7 @@ class FinalRequestOptionsInput(TypedDict, total=False): timeout: float | Timeout | None files: HttpxRequestFiles | None idempotency_key: str + content: Union[bytes, bytearray, IO[bytes], Iterable[bytes], AsyncIterable[bytes], None] json_data: Body extra_json: AnyMapping follow_redirects: bool @@ -805,6 +819,7 @@ class FinalRequestOptions(pydantic.BaseModel): post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven() follow_redirects: Union[bool, None] = None + content: Union[bytes, bytearray, IO[bytes], Iterable[bytes], AsyncIterable[bytes], None] = None # It should be noted that we cannot use `json` here as that would override # a BaseModel method in an incompatible fashion. json_data: Union[Body, None] = None diff --git a/src/mobilerun/_types.py b/src/mobilerun/_types.py index f9f729e..6fa6541 100644 --- a/src/mobilerun/_types.py +++ b/src/mobilerun/_types.py @@ -13,9 +13,11 @@ Mapping, TypeVar, Callable, + Iterable, Iterator, Optional, Sequence, + AsyncIterable, ) from typing_extensions import ( Set, @@ -56,6 +58,13 @@ else: Base64FileInput = Union[IO[bytes], PathLike] FileContent = Union[IO[bytes], bytes, PathLike] # PathLike is not subscriptable in Python 3.8. + + +# Used for sending raw binary data / streaming data in request bodies +# e.g. for file uploads without multipart encoding +BinaryTypes = Union[bytes, bytearray, IO[bytes], Iterable[bytes]] +AsyncBinaryTypes = Union[bytes, bytearray, IO[bytes], AsyncIterable[bytes]] + FileTypes = Union[ # file (or bytes) FileContent, diff --git a/src/mobilerun/_version.py b/src/mobilerun/_version.py index ed7458a..f24f62b 100644 --- a/src/mobilerun/_version.py +++ b/src/mobilerun/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "mobilerun" -__version__ = "2.0.0" # x-release-please-version +__version__ = "2.1.0" # x-release-please-version diff --git a/src/mobilerun/resources/devices/devices.py b/src/mobilerun/resources/devices/devices.py index bc1dbe8..63be619 100644 --- a/src/mobilerun/resources/devices/devices.py +++ b/src/mobilerun/resources/devices/devices.py @@ -69,6 +69,7 @@ from ..._base_client import make_request_options from ...types.device import Device from ...types.device_list_response import DeviceListResponse +from ...types.device_count_response import DeviceCountResponse __all__ = ["DevicesResource", "AsyncDevicesResource"] @@ -263,6 +264,25 @@ def list( cast_to=DeviceListResponse, ) + def count( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> DeviceCountResponse: + """Count claimed devices""" + return self._get( + "/devices/count", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=DeviceCountResponse, + ) + def terminate( self, device_id: str, @@ -521,6 +541,25 @@ async def list( cast_to=DeviceListResponse, ) + async def count( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> DeviceCountResponse: + """Count claimed devices""" + return await self._get( + "/devices/count", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=DeviceCountResponse, + ) + async def terminate( self, device_id: str, @@ -602,6 +641,9 @@ def __init__(self, devices: DevicesResource) -> None: self.list = to_raw_response_wrapper( devices.list, ) + self.count = to_raw_response_wrapper( + devices.count, + ) self.terminate = to_raw_response_wrapper( devices.terminate, ) @@ -647,6 +689,9 @@ def __init__(self, devices: AsyncDevicesResource) -> None: self.list = async_to_raw_response_wrapper( devices.list, ) + self.count = async_to_raw_response_wrapper( + devices.count, + ) self.terminate = async_to_raw_response_wrapper( devices.terminate, ) @@ -692,6 +737,9 @@ def __init__(self, devices: DevicesResource) -> None: self.list = to_streamed_response_wrapper( devices.list, ) + self.count = to_streamed_response_wrapper( + devices.count, + ) self.terminate = to_streamed_response_wrapper( devices.terminate, ) @@ -737,6 +785,9 @@ def __init__(self, devices: AsyncDevicesResource) -> None: self.list = async_to_streamed_response_wrapper( devices.list, ) + self.count = async_to_streamed_response_wrapper( + devices.count, + ) self.terminate = async_to_streamed_response_wrapper( devices.terminate, ) diff --git a/src/mobilerun/resources/tasks/tasks.py b/src/mobilerun/resources/tasks/tasks.py index 73debb6..be44133 100644 --- a/src/mobilerun/resources/tasks/tasks.py +++ b/src/mobilerun/resources/tasks/tasks.py @@ -7,7 +7,7 @@ import httpx -from ...types import LlmModel, TaskStatus, task_run_params, task_list_params, task_run_streamed_params +from ...types import LlmModel, TaskStatus, task_list_params, task_run_streamed_params from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property @@ -270,20 +270,6 @@ def get_trajectory( def run( self, *, - llm_model: LlmModel, - task: str, - apps: SequenceNotStr[str] | Omit = omit, - credentials: Iterable[task_run_params.Credential] | Omit = omit, - device_id: Optional[str] | Omit = omit, - display_id: int | Omit = omit, - execution_timeout: int | Omit = omit, - files: SequenceNotStr[str] | Omit = omit, - max_steps: int | Omit = omit, - output_schema: Optional[Dict[str, 
object]] | Omit = omit, - reasoning: bool | Omit = omit, - temperature: float | Omit = omit, - vision: bool | Omit = omit, - vpn_country: Optional[Literal["US", "BR", "FR", "DE", "IN", "JP", "KR", "ZA"]] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -291,43 +277,9 @@ def run( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> TaskRunResponse: - """ - Run Task - - Args: - device_id: The ID of the device to run the task on. - - display_id: The display ID of the device to run the task on. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ + """Run Task""" return self._post( "/tasks/", - body=maybe_transform( - { - "llm_model": llm_model, - "task": task, - "apps": apps, - "credentials": credentials, - "device_id": device_id, - "display_id": display_id, - "execution_timeout": execution_timeout, - "files": files, - "max_steps": max_steps, - "output_schema": output_schema, - "reasoning": reasoning, - "temperature": temperature, - "vision": vision, - "vpn_country": vpn_country, - }, - task_run_params.TaskRunParams, - ), options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -659,20 +611,6 @@ async def get_trajectory( async def run( self, *, - llm_model: LlmModel, - task: str, - apps: SequenceNotStr[str] | Omit = omit, - credentials: Iterable[task_run_params.Credential] | Omit = omit, - device_id: Optional[str] | Omit = omit, - display_id: int | Omit = omit, - execution_timeout: int | Omit = omit, - files: SequenceNotStr[str] | Omit = omit, - max_steps: int | Omit = omit, - output_schema: Optional[Dict[str, object]] | Omit = omit, - reasoning: bool | Omit = omit, - temperature: float | Omit = omit, - vision: bool | Omit = omit, - vpn_country: Optional[Literal["US", "BR", "FR", "DE", "IN", "JP", "KR", "ZA"]] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -680,43 +618,9 @@ async def run( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> TaskRunResponse: - """ - Run Task - - Args: - device_id: The ID of the device to run the task on. - - display_id: The display ID of the device to run the task on. 
- - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ + """Run Task""" return await self._post( "/tasks/", - body=await async_maybe_transform( - { - "llm_model": llm_model, - "task": task, - "apps": apps, - "credentials": credentials, - "device_id": device_id, - "display_id": display_id, - "execution_timeout": execution_timeout, - "files": files, - "max_steps": max_steps, - "output_schema": output_schema, - "reasoning": reasoning, - "temperature": temperature, - "vision": vision, - "vpn_country": vpn_country, - }, - task_run_params.TaskRunParams, - ), options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), diff --git a/src/mobilerun/types/__init__.py b/src/mobilerun/types/__init__.py index e5925bb..59932b1 100644 --- a/src/mobilerun/types/__init__.py +++ b/src/mobilerun/types/__init__.py @@ -7,7 +7,6 @@ from .llm_model import LlmModel as LlmModel from .task_status import TaskStatus as TaskStatus from .app_list_params import AppListParams as AppListParams -from .task_run_params import TaskRunParams as TaskRunParams from .hook_list_params import HookListParams as HookListParams from .task_list_params import TaskListParams as TaskListParams from .app_list_response import AppListResponse as AppListResponse @@ -20,6 +19,7 @@ from .device_create_params import DeviceCreateParams as DeviceCreateParams from .device_list_response import DeviceListResponse as DeviceListResponse from .hook_update_response import HookUpdateResponse as HookUpdateResponse +from .device_count_response import DeviceCountResponse as DeviceCountResponse from .hook_perform_response import HookPerformResponse as HookPerformResponse from .hook_subscribe_params import HookSubscribeParams as HookSubscribeParams from .hook_retrieve_response import HookRetrieveResponse as HookRetrieveResponse diff --git a/src/mobilerun/types/device_count_response.py b/src/mobilerun/types/device_count_response.py new file mode 100644 index 0000000..517cdeb --- /dev/null +++ b/src/mobilerun/types/device_count_response.py @@ -0,0 +1,22 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from pydantic import Field as FieldInfo + +from .._models import BaseModel + +__all__ = ["DeviceCountResponse"] + + +class DeviceCountResponse(BaseModel): + limrun: int + + personal: int + + remote: int + + roidrun: int + + schema_: Optional[str] = FieldInfo(alias="$schema", default=None) + """A URL to the JSON Schema for this object.""" diff --git a/src/mobilerun/types/llm_model.py b/src/mobilerun/types/llm_model.py index b42bc72..f0a0dca 100644 --- a/src/mobilerun/types/llm_model.py +++ b/src/mobilerun/types/llm_model.py @@ -5,9 +5,11 @@ __all__ = ["LlmModel"] LlmModel: TypeAlias = Literal[ - "openai/gpt-5", + "openai/gpt-5.1", + "openai/gpt-5.2", "google/gemini-2.5-flash", "google/gemini-2.5-pro", + "google/gemini-3-flash", "google/gemini-3-pro-preview", "anthropic/claude-sonnet-4.5", "minimax/minimax-m2", diff --git a/src/mobilerun/types/task_run_params.py b/src/mobilerun/types/task_run_params.py deleted file mode 100644 index 81389c6..0000000 --- a/src/mobilerun/types/task_run_params.py +++ /dev/null @@ -1,52 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from __future__ import annotations - -from typing import Dict, Iterable, Optional -from typing_extensions import Literal, Required, Annotated, TypedDict - -from .._types import SequenceNotStr -from .._utils import PropertyInfo -from .llm_model import LlmModel - -__all__ = ["TaskRunParams", "Credential"] - - -class TaskRunParams(TypedDict, total=False): - llm_model: Required[Annotated[LlmModel, PropertyInfo(alias="llmModel")]] - - task: Required[str] - - apps: SequenceNotStr[str] - - credentials: Iterable[Credential] - - device_id: Annotated[Optional[str], PropertyInfo(alias="deviceId")] - """The ID of the device to run the task on.""" - - display_id: Annotated[int, PropertyInfo(alias="displayId")] - """The display ID of the device to run the task on.""" - - execution_timeout: Annotated[int, PropertyInfo(alias="executionTimeout")] - - files: SequenceNotStr[str] - - max_steps: Annotated[int, PropertyInfo(alias="maxSteps")] - - output_schema: Annotated[Optional[Dict[str, object]], PropertyInfo(alias="outputSchema")] - - reasoning: bool - - temperature: float - - vision: bool - - vpn_country: Annotated[ - Optional[Literal["US", "BR", "FR", "DE", "IN", "JP", "KR", "ZA"]], PropertyInfo(alias="vpnCountry") - ] - - -class Credential(TypedDict, total=False): - credential_names: Required[Annotated[SequenceNotStr[str], PropertyInfo(alias="credentialNames")]] - - package_name: Required[Annotated[str, PropertyInfo(alias="packageName")]] diff --git a/tests/api_resources/test_devices.py b/tests/api_resources/test_devices.py index 0f3babb..f762064 100644 --- a/tests/api_resources/test_devices.py +++ b/tests/api_resources/test_devices.py @@ -9,7 +9,7 @@ from mobilerun import Mobilerun, AsyncMobilerun from tests.utils import assert_matches_type -from mobilerun.types import Device, DeviceListResponse +from mobilerun.types import Device, DeviceListResponse, DeviceCountResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -150,6 +150,34 @@ def test_streaming_response_list(self, client: Mobilerun) -> None: assert cast(Any, response.is_closed) is True + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_count(self, client: Mobilerun) -> None: + device = client.devices.count() + assert_matches_type(DeviceCountResponse, device, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_count(self, client: Mobilerun) -> None: + response = client.devices.with_raw_response.count() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + device = response.parse() + assert_matches_type(DeviceCountResponse, device, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_count(self, client: Mobilerun) -> None: + with client.devices.with_streaming_response.count() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + device = response.parse() + assert_matches_type(DeviceCountResponse, device, path=["response"]) + + assert cast(Any, response.is_closed) is True + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_terminate(self, client: Mobilerun) -> None: @@ -373,6 +401,34 @@ async def test_streaming_response_list(self, async_client: AsyncMobilerun) -> No assert cast(Any, response.is_closed) is True + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def 
test_method_count(self, async_client: AsyncMobilerun) -> None: + device = await async_client.devices.count() + assert_matches_type(DeviceCountResponse, device, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_count(self, async_client: AsyncMobilerun) -> None: + response = await async_client.devices.with_raw_response.count() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + device = await response.parse() + assert_matches_type(DeviceCountResponse, device, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_count(self, async_client: AsyncMobilerun) -> None: + async with async_client.devices.with_streaming_response.count() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + device = await response.parse() + assert_matches_type(DeviceCountResponse, device, path=["response"]) + + assert cast(Any, response.is_closed) is True + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_terminate(self, async_client: AsyncMobilerun) -> None: diff --git a/tests/api_resources/test_tasks.py b/tests/api_resources/test_tasks.py index d14af58..eede77a 100644 --- a/tests/api_resources/test_tasks.py +++ b/tests/api_resources/test_tasks.py @@ -236,45 +236,13 @@ def test_path_params_get_trajectory(self, client: Mobilerun) -> None: @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_run(self, client: Mobilerun) -> None: - task = client.tasks.run( - llm_model="openai/gpt-5", - task="x", - ) - assert_matches_type(TaskRunResponse, task, path=["response"]) - - @pytest.mark.skip(reason="Prism tests are disabled") - @parametrize - def test_method_run_with_all_params(self, client: Mobilerun) -> None: - task = client.tasks.run( - llm_model="openai/gpt-5", - task="x", - apps=["string"], - credentials=[ - { - "credential_names": ["string"], - "package_name": "packageName", - } - ], - device_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", - display_id=0, - execution_timeout=0, - files=["string"], - max_steps=0, - output_schema={"foo": "bar"}, - reasoning=True, - temperature=0, - vision=True, - vpn_country="US", - ) + task = client.tasks.run() assert_matches_type(TaskRunResponse, task, path=["response"]) @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_run(self, client: Mobilerun) -> None: - response = client.tasks.with_raw_response.run( - llm_model="openai/gpt-5", - task="x", - ) + response = client.tasks.with_raw_response.run() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -284,10 +252,7 @@ def test_raw_response_run(self, client: Mobilerun) -> None: @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_run(self, client: Mobilerun) -> None: - with client.tasks.with_streaming_response.run( - llm_model="openai/gpt-5", - task="x", - ) as response: + with client.tasks.with_streaming_response.run() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -300,7 +265,7 @@ def test_streaming_response_run(self, client: Mobilerun) -> None: @parametrize def test_method_run_streamed(self, client: Mobilerun) -> None: task = client.tasks.run_streamed( - llm_model="openai/gpt-5", + 
llm_model="openai/gpt-5.1", task="x", ) assert task is None @@ -309,7 +274,7 @@ def test_method_run_streamed(self, client: Mobilerun) -> None: @parametrize def test_method_run_streamed_with_all_params(self, client: Mobilerun) -> None: task = client.tasks.run_streamed( - llm_model="openai/gpt-5", + llm_model="openai/gpt-5.1", task="x", apps=["string"], credentials=[ @@ -335,7 +300,7 @@ def test_method_run_streamed_with_all_params(self, client: Mobilerun) -> None: @parametrize def test_raw_response_run_streamed(self, client: Mobilerun) -> None: response = client.tasks.with_raw_response.run_streamed( - llm_model="openai/gpt-5", + llm_model="openai/gpt-5.1", task="x", ) @@ -348,7 +313,7 @@ def test_raw_response_run_streamed(self, client: Mobilerun) -> None: @parametrize def test_streaming_response_run_streamed(self, client: Mobilerun) -> None: with client.tasks.with_streaming_response.run_streamed( - llm_model="openai/gpt-5", + llm_model="openai/gpt-5.1", task="x", ) as response: assert not response.is_closed @@ -619,45 +584,13 @@ async def test_path_params_get_trajectory(self, async_client: AsyncMobilerun) -> @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_run(self, async_client: AsyncMobilerun) -> None: - task = await async_client.tasks.run( - llm_model="openai/gpt-5", - task="x", - ) - assert_matches_type(TaskRunResponse, task, path=["response"]) - - @pytest.mark.skip(reason="Prism tests are disabled") - @parametrize - async def test_method_run_with_all_params(self, async_client: AsyncMobilerun) -> None: - task = await async_client.tasks.run( - llm_model="openai/gpt-5", - task="x", - apps=["string"], - credentials=[ - { - "credential_names": ["string"], - "package_name": "packageName", - } - ], - device_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", - display_id=0, - execution_timeout=0, - files=["string"], - max_steps=0, - output_schema={"foo": "bar"}, - reasoning=True, - temperature=0, - vision=True, - vpn_country="US", - ) + task = await async_client.tasks.run() assert_matches_type(TaskRunResponse, task, path=["response"]) @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_run(self, async_client: AsyncMobilerun) -> None: - response = await async_client.tasks.with_raw_response.run( - llm_model="openai/gpt-5", - task="x", - ) + response = await async_client.tasks.with_raw_response.run() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -667,10 +600,7 @@ async def test_raw_response_run(self, async_client: AsyncMobilerun) -> None: @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_run(self, async_client: AsyncMobilerun) -> None: - async with async_client.tasks.with_streaming_response.run( - llm_model="openai/gpt-5", - task="x", - ) as response: + async with async_client.tasks.with_streaming_response.run() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -683,7 +613,7 @@ async def test_streaming_response_run(self, async_client: AsyncMobilerun) -> Non @parametrize async def test_method_run_streamed(self, async_client: AsyncMobilerun) -> None: task = await async_client.tasks.run_streamed( - llm_model="openai/gpt-5", + llm_model="openai/gpt-5.1", task="x", ) assert task is None @@ -692,7 +622,7 @@ async def test_method_run_streamed(self, async_client: AsyncMobilerun) -> None: @parametrize async def 
test_method_run_streamed_with_all_params(self, async_client: AsyncMobilerun) -> None: task = await async_client.tasks.run_streamed( - llm_model="openai/gpt-5", + llm_model="openai/gpt-5.1", task="x", apps=["string"], credentials=[ @@ -718,7 +648,7 @@ async def test_method_run_streamed_with_all_params(self, async_client: AsyncMobi @parametrize async def test_raw_response_run_streamed(self, async_client: AsyncMobilerun) -> None: response = await async_client.tasks.with_raw_response.run_streamed( - llm_model="openai/gpt-5", + llm_model="openai/gpt-5.1", task="x", ) @@ -731,7 +661,7 @@ async def test_raw_response_run_streamed(self, async_client: AsyncMobilerun) -> @parametrize async def test_streaming_response_run_streamed(self, async_client: AsyncMobilerun) -> None: async with async_client.tasks.with_streaming_response.run_streamed( - llm_model="openai/gpt-5", + llm_model="openai/gpt-5.1", task="x", ) as response: assert not response.is_closed diff --git a/tests/test_client.py b/tests/test_client.py index add5de8..8c4f9eb 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -8,10 +8,11 @@ import json import asyncio import inspect +import dataclasses import tracemalloc -from typing import Any, Union, cast +from typing import Any, Union, TypeVar, Callable, Iterable, Iterator, Optional, Coroutine, cast from unittest import mock -from typing_extensions import Literal +from typing_extensions import Literal, AsyncIterator, override import httpx import pytest @@ -36,6 +37,7 @@ from .utils import update_env +T = TypeVar("T") base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") api_key = "My API Key" @@ -50,6 +52,57 @@ def _low_retry_timeout(*_args: Any, **_kwargs: Any) -> float: return 0.1 +def mirror_request_content(request: httpx.Request) -> httpx.Response: + return httpx.Response(200, content=request.content) + + +# note: we can't use the httpx.MockTransport class as it consumes the request +# body itself, which means we can't test that the body is read lazily +class MockTransport(httpx.BaseTransport, httpx.AsyncBaseTransport): + def __init__( + self, + handler: Callable[[httpx.Request], httpx.Response] + | Callable[[httpx.Request], Coroutine[Any, Any, httpx.Response]], + ) -> None: + self.handler = handler + + @override + def handle_request( + self, + request: httpx.Request, + ) -> httpx.Response: + assert not inspect.iscoroutinefunction(self.handler), "handler must not be a coroutine function" + assert inspect.isfunction(self.handler), "handler must be a function" + return self.handler(request) + + @override + async def handle_async_request( + self, + request: httpx.Request, + ) -> httpx.Response: + assert inspect.iscoroutinefunction(self.handler), "handler must be a coroutine function" + return await self.handler(request) + + +@dataclasses.dataclass +class Counter: + value: int = 0 + + +def _make_sync_iterator(iterable: Iterable[T], counter: Optional[Counter] = None) -> Iterator[T]: + for item in iterable: + if counter: + counter.value += 1 + yield item + + +async def _make_async_iterator(iterable: Iterable[T], counter: Optional[Counter] = None) -> AsyncIterator[T]: + for item in iterable: + if counter: + counter.value += 1 + yield item + + def _get_open_connections(client: Mobilerun | AsyncMobilerun) -> int: transport = client._client._transport assert isinstance(transport, httpx.HTTPTransport) or isinstance(transport, httpx.AsyncHTTPTransport) @@ -511,6 +564,70 @@ def test_multipart_repeating_array(self, client: Mobilerun) -> None: b"", ] + 
@pytest.mark.respx(base_url=base_url) + def test_binary_content_upload(self, respx_mock: MockRouter, client: Mobilerun) -> None: + respx_mock.post("/upload").mock(side_effect=mirror_request_content) + + file_content = b"Hello, this is a test file." + + response = client.post( + "/upload", + content=file_content, + cast_to=httpx.Response, + options={"headers": {"Content-Type": "application/octet-stream"}}, + ) + + assert response.status_code == 200 + assert response.request.headers["Content-Type"] == "application/octet-stream" + assert response.content == file_content + + def test_binary_content_upload_with_iterator(self) -> None: + file_content = b"Hello, this is a test file." + counter = Counter() + iterator = _make_sync_iterator([file_content], counter=counter) + + def mock_handler(request: httpx.Request) -> httpx.Response: + assert counter.value == 0, "the request body should not have been read" + return httpx.Response(200, content=request.read()) + + with Mobilerun( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.Client(transport=MockTransport(handler=mock_handler)), + ) as client: + response = client.post( + "/upload", + content=iterator, + cast_to=httpx.Response, + options={"headers": {"Content-Type": "application/octet-stream"}}, + ) + + assert response.status_code == 200 + assert response.request.headers["Content-Type"] == "application/octet-stream" + assert response.content == file_content + assert counter.value == 1 + + @pytest.mark.respx(base_url=base_url) + def test_binary_content_upload_with_body_is_deprecated(self, respx_mock: MockRouter, client: Mobilerun) -> None: + respx_mock.post("/upload").mock(side_effect=mirror_request_content) + + file_content = b"Hello, this is a test file." + + with pytest.deprecated_call( + match="Passing raw bytes as `body` is deprecated and will be removed in a future version. Please pass raw bytes via the `content` parameter instead." + ): + response = client.post( + "/upload", + body=file_content, + cast_to=httpx.Response, + options={"headers": {"Content-Type": "application/octet-stream"}}, + ) + + assert response.status_code == 200 + assert response.request.headers["Content-Type"] == "application/octet-stream" + assert response.content == file_content + @pytest.mark.respx(base_url=base_url) def test_basic_union_response(self, respx_mock: MockRouter, client: Mobilerun) -> None: class Model1(BaseModel): @@ -1339,6 +1456,72 @@ def test_multipart_repeating_array(self, async_client: AsyncMobilerun) -> None: b"", ] + @pytest.mark.respx(base_url=base_url) + async def test_binary_content_upload(self, respx_mock: MockRouter, async_client: AsyncMobilerun) -> None: + respx_mock.post("/upload").mock(side_effect=mirror_request_content) + + file_content = b"Hello, this is a test file." + + response = await async_client.post( + "/upload", + content=file_content, + cast_to=httpx.Response, + options={"headers": {"Content-Type": "application/octet-stream"}}, + ) + + assert response.status_code == 200 + assert response.request.headers["Content-Type"] == "application/octet-stream" + assert response.content == file_content + + async def test_binary_content_upload_with_asynciterator(self) -> None: + file_content = b"Hello, this is a test file." 
+ counter = Counter() + iterator = _make_async_iterator([file_content], counter=counter) + + async def mock_handler(request: httpx.Request) -> httpx.Response: + assert counter.value == 0, "the request body should not have been read" + return httpx.Response(200, content=await request.aread()) + + async with AsyncMobilerun( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.AsyncClient(transport=MockTransport(handler=mock_handler)), + ) as client: + response = await client.post( + "/upload", + content=iterator, + cast_to=httpx.Response, + options={"headers": {"Content-Type": "application/octet-stream"}}, + ) + + assert response.status_code == 200 + assert response.request.headers["Content-Type"] == "application/octet-stream" + assert response.content == file_content + assert counter.value == 1 + + @pytest.mark.respx(base_url=base_url) + async def test_binary_content_upload_with_body_is_deprecated( + self, respx_mock: MockRouter, async_client: AsyncMobilerun + ) -> None: + respx_mock.post("/upload").mock(side_effect=mirror_request_content) + + file_content = b"Hello, this is a test file." + + with pytest.deprecated_call( + match="Passing raw bytes as `body` is deprecated and will be removed in a future version. Please pass raw bytes via the `content` parameter instead." + ): + response = await async_client.post( + "/upload", + body=file_content, + cast_to=httpx.Response, + options={"headers": {"Content-Type": "application/octet-stream"}}, + ) + + assert response.status_code == 200 + assert response.request.headers["Content-Type"] == "application/octet-stream" + assert response.content == file_content + @pytest.mark.respx(base_url=base_url) async def test_basic_union_response(self, respx_mock: MockRouter, async_client: AsyncMobilerun) -> None: class Model1(BaseModel):
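
Usage sketch for the two user-facing additions in 2.1.0: raw binary request bodies via the `content=` parameter and the new `client.devices.count()` method. Only the call signatures are taken from the diff above; the API key handling and the `/upload` path (borrowed from the test fixtures) are illustrative assumptions, not documented endpoints.

```python
# Minimal sketch, not generated code: exercises the 2.1.0 additions shown in this diff.
import httpx

from mobilerun import Mobilerun

# How credentials are supplied (explicit key vs. environment) is an assumption here.
client = Mobilerun(api_key="My API Key")

# New endpoint: count claimed devices (GET /devices/count).
counts = client.devices.count()
print(counts.personal, counts.remote)

# New `content=` parameter for raw binary bodies; passing raw bytes as `body=`
# now emits a DeprecationWarning (see the _base_client.py changes above).
upload = client.post(
    "/upload",  # illustrative path taken from the test suite, not a documented endpoint
    content=b"Hello, this is a test file.",
    cast_to=httpx.Response,
    options={"headers": {"Content-Type": "application/octet-stream"}},
)
print(upload.status_code)

# `content=` also accepts an iterator of bytes; the tests above check that the
# body is read lazily, only when the request is actually sent.
def chunks():
    yield b"part-1"
    yield b"part-2"

streamed = client.post(
    "/upload",
    content=chunks(),
    cast_to=httpx.Response,
    options={"headers": {"Content-Type": "application/octet-stream"}},
)
```

On the async client the same parameter accepts an async iterator (`AsyncBinaryTypes`). Note also that `tasks.run()` no longer takes typed request parameters in this release; the typed params remain on `run_streamed()`.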