diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e913729a..3bf5d9ca 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,18 +30,18 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] os: [ubuntu-latest, windows-latest, macos-latest] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Run spell check on Ubuntu if: matrix.os == 'ubuntu-latest' uses: codespell-project/actions-codespell@master with: ignore_words_list: assertIn - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/README.md b/README.md index efd3a3af..4cb0354c 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ The MinIO Python Client SDK provides high level APIs to access any MinIO Object This Quickstart Guide covers how to install the MinIO client SDK, connect to the object storage service, and create a sample file uploader. The example below uses: -- [Python version 3.9+](https://www.python.org/downloads/) +- [Python version 3.10+](https://www.python.org/downloads/) - The [MinIO `mc` command line tool](https://min.io/docs/minio/linux/reference/minio-mc.html) - The MinIO `play` test server @@ -13,11 +13,11 @@ The `play` server is a public MinIO cluster located at [https://play.min.io](htt This cluster runs the latest stable version of MinIO and may be used for testing and development. The access credentials in the example are open to the public and all data uploaded to `play` should be considered public and world-readable. -For a complete list of APIs and examples, see the [Python Client API Reference](https://min.io/docs/minio/linux/developers/python/API.html) +For a complete list of APIs and examples, see the [Python SDK Documentation](https://docs.min.io/enterprise/aistor-object-store/developers/sdk/python/) ## Install the MinIO Python SDK -The Python SDK requires Python version 3.9+. +The Python SDK requires Python version 3.10+. 
You can install the SDK with `pip` or from the [`minio/minio-py` GitHub repository](https://github.com/minio/minio-py): ### Using `pip` @@ -141,12 +141,12 @@ mc ls play/python-test-bucket ## More References -* [Python Client API Reference](https://min.io/docs/minio/linux/developers/python/API.html) +* [Python SDK Documentation](https://docs.min.io/enterprise/aistor-object-store/developers/sdk/python/) * [Examples](https://github.com/minio/minio-py/tree/master/examples) ## Explore Further -* [Complete Documentation](https://min.io/docs/minio/kubernetes/upstream/index.html) +* [Complete Documentation](https://docs.min.io/enterprise/aistor-object-store/) ## Contribute diff --git a/docs/API.md b/docs/API.md index 4023e7d4..b524d3ac 100644 --- a/docs/API.md +++ b/docs/API.md @@ -72,26 +72,29 @@ client = Minio( | [`bucket_exists`](#bucket_exists) | [`put_object`](#put_object) | | [`remove_bucket`](#remove_bucket) | [`copy_object`](#copy_object) | | [`list_objects`](#list_objects) | [`compose_object`](#compose_object) | -| [`get_bucket_versioning`](#get_bucket_versioning) | [`stat_object`](#stat_object) | -| [`set_bucket_versioning`](#set_bucket_versioning) | [`remove_object`](#remove_object) | -| [`delete_bucket_replication`](#delete_bucket_replication) | [`remove_objects`](#remove_objects) | -| [`get_bucket_replication`](#get_bucket_replication) | [`fput_object`](#fput_object) | -| [`set_bucket_replication`](#set_bucket_replication) | [`fget_object`](#fget_object) | -| [`delete_bucket_lifecycle`](#delete_bucket_lifecycle) | [`select_object_content`](#select_object_content) | -| [`get_bucket_lifecycle`](#get_bucket_lifecycle) | [`delete_object_tags`](#delete_object_tags) | -| [`set_bucket_lifecycle`](#set_bucket_lifecycle) | [`get_object_tags`](#get_object_tags) | -| [`delete_bucket_tags`](#delete_bucket_tags) | [`set_object_tags`](#set_object_tags) | -| [`get_bucket_tags`](#get_bucket_tags) | [`enable_object_legal_hold`](#enable_object_legal_hold) | -| [`set_bucket_tags`](#set_bucket_tags) | [`disable_object_legal_hold`](#disable_object_legal_hold) | -| [`delete_bucket_policy`](#delete_bucket_policy) | [`is_object_legal_hold_enabled`](#is_object_legal_hold_enabled) | -| [`get_bucket_policy`](#get_bucket_policy) | [`get_object_retention`](#get_object_retention) | -| [`set_bucket_policy`](#set_bucket_policy) | [`set_object_retention`](#set_object_retention) | -| [`delete_bucket_notification`](#delete_bucket_notification) | [`presigned_get_object`](#presigned_get_object) | -| [`get_bucket_notification`](#get_bucket_notification) | [`presigned_put_object`](#presigned_put_object) | -| [`set_bucket_notification`](#set_bucket_notification) | [`presigned_post_policy`](#presigned_post_policy) | -| [`listen_bucket_notification`](#listen_bucket_notification) | [`get_presigned_url`](#get_presigned_url) | -| [`delete_bucket_encryption`](#delete_bucket_encryption) | [`upload_snowball_objects`](#upload_snowball_objects) | -| [`get_bucket_encryption`](#get_bucket_encryption) | [`prompt_object`](#prompt_object) | +| [`delete_bucket_cors`](#delete_bucket_cors) | [`stat_object`](#stat_object) | +| [`get_bucket_cors`](#get_bucket_cors) | [`remove_object`](#remove_object) | +| [`set_bucket_cors`](#set_bucket_cors) | [`remove_objects`](#remove_objects) | +| [`get_bucket_versioning`](#get_bucket_versioning) | [`fput_object`](#fput_object) | +| [`set_bucket_versioning`](#set_bucket_versioning) | [`fget_object`](#fget_object) | +| [`delete_bucket_replication`](#delete_bucket_replication) | 
[`select_object_content`](#select_object_content) | +| [`get_bucket_replication`](#get_bucket_replication) | [`delete_object_tags`](#delete_object_tags) | +| [`set_bucket_replication`](#set_bucket_replication) | [`get_object_tags`](#get_object_tags) | +| [`delete_bucket_lifecycle`](#delete_bucket_lifecycle) | [`set_object_tags`](#set_object_tags) | +| [`get_bucket_lifecycle`](#get_bucket_lifecycle) | [`enable_object_legal_hold`](#enable_object_legal_hold) | +| [`set_bucket_lifecycle`](#set_bucket_lifecycle) | [`disable_object_legal_hold`](#disable_object_legal_hold) | +| [`delete_bucket_tags`](#delete_bucket_tags) | [`is_object_legal_hold_enabled`](#is_object_legal_hold_enabled) | +| [`get_bucket_tags`](#get_bucket_tags) | [`get_object_retention`](#get_object_retention) | +| [`set_bucket_tags`](#set_bucket_tags) | [`set_object_retention`](#set_object_retention) | +| [`delete_bucket_policy`](#delete_bucket_policy) | [`presigned_get_object`](#presigned_get_object) | +| [`get_bucket_policy`](#get_bucket_policy) | [`presigned_put_object`](#presigned_put_object) | +| [`set_bucket_policy`](#set_bucket_policy) | [`presigned_post_policy`](#presigned_post_policy) | +| [`delete_bucket_notification`](#delete_bucket_notification) | [`get_presigned_url`](#get_presigned_url) | +| [`get_bucket_notification`](#get_bucket_notification) | [`upload_snowball_objects`](#upload_snowball_objects) | +| [`set_bucket_notification`](#set_bucket_notification) | [`prompt_object`](#prompt_object) | +| [`listen_bucket_notification`](#listen_bucket_notification) | [`get_object_acl`](#get_object_acl) | +| [`delete_bucket_encryption`](#delete_bucket_encryption) | [`get_object_attributes`](#get_object_attributes) | +| [`get_bucket_encryption`](#get_bucket_encryption) | [`put_object_fan_out`](#put_object_fan_out) | | [`set_bucket_encryption`](#set_bucket_encryption) | | | [`delete_object_lock_config`](#delete_object_lock_config) | | | [`get_object_lock_config`](#get_object_lock_config) | | @@ -107,13 +110,13 @@ Create a bucket with region and object lock. __Parameters__ -| Param | Type | Description | -|----------------------|-------------------------------------------------|--------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `location` | _Optional[str] = None_ | Region in which the bucket to be created. | -| `object_lock` | _bool = False_ | Flag to set object-lock feature. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|----------------------|------------------------------------------------|--------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `location` | _Optional[str] = None_ | Region in which the bucket to be created. | +| `object_lock` | _bool = False_ | Flag to set object-lock feature. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. 
| __Example__ @@ -130,23 +133,25 @@ client.make_bucket(bucket_name="my-bucket", location="eu-west-2", object_lock=Tr -### list_buckets(self, *, bucket_region: Optional[str] = None, max_buckets: int = 10000, prefix: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, ) -> Iterator[Bucket] +### list_buckets(self, *, bucket_region: Optional[str] = None, max_buckets: int = 10000, prefix: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, ) -> Iterator[ListAllMyBucketsResult.Bucket] List information of all accessible buckets. -| Param | Type | Description | -|----------------------|-------------------------------------------------|--------------------------------------------| -| `bucket_region` | _Optional[str] = None_ | Fetch buckets from the region. | -| `max_buckets` | _int = 10000_ | Fetch maximum number of buckets. | -| `prefix` | _Optional[str] = None_ | Fetch buckets starts with the prefix. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | - __Parameters__ -| Return | -|:----------------------------------------| -| An iterator of _minio.datatypes.Bucket_ | +| Param | Type | Description | +|----------------------|------------------------------------------------|--------------------------------------------| +| `bucket_region` | _Optional[str] = None_ | Fetch buckets from the region. | +| `max_buckets` | _int = 10000_ | Fetch maximum number of buckets. | +| `prefix` | _Optional[str] = None_ | Fetch buckets starts with the prefix. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | + +__Return Value__ + +| Return | +|:------------------------------------------------------------| +| An iterator of _minio.models.ListAllMyBucketsResult.Bucket_ | __Example__ @@ -164,12 +169,12 @@ Check if a bucket exists. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -188,12 +193,12 @@ Remove an empty bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. 
| -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -209,26 +214,26 @@ Lists object information of a bucket. __Parameters__ -| Param | Type | Description | -|:------------------------|:------------------------------------------------|:-------------------------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `prefix` | _Optional[str] = None_ | Object name starts with prefix. | -| `recursive` | _bool = False_ | List recursively than directory structure emulation. | -| `start_after` | _Optional[str] = None_ | List objects after this key name. | -| `include_user_meta` | _bool = False_ | MinIO specific flag to control to include user metadata. | -| `include_version` | _bool = False_ | Flag to control whether include object versions. | -| `use_api_v1` | _bool = False_ | Flag to control to use ListObjectV1 S3 API or not. | -| `use_url_encoding_type` | _bool = True_ | Flag to control whether URL encoding type to be used or not. | -| `fetch_owner` | _bool = False_ | Flag to control to fetch owner information. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:------------------------|:-----------------------------------------------|:-------------------------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `prefix` | _Optional[str] = None_ | Object name starts with prefix. | +| `recursive` | _bool = False_ | List recursively than directory structure emulation. | +| `start_after` | _Optional[str] = None_ | List objects after this key name. | +| `include_user_meta` | _bool = False_ | MinIO specific flag to control to include user metadata. | +| `include_version` | _bool = False_ | Flag to control whether include object versions. | +| `use_api_v1` | _bool = False_ | Flag to control to use ListObjectV1 S3 API or not. | +| `use_url_encoding_type` | _bool = True_ | Flag to control whether URL encoding type to be used or not. | +| `fetch_owner` | _bool = False_ | Flag to control to fetch owner information. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. 
| __Return Value__ -| Return | -|:----------------------------------------| -| An iterator of _minio.datatypes.Object_ | +| Return | +|:-------------------------------------| +| An iterator of _minio.models.Object_ | __Example__ @@ -273,12 +278,12 @@ Get bucket policy configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Return Value__ @@ -300,13 +305,13 @@ Set bucket policy configuration to a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:--------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `policy` | _str \| bytes_ | Bucket policy configuration as JSON string. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:--------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `policy` | _str \| bytes_ | Bucket policy configuration as JSON string. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -370,12 +375,12 @@ Delete bucket policy configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. 
| +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -383,6 +388,91 @@ __Example__ client.delete_bucket_policy(bucket_name="my-bucket") ``` + + +### get_bucket_cors(self, *, bucket_name: str, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> CORSConfig + +Get CORS configuration of a bucket. + +__Parameters__ + +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | + +__Return Value__ + +| Param | +|:----------------------------------| +| _minio.models.CORSConfig_ object. | + +__Example__ + +```py +config = client.get_bucket_cors(bucket_name="my-bucket") +``` + + + +### set_bucket_cors(self, *, bucket_name: str, config: CORSConfig, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) + +Set CORS configuration of a bucket. + +__Parameters__ + +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `config` | _minio.models.CORSConfig_ | Cors configuration. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | + +__Example__ + +```py +config = CORSConfig( + rules=[ + CORSConfig.CORSRule( + allowed_headers=["*"], + allowed_methods=["PUT", "POST", "DELETE"], + allowed_origins=["http://www.example.com"], + expose_headers=["x-amz-server-side-encryption"], + max_age_seconds=3000, + ), + CORSConfig.CORSRule( + allowed_methods=["GET"], + allowed_origins=["*"], + ), + ], +) +client.set_bucket_cors(bucket_name="my-bucket", config=config) +``` + + + +### delete_bucket_cors(self, *, bucket_name: str, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) + +Delete CORS configuration of a bucket. + +__Parameters__ + +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. 
| + +__Example__ + +```py +client.delete_bucket_cors(bucket_name="my-bucket") +``` + ### get_bucket_notification(self, *, bucket_name: str, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> NotificationConfig @@ -391,18 +481,18 @@ Get notification configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Return Value__ -| Param | -|:------------------------------------------------------| -| _minio.notificationconfig.NotificationConfig_ object. | +| Param | +|:------------------------------------------| +| _minio.models.NotificationConfig_ object. | __Example__ @@ -418,13 +508,13 @@ Set notification configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `config` | _minio.notificationconfig.NotificationConfig_ | Notification configuration. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `config` | _minio.models.NotificationConfig_ | Notification configuration. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -450,12 +540,12 @@ Delete notification configuration of a bucket. On success, S3 service stops noti __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. 
| -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -478,14 +568,14 @@ __Parameters__ | `suffix` | _str = ""_ | Listen events of object ends with suffix. | | `events` | _tuple[str, ...] = ('s3:ObjectCreated:*', 's3:ObjectRemoved:*', 's3:ObjectAccessed:*')_ | Events to listen. | | `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Return Value__ -| Param | -|:----------------------------------------------------------| -| Iterator _minio.datatypes.EventIterable_ of event records | +| Param | +|:-------------------------------------------------------| +| Iterator _minio.models.EventIterable_ of event records | ```py with client.listen_bucket_notification( @@ -504,18 +594,18 @@ Get encryption configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Return Value__ -| Param | -|:----------------------------------------------| -| _Optional[minio.sseconfig.SSEConfig]_ object. | +| Param | +|:-------------------------------------------| +| _Optional[minio.models.SSEConfig]_ object. | __Example__ @@ -531,13 +621,13 @@ Set encryption configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `config` | _minio.sseconfig.SSEConfig_ | Server-side encryption configuration. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. 
| -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `config` | _minio.models.SSEConfig_ | Server-side encryption configuration. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -555,12 +645,12 @@ Delete encryption configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -576,18 +666,18 @@ Get versioning configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Return Value__ -| Param | -|:--------------------------------------------------| -| _minio.versioningconfig.VersioningConfig_ object. | +| Param | +|:----------------------------------------| +| _minio.models.VersioningConfig_ object. | __Example__ @@ -604,13 +694,13 @@ Set versioning configuration to a bucket. 
__Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `config` | _minio.versioningconfig.VersioningConfig_ | Versioning configuration. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `config` | _minio.models.VersioningConfig_ | Versioning configuration. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -626,12 +716,12 @@ Delete replication configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -647,16 +737,16 @@ Get replication configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. 
| -| Return | -|:--------------------------------------------------------------| -| _Optional[minio.replicationconfig.ReplicationConfig]_ object. | +| Return | +|:---------------------------------------------------| +| _Optional[minio.models.ReplicationConfig]_ object. | __Example__ @@ -672,13 +762,13 @@ Set replication configuration to a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `config` | _minio.replicationconfig.ReplicationConfig_ | Replication configuration. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `config` | _minio.models.ReplicationConfig_ | Replication configuration. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -716,12 +806,12 @@ Delete lifecycle configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -737,16 +827,16 @@ Get lifecycle configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. 
| +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | -| Return | -|:----------------------------------------------------------| -| _Optional[minio.lifecycleconfig.LifecycleConfig]_ object. | +| Return | +|:-------------------------------------------------| +| _Optional[minio.models.LifecycleConfig]_ object. | __Example__ @@ -763,13 +853,13 @@ Set lifecycle configuration to a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `config` | _minio.lifecycleconfig.LifecycleConfig_ | Lifecycle configuration. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `config` | _minio.models.LifecycleConfig_ | Lifecycle configuration. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -801,12 +891,12 @@ Delete tags configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -822,16 +912,16 @@ Get tags configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. 
| +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | -| Return | -|:--------------------------------------------| -| _Optional[minio.commonconfig.Tags]_ object. | +| Return | +|:--------------------------------------| +| _Optional[minio.models.Tags]_ object. | __Example__ @@ -847,13 +937,13 @@ Set tags configuration to a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `tags` | _minio.commonconfig.Tags_ | Tags configuration. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `tags` | _minio.models.Tags_ | Tags configuration. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -872,12 +962,12 @@ Delete object-lock configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -893,16 +983,16 @@ Get object-lock configuration of a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. 
| -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | -| Return | -|:--------------------------------------------------| -| _minio.objectlockconfig.ObjectLockConfig_ object. | +| Return | +|:----------------------------------------| +| _minio.models.ObjectLockConfig_ object. | __Example__ @@ -918,13 +1008,13 @@ Set object-lock configuration to a bucket. __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `config` | _minio.objectlockconfig.ObjectLockConfig_ | Object-Lock configuration. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. | +| `config` | _minio.models.ObjectLockConfig_ | Object-Lock configuration. | +| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | +| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | +| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | __Example__ @@ -943,25 +1033,25 @@ Appends data to existing object in a bucket. Only of `filename`, `stream` or `da __Parameters__ -| Param | Type | Description | -|:---------------------|:------------------------------------------------|:-----------------------------------------------------------| -| `bucket_name` | _str_ | Name of the bucket. | -| `object_name` | _str_ | Object name in the bucket. | -| `filename` | _Optional[str \| os.PathLike] = None_ | Name of file to append. | -| `stream` | _Optional[io.BinaryIO] = None_ | An object having callable `read()` returning bytes object. | -| `data` | _Optional[bytes] = None_ | Data in byte array. | -| `length` | _Optional[int] = None_ | Data length of `data` or `stream`. | -| `chunk_size` | _Optional[int] = None_ | Chunk size. | -| `progress` | _Optional[minio.helpers.ProgressType] = None_ | A progress object. | -| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. | -| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. | -| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. | +| Param | Type | Description | +|:---------------------|:-----------------------------------------------|:-----------------------------------------------------------| +| `bucket_name` | _str_ | Name of the bucket. 
|
+| `object_name` | _str_ | Object name in the bucket. |
+| `filename` | _Optional[str \| os.PathLike] = None_ | Name of file to append. |
+| `stream` | _Optional[io.BinaryIO] = None_ | An object having callable `read()` returning bytes object. |
+| `data` | _Optional[bytes] = None_ | Data in byte array. |
+| `length` | _Optional[int] = None_ | Data length of `data` or `stream`. |
+| `chunk_size` | _Optional[int] = None_ | Chunk size. |
+| `progress` | _Optional[minio.args.ProgressType] = None_ | A progress object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

-| Return |
-|:------------------------------------------|
-| _minio.helpers.ObjectWriteResult_ object. |
+| Return |
+|:-------------------------------------------|
+| _minio.models.ObjectWriteResponse_ object. |

__Example__

```py
@@ -1002,34 +1092,34 @@ with urlopen(
-### get_object(self, *, bucket_name: str, object_name: str, version_id: Optional[str] = None, ssec: Optional[SseCustomerKey] = None, offset: int = 0, length: Optional[int] = None, match_etag: Optional[str] = None, not_match_etag: Optional[str] = None, modified_since: Optional[datetime] = None, unmodified_since: Optional[datetime] = None, fetch_checksum: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> BaseHTTPResponse
+### get_object(self, *, bucket_name: str, object_name: str, version_id: Optional[str] = None, ssec: Optional[SseCustomerKey] = None, offset: int = 0, length: Optional[int] = None, match_etag: Optional[str] = None, not_match_etag: Optional[str] = None, modified_since: Optional[datetime] = None, unmodified_since: Optional[datetime] = None, fetch_checksum: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> GetObjectResponse

-Gets data from offset to length of an object. Returned response should be closed after use to release network resources. To reuse the connection, it's required to call `response.release_conn()` explicitly.
+Gets data from offset to length of an object. Returned response should be closed after use to release network resources.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:--------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `version_id` | _Optional[str] = None_ | Version-ID of the object. |
-| `ssec` | _Optional[minio.sse.SseCustomerKey] = None_ | Server-side encryption customer key. |
-| `offset` | _int = 0_ | Start byte position of object data. |
-| `length` | _Optional[int] = None_ | Number of bytes of object data from offset. |
-| `match_etag` | _Optional[str] = None_ | Match ETag of the object. |
-| `not_match_etag` | _Optional[str] = None_ | None-match ETag of the object. |
-| `modified_since` | _Optional[datetime.datetime] = None_ | Modified-since of the object. |
-| `unmodified_since` | _Optional[datetime.datetime] = None_ | Unmodified-since of the object. |
-| `fetch_checksum` | _bool = False_ | Fetch object checksum. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:--------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `version_id` | _Optional[str] = None_ | Version-ID of the object. |
+| `ssec` | _Optional[minio.sse.SseCustomerKey] = None_ | Server-side encryption customer key. |
+| `offset` | _int = 0_ | Start byte position of object data. |
+| `length` | _Optional[int] = None_ | Number of bytes of object data from offset. |
+| `match_etag` | _Optional[str] = None_ | Match ETag of the object. |
+| `not_match_etag` | _Optional[str] = None_ | None-match ETag of the object. |
+| `modified_since` | _Optional[datetime.datetime] = None_ | Modified-since of the object. |
+| `unmodified_since` | _Optional[datetime.datetime] = None_ | Unmodified-since of the object. |
+| `fetch_checksum` | _bool = False_ | Fetch object checksum. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

-| Return |
-|:-------------------------------------------------------------------------------|
-| _urllib3.response.BaseHTTPResponse_ or _urllib3.response.HTTPResponse_ object. |
+| Return |
+|:-----------------------------------------|
+| _minio.models.GetObjectResponse_ object. |

__Example__

@@ -1040,7 +1130,6 @@ try:
    # Read data from response.
finally:
    response.close()
-    response.release_conn()

# Get data of an object of version-ID.
try:
@@ -1052,7 +1141,6 @@ try:
    # Read data from response.
finally:
    response.close()
-    response.release_conn()

# Get data of an object from offset and length.
try:
@@ -1065,7 +1153,6 @@ try:
    # Read data from response.
finally:
    response.close()
-    response.release_conn()

# Get data of an SSE-C encrypted object.
try:
@@ -1077,31 +1164,30 @@ try:
    # Read data from response.
finally:
    response.close()
-    response.release_conn()
```

-### select_object_content(self, *, bucket_name: str, object_name: str, request: SelectRequest, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> SelectObjectReader
+### select_object_content(self, *, bucket_name: str, object_name: str, request: SelectObjectContentRequest, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> SelectObjectResponse

Select content of an object by SQL expression.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:--------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `request` | _minio.select.SelectRequest_ | Select request. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `request` | _minio.models.SelectObjectContentRequest_ | Select request. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

-| Return |
-|:------------------------------------------|
-| _minio.select.SelectObjectReader_ object. |
+| Return |
+|:--------------------------------------------|
+| _minio.models.SelectObjectResponse_ object. |

__Example__

@@ -1115,39 +1201,43 @@ with client.select_object_content(
        output_serialization=CSVOutputSerialization(),
        request_progress=True,
    ),
-) as result:
-    for data in result.stream():
+) as response:
+    for data in response.stream():
        print(data.decode())
-    print(result.stats())
+    print(response.stats())
```

-### fget_object(self, *, bucket_name: str, object_name: str, file_path: str, match_etag: Optional[str] = None, not_match_etag: Optional[str] = None, modified_since: Optional[datetime] = None, unmodified_since: Optional[datetime] = None, fetch_checksum: bool = False, ssec: Optional[SseCustomerKey] = None, version_id: Optional[str] = None, tmp_file_path: Optional[str] = None, progress: Optional[ProgressType] = None, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None)
+### fget_object(self, *, bucket_name: str, object_name: str, file_path: str, match_etag: Optional[str] = None, not_match_etag: Optional[str] = None, modified_since: Optional[datetime] = None, unmodified_since: Optional[datetime] = None, fetch_checksum: bool = False, ssec: Optional[SseCustomerKey] = None, version_id: Optional[str] = None, tmp_file_path: Optional[str] = None, progress: Optional[ProgressType] = None, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> GetObjectResponse

Downloads data of an object to a file.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:--------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `file_path` | _str_ | Name of file to download. |
-| `version_id` | _Optional[str] = None_ | Version-ID of the object. |
-| `ssec` | _Optional[minio.sse.SseCustomerKey] = None_ | Server-side encryption customer key. |
-| `offset` | _int = 0_ | Start byte position of object data. |
-| `length` | _Optional[int] = None_ | Number of bytes of object data from offset. |
-| `tmp_file_path` | _Optional[str] = None_ | Path to a temporary file. |
-| `progress` | _Optional[minio.helpers.ProgressType] = None_ | A progress object. |
-| `match_etag` | _Optional[str] = None_ | Match ETag of the object. |
-| `not_match_etag` | _Optional[str] = None_ | None-match ETag of the object. |
-| `modified_since` | _Optional[datetime.datetime] = None_ | Modified-since of the object. |
-| `unmodified_since` | _Optional[datetime.datetime] = None_ | Unmodified-since of the object. |
-| `fetch_checksum` | _bool = False_ | Fetch object checksum. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:--------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `file_path` | _str_ | Name of file to download. |
+| `version_id` | _Optional[str] = None_ | Version-ID of the object. |
+| `ssec` | _Optional[minio.sse.SseCustomerKey] = None_ | Server-side encryption customer key. |
+| `tmp_file_path` | _Optional[str] = None_ | Path to a temporary file. |
+| `progress` | _Optional[minio.args.ProgressType] = None_ | A progress object. |
+| `match_etag` | _Optional[str] = None_ | Match ETag of the object. |
+| `not_match_etag` | _Optional[str] = None_ | None-match ETag of the object. |
+| `modified_since` | _Optional[datetime.datetime] = None_ | Modified-since of the object. |
+| `unmodified_since` | _Optional[datetime.datetime] = None_ | Unmodified-since of the object. |
+| `fetch_checksum` | _bool = False_ | Fetch object checksum. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+
+__Return Value__
+
+| Return |
+|:-----------------------------------------|
+| _minio.models.GetObjectResponse_ object. |

__Example__

@@ -1178,34 +1268,34 @@ client.fget_object(
-### copy_object(self, *, bucket_name: str, object_name: str, source: CopySource, sse: Optional[Sse] = None, user_metadata: Optional[HTTPHeaderDict] = None, tags: Optional[Tags] = None, retention: Optional[Retention] = None, legal_hold: bool = False, metadata_directive: Optional[str] = None, tagging_directive: Optional[str] = None, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> ObjectWriteResult
+### copy_object(self, *, bucket_name: str, object_name: str, source: CopySource, sse: Optional[Sse] = None, user_metadata: Optional[HTTPHeaderDict] = None, tags: Optional[Tags] = None, retention: Optional[Retention] = None, legal_hold: bool = False, metadata_directive: Optional[str] = None, tagging_directive: Optional[str] = None, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> ObjectWriteResponse

Create an object by server-side copying data from another object. In this API, the maximum supported source object size is 5GiB.
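+
+For a quick illustration, a minimal server-side copy is sketched below. This is only a sketch: it assumes a configured `client` and an existing source object, and it uses the `SourceObject` helper that the updated examples in this change import from `minio.args`; the complete examples follow the return-value table.
+
+```py
+from minio.args import SourceObject
+
+result = client.copy_object(
+    bucket_name="my-bucket",
+    object_name="my-object",
+    source=SourceObject(
+        bucket_name="my-sourcebucket",
+        object_name="my-sourceobject",
+    ),
+)
+print(result.object_name, result.version_id)
+```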
__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:----------------------------------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `source` | _minio.commonconfig.CopySource_ | Source object information. |
-| `sse` | _Optional[minio.sse.Sse] = None_ | Server-side encryption of destination object. |
-| `user_metadata` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Any user-defined metadata to be copied along with destination object. |
-| `tags` | _Optional[minio.commonconfig.Tags] = None_ | Tags for destination object. |
-| `retention` | _Optional[minio.retention.Retention] = None_ | Retention configuration. |
-| `legal_hold` | _bool = False_ | Flag to set legal hold for destination object. |
-| `metadata_directive` | _Optional[str] = None_ | Directive used to handle user metadata for destination object. |
-| `tagging_directive` | _Optional[str] = None_ | Directive used to handle tags for destination object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:----------------------------------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `source` | _minio.args.CopySource_ | Source object information. |
+| `sse` | _Optional[minio.sse.Sse] = None_ | Server-side encryption of destination object. |
+| `user_metadata` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Any user-defined metadata to be copied along with destination object. |
+| `tags` | _Optional[minio.models.Tags] = None_ | Tags for destination object. |
+| `retention` | _Optional[minio.models.Retention] = None_ | Retention configuration. |
+| `legal_hold` | _bool = False_ | Flag to set legal hold for destination object. |
+| `metadata_directive` | _Optional[str] = None_ | Directive used to handle user metadata for destination object. |
+| `tagging_directive` | _Optional[str] = None_ | Directive used to handle tags for destination object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

-| Return |
-|:------------------------------------------|
-| _minio.helpers.ObjectWriteResult_ object. |
+| Return |
+|:-------------------------------------------|
+| _minio.models.ObjectWriteResponse_ object. |

__Example__

@@ -1253,32 +1343,32 @@ print(result.object_name, result.version_id)
-### compose_object(self, *, bucket_name: str, object_name: str, sources: list[ComposeSource], sse: Optional[Sse] = None, user_metadata: Optional[HTTPHeaderDict] = None, tags: Optional[Tags] = None, retention: Optional[Retention] = None, legal_hold: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> ObjectWriteResult
+### compose_object(self, *, bucket_name: str, object_name: str, sources: list[ComposeSource], sse: Optional[Sse] = None, user_metadata: Optional[HTTPHeaderDict] = None, tags: Optional[Tags] = None, retention: Optional[Retention] = None, legal_hold: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> ObjectWriteResponse

Create an object by combining data from different source objects using server-side copy.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:----------------------------------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `sources` | _list[minio.commonconfig.ComposeSource]_ | List of _ComposeSource_ object. |
-| `sse` | _Optional[minio.sse.Sse] = None_ | Server-side encryption of destination object. |
-| `user_metadata` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Any user-defined metadata to be copied along with destination object. |
-| `tags` | _Optional[minio.commonconfig.Tags] = None_ | Tags for destination object. |
-| `retention` | _Optional[minio.retention.Retention] = None_ | Retention configuration. |
-| `legal_hold` | _bool = False_ | Flag to set legal hold for destination object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:----------------------------------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `sources` | _list[minio.models.ComposeSource]_ | List of _ComposeSource_ objects. |
+| `sse` | _Optional[minio.sse.Sse] = None_ | Server-side encryption of destination object. |
+| `user_metadata` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Any user-defined metadata to be copied along with destination object. |
+| `tags` | _Optional[minio.models.Tags] = None_ | Tags for destination object. |
+| `retention` | _Optional[minio.models.Retention] = None_ | Retention configuration. |
+| `legal_hold` | _bool = False_ | Flag to set legal hold for destination object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

-| Return |
-|:------------------------------------------|
-| _minio.helpers.ObjectWriteResult_ object. |
+| Return |
+|:-------------------------------------------|
+| _minio.models.ObjectWriteResponse_ object. |

__Example__

@@ -1333,38 +1423,38 @@ print(result.object_name, result.version_id)
-### put_object(self, *, bucket_name: str, object_name: str, data: BinaryIO, length: int, content_type: str = "application/octet-stream", headers: Optional[HTTPHeaderDict] = None, user_metadata: Optional[HTTPHeaderDict] = None, sse: Optional[Sse] = None, progress: Optional[ProgressType] = None, part_size: int = 0, checksum: Optional[Algorithm] = None, num_parallel_uploads: int = 3, tags: Optional[Tags] = None, retention: Optional[Retention] = None, legal_hold: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> ObjectWriteResult
+### put_object(self, *, bucket_name: str, object_name: str, data: BinaryIO, length: int, content_type: str = "application/octet-stream", headers: Optional[HTTPHeaderDict] = None, user_metadata: Optional[HTTPHeaderDict] = None, sse: Optional[Sse] = None, progress: Optional[ProgressType] = None, part_size: int = 0, checksum: Optional[Algorithm] = None, num_parallel_uploads: int = 3, tags: Optional[Tags] = None, retention: Optional[Retention] = None, legal_hold: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> ObjectWriteResponse

Uploads data from a stream to an object in a bucket.

__Parameters__

-| Param | Type | Description |
-|:-----------------------|:------------------------------------------------|:----------------------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `data` | _io.BinaryIO_ | An object having callable read() returning bytes object. |
-| `length` | _int_ | Data size; -1 for unknown size and set valid `part_size`. |
-| `content_type` | _str = "application/octet-stream"_ | Content type of the object. |
-| `headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Additional headers. |
-| `user_metadata` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | User metadata of the object. |
-| `sse` | _Optional[minio.sse.Sse] = None_ | Server-side encryption. |
-| `progress` | _Optional[minio.helpers.ProgressType] = None_ | A progress object. |
-| `part_size` | _int = 0_ | Multipart part size. |
-| `checksum` | _Optional[minio.checksum.Algorithm] = None_ | Algorithm for checksum computation. |
-| `num_parallel_uploads` | _int = 3_ | Number of parallel uploads. |
-| `tags` | _Optional[minio.commonconfig.Tags] = None_ | Tags for the object. |
-| `retention` | _Optional[minio.retention.Retention] = None_ | Retention configuration. |
-| `legal_hold` | _bool = False_ | Flag to set legal hold for the object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:-----------------------|:-----------------------------------------------|:----------------------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `data` | _io.BinaryIO_ | An object having callable read() returning bytes object. |
+| `length` | _int_ | Data size; -1 for unknown size and set valid `part_size`. |
+| `content_type` | _str = "application/octet-stream"_ | Content type of the object. |
+| `headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Additional headers. |
+| `user_metadata` | _Optional[minio.compat.HTTPHeaderDict] = None_ | User metadata of the object. |
+| `sse` | _Optional[minio.sse.Sse] = None_ | Server-side encryption. |
+| `progress` | _Optional[minio.args.ProgressType] = None_ | A progress object. |
+| `part_size` | _int = 0_ | Multipart part size. |
+| `checksum` | _Optional[minio.checksum.Algorithm] = None_ | Algorithm for checksum computation. |
+| `num_parallel_uploads` | _int = 3_ | Number of parallel uploads. |
+| `tags` | _Optional[minio.models.Tags] = None_ | Tags for the object. |
+| `retention` | _Optional[minio.models.Retention] = None_ | Retention configuration. |
+| `legal_hold` | _bool = False_ | Flag to set legal hold for the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

-| Return |
-|:------------------------------------------|
-| _minio.helpers.ObjectWriteResult_ object. |
+| Return |
+|:-------------------------------------------|
+| _minio.models.ObjectWriteResponse_ object. |

__Example__

```py
@@ -1497,37 +1587,37 @@ print(
-### fput_object(self, *, bucket_name: str, object_name: str, file_path: str, content_type: str = "application/octet-stream", headers: Optional[HTTPHeaderDict] = None, user_metadata: Optional[HTTPHeaderDict] = None, sse: Optional[Sse] = None, progress: Optional[ProgressType] = None, part_size: int = 0, checksum: Optional[Algorithm] = None, num_parallel_uploads: int = 3, tags: Optional[Tags] = None, retention: Optional[Retention] = None, legal_hold: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> ObjectWriteResult
+### fput_object(self, *, bucket_name: str, object_name: str, file_path: str, content_type: str = "application/octet-stream", headers: Optional[HTTPHeaderDict] = None, user_metadata: Optional[HTTPHeaderDict] = None, sse: Optional[Sse] = None, progress: Optional[ProgressType] = None, part_size: int = 0, checksum: Optional[Algorithm] = None, num_parallel_uploads: int = 3, tags: Optional[Tags] = None, retention: Optional[Retention] = None, legal_hold: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> ObjectWriteResponse

Uploads data from a file to an object in a bucket.

__Parameters__

-| Param | Type | Description |
-|:-----------------------|:------------------------------------------------|:-------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `file_path` | _str_ | Name of file to upload. |
-| `content_type` | _str = "application/octet-stream"_ | Content type of the object. |
-| `headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Additional headers. |
-| `user_metadata` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | User metadata of the object. |
-| `sse` | _Optional[minio.sse.Sse] = None_ | Server-side encryption. |
-| `progress` | _Optional[minio.helpers.ProgressType] = None_ | A progress object. |
-| `part_size` | _int = 0_ | Multipart part size. |
-| `checksum` | _Optional[minio.checksum.Algorithm] = None_ | Algorithm for checksum computation. |
-| `num_parallel_uploads` | _int = 3_ | Number of parallel uploads. |
-| `tags` | _Optional[minio.commonconfig.Tags] = None_ | Tags for the object. |
-| `retention` | _Optional[minio.retention.Retention] = None_ | Retention configuration. |
-| `legal_hold` | _bool = False_ | Flag to set legal hold for the object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:-----------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `file_path` | _str_ | Name of file to upload. |
+| `content_type` | _str = "application/octet-stream"_ | Content type of the object. |
+| `headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Additional headers. |
+| `user_metadata` | _Optional[minio.compat.HTTPHeaderDict] = None_ | User metadata of the object. |
+| `sse` | _Optional[minio.sse.Sse] = None_ | Server-side encryption. |
+| `progress` | _Optional[minio.args.ProgressType] = None_ | A progress object. |
+| `part_size` | _int = 0_ | Multipart part size. |
+| `checksum` | _Optional[minio.checksum.Algorithm] = None_ | Algorithm for checksum computation. |
+| `num_parallel_uploads` | _int = 3_ | Number of parallel uploads. |
+| `tags` | _Optional[minio.models.Tags] = None_ | Tags for the object. |
+| `retention` | _Optional[minio.models.Retention] = None_ | Retention configuration. |
+| `legal_hold` | _bool = False_ | Flag to set legal hold for the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

-| Return |
-|:------------------------------------------|
-| _minio.helpers.ObjectWriteResult_ object. |
+| Return |
+|:-------------------------------------------|
+| _minio.models.ObjectWriteResponse_ object. |

__Example__

@@ -1647,36 +1737,86 @@ print(
 )
 ```
+
+
+### put_object_fan_out(self, *, bucket_name: str, data: BinaryIO, length: int, entries: list[PutObjectFanOutEntry], sse: Optional[Sse] = None, checksum: Optional[Checksum] = None, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> PutObjectFanOutResponse
+
+Uploads multiple objects with the same content from a single stream, with optional metadata and tags.
+
+__Parameters__
+
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:---------------------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `data` | _io.BinaryIO_ | An object having callable read() returning bytes object. |
+| `length` | _int_ | Size of the data in bytes. |
+| `entries` | _list[minio.args.PutObjectFanOutEntry]_ | Objects to be created. |
+| `sse` | _Optional[minio.sse.Sse] = None_ | Server-side encryption. |
+| `checksum` | _Optional[minio.checksum.Algorithm] = None_ | Algorithm for checksum computation. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+
+
+__Return Value__
+
+| Return |
+|:-----------------------------------------------|
+| _minio.models.PutObjectFanOutResponse_ object. |
+
+__Example__
+```py
+response = client.put_object_fan_out(
+    bucket_name="my-bucket",
+    data=io.BytesIO(b"hello"),
+    length=5,
+    entries=[
+        PutObjectFanOutEntry(key="fan-out.0"),
+        PutObjectFanOutEntry(
+            key="fan-out.1",
+            tags={"Project": "Project One", "User": "jsmith"},
+        ),
+    ],
+)
+for result in response.results:
+    print(
+        f"created {result.key} object; etag: {result.etag}, "
+        f"version-id: {result.version_id}, ",
+        f"error: {result.error}",
+    )
+```
+
-### stat_object(self, *, bucket_name: str, object_name: str, version_id: Optional[str] = None, ssec: Optional[SseCustomerKey] = None, offset: int = 0, length: Optional[int] = None, match_etag: Optional[str] = None, not_match_etag: Optional[str] = None, modified_since: Optional[datetime] = None, unmodified_since: Optional[datetime] = None, fetch_checksum: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> Object:
+### stat_object(self, *, bucket_name: str, object_name: str, version_id: Optional[str] = None, ssec: Optional[SseCustomerKey] = None, offset: int = 0, length: Optional[int] = None, match_etag: Optional[str] = None, not_match_etag: Optional[str] = None, modified_since: Optional[datetime] = None, unmodified_since: Optional[datetime] = None, fetch_checksum: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> StatObjectResponse:

Get object information and metadata of an object.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:--------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `version_id` | _Optional[str] = None_ | Version ID of the object. |
-| `ssec` | _Optional[minio.sse.SseCustomerKey] = None_ | Server-side encryption customer key. |
-| `offset` | _int = 0_ | Start byte position of object data. |
-| `length` | _Optional[int] = None_ | Number of bytes of object data from offset. |
-| `match_etag` | _Optional[str] = None_ | Match ETag of the object. |
-| `not_match_etag` | _Optional[str] = None_ | None-match ETag of the object. |
-| `modified_since` | _Optional[datetime.datetime] = None_ | Modified-since of the object. |
-| `unmodified_since` | _Optional[datetime.datetime] = None_ | Unmodified-since of the object. |
-| `fetch_checksum` | _bool = False_ | Fetch object checksum. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:--------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `ssec` | _Optional[minio.sse.SseCustomerKey] = None_ | Server-side encryption customer key. |
+| `offset` | _int = 0_ | Start byte position of object data. |
+| `length` | _Optional[int] = None_ | Number of bytes of object data from offset. |
+| `match_etag` | _Optional[str] = None_ | Match ETag of the object. |
+| `not_match_etag` | _Optional[str] = None_ | None-match ETag of the object. |
+| `modified_since` | _Optional[datetime.datetime] = None_ | Modified-since of the object. |
+| `unmodified_since` | _Optional[datetime.datetime] = None_ | Unmodified-since of the object. |
+| `fetch_checksum` | _bool = False_ | Fetch object checksum. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

-| Return |
-|:---------------------------------|
-| _minio.datatypes.Object_ object. |
+| Return |
+|:------------------------------------------|
+| _minio.models.StatObjectResponse_ object. |

__Example__

@@ -1713,14 +1853,14 @@ Remove an object.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:-------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `version_id` | _Optional[str] = None_ | Version ID of the object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Example__

@@ -1747,20 +1887,20 @@ Remove multiple objects.

__Parameters__

-| Param | Type | Description |
-|:-------------------------|:------------------------------------------------|:-------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `delete_object_list` | _Iterable[minio.deleteobjects.DeleteObject]_ | DeleteObject iterable. |
-| `bypass_governance_mode` | _bool = False_ | Bypass Governance retention mode. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:-------------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `delete_object_list` | _Iterable[minio.models.DeleteRequest.Object]_ | Iterable of objects to delete. |
+| `bypass_governance_mode` | _bool = False_ | Bypass Governance retention mode. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

| Return |
|:----------------------------------------------------|
-| _Iterator[minio.deleteobjects.DeleteError]_ object. |
+| _Iterator[minio.models.DeleteResult.Error]_ object. |

__Example__

@@ -1805,14 +1945,14 @@ Delete tags configuration of an object.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:-------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `version_id` | _Optional[str] = None_ | Version ID of the object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Example__

@@ -1820,6 +1960,76 @@ __Example__

```py
client.delete_object_tags(bucket_name="my-bucket", object_name="my-object")
```

+
+
+### get_object_attributes(self, *, bucket_name: str, object_name: str, version_id: Optional[str] = None, ssec: Optional[SseCustomerKey] = None, object_attributes: Optional[list[str]] = None, max_parts: Optional[int] = None, part_number_marker: Optional[int] = None, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> Optional[GetObjectAttributesResponse]
+
+Get attributes and metadata of an object.
+
+__Parameters__
+
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `ssec` | _Optional[minio.sse.SseCustomerKey] = None_ | Server-side encryption customer key. |
+| `object_attributes` | _Optional[list[str]] = None_ | Object attributes. |
+| `max_parts` | _Optional[int] = None_ | Maximum parts to fetch. |
+| `part_number_marker` | _Optional[int] = None_ | Part number marker to fetch remaining parts. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+
+__Return Value__
+
+| Return |
+|:-------------------------------------------------------------|
+| _Optional[minio.models.GetObjectAttributesResponse]_ object. |
+
+
+__Example__
+
+```py
+response = client.get_object_attributes(
+    bucket_name="my-bucket",
+    object_name="my-object",
+)
+```
+
+
+
+### get_object_acl(self, *, bucket_name: str, object_name: str, version_id: Optional[str] = None, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> Optional[GetObjectAclResponse]
+
+Get the access control list (ACL) of an object.
+
+__Parameters__
+
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+
+__Return Value__
+
+| Return |
+|:------------------------------------------------------|
+| _Optional[minio.models.GetObjectAclResponse]_ object. |
+
+
+__Example__
+
+```py
+response = client.get_object_acl(
+    bucket_name="my-bucket",
+    object_name="my-object",
+)
+```
+

### get_object_tags(self, *, bucket_name: str, object_name: str, version_id: Optional[str] = None, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> Optional[Tags]

Get tags configuration of an object.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:-------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `version_id` | _Optional[str] = None_ | Version ID of the object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

-| Return |
-|:--------------------------------------------|
-| _Optional[minio.commonconfig.Tags]_ object. |
+| Return |
+|:--------------------------------------|
+| _Optional[minio.models.Tags]_ object. |

__Example__

@@ -1855,15 +2065,15 @@ Set tags configuration to an object.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:-------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `tags` | _minio.commonconfig.Tags_ | Tags configuration. |
-| `version_id` | _Optional[str] = None_ | Version ID of the object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `tags` | _minio.models.Tags_ | Tags configuration. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Example__

@@ -1882,14 +2092,14 @@ Enable legal hold on an object.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:-------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `version_id` | _Optional[str] = None_ | Version ID of the object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Example__

@@ -1905,14 +2115,14 @@ Disable legal hold on an object.
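+
+A minimal call is sketched below (a sketch only, assuming a configured `client` and an object currently under legal hold; the full example follows the parameter table):
+
+```py
+client.disable_object_legal_hold(
+    bucket_name="my-bucket",
+    object_name="my-object",
+)
+```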
__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:-------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `version_id` | _Optional[str] = None_ | Version ID of the object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Example__

@@ -1928,14 +2138,14 @@ Returns true if legal hold is enabled on an object.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:-------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `version_id` | _Optional[str] = None_ | Version ID of the object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Example__

@@ -1957,20 +2167,20 @@ Get retention information of an object.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:-------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `version_id` | _Optional[str] = None_ | Version ID of the object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

-| Return |
-|:----------------------------------------------|
-| _Optional[minio.retention.Retention]_ object. |
+| Return |
+|:-------------------------------------------|
+| _Optional[minio.models.Retention]_ object. |

__Example__

@@ -1990,15 +2200,15 @@ Set retention information to an object.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:-------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `object_name` | _str_ | Object name in the bucket. |
-| `config` | _minio.retention.Retention_ | Retention configuration. |
-| `version_id` | _Optional[str] = None_ | Version ID of the object. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:-------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `config` | _minio.models.Retention_ | Retention configuration. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Example__

```py
client.set_object_retention(
@@ -2013,30 +2223,30 @@
-### prompt_object(self, *, bucket_name: str, object_name: str, prompt: str, lambda_arn: Optional[str] = None, ssec: Optional[SseCustomerKey] = None, version_id: Optional[str] = None, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, **kwargs: Optional[Any]) -> BaseHTTPResponse
+### prompt_object(self, *, bucket_name: str, object_name: str, prompt: str, lambda_arn: Optional[str] = None, ssec: Optional[SseCustomerKey] = None, version_id: Optional[str] = None, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, **kwargs: Optional[Any]) -> PromptObjectResponse

Prompt an object using natural language.

__Parameters__

-| Param | Type | Description |
-|----------------------|-------------------------------------------------|-------------------------------------------------------------------------|
-| `bucket_name` | `str` | Name of the bucket. |
-| `object_name` | `str` | Object name in the bucket. |
-| `prompt` | `str` | Natural language prompt to interact with the object using the AI model. |
-| `lambda_arn` | `Optional[str] = None` | AWS Lambda ARN to use for processing the prompt. |
-| `ssec` | `Optional[minio.sse.SseCustomerKey] = None` | Server-side encryption customer key. |
-| `version_id` | `Optional[str] = None` | Version ID of the object. |
-| `region` | `Optional[str] = None` | Region of the bucket to skip auto probing. |
-| `extra_headers` | `Optional[minio.helpers.HTTPHeaderDict] = None` | Extra headers for advanced usage. |
-| `extra_query_params` | `Optional[minio.helpers.HTTPQueryDict] = None` | Extra query parameters for advanced usage. |
-| `**kwargs` | `Optional[Any]` | Additional parameters for advanced usage. |
+| Param | Type | Description |
+|----------------------|------------------------------------------------|-------------------------------------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `object_name` | _str_ | Object name in the bucket. |
+| `prompt` | _str_ | Natural language prompt to interact with the object using the AI model. |
+| `lambda_arn` | _Optional[str] = None_ | AWS Lambda ARN to use for processing the prompt. |
+| `ssec` | _Optional[minio.sse.SseCustomerKey] = None_ | Server-side encryption customer key. |
+| `version_id` | _Optional[str] = None_ | Version ID of the object. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| `**kwargs` | _Optional[Any]_ | Additional parameters for advanced usage. |

__Return Value__

-| Return |
-|:-------------------------------------------------------------------------------|
-| _urllib3.response.BaseHTTPResponse_ or _urllib3.response.HTTPResponse_ object. |
+| Return |
+|:--------------------------------------------|
+| _minio.models.PromptObjectResponse_ object. |

__Example__

@@ -2052,7 +2262,6 @@ try:
finally:
    if response:
        response.close()
-        response.release_conn()
```

@@ -2071,7 +2280,7 @@ __Parameters__
| `request_date` | _Optional[datetime.datetime] = None_ | Request time instead of current time. |
| `version_id` | _Optional[str] = None_ | Version ID of the object. |
| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

@@ -2114,7 +2323,7 @@ __Parameters__
| `object_name` | _str_ | Object name in the bucket. |
| `expires` | _datetime.timedelta = datetime.timedelta(days=7)_ | Expiry in seconds. |
| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

@@ -2151,9 +2360,9 @@ Get form-data of PostPolicy of an object to upload its data using POST method.

__Parameters__

-| Param | Type | Description |
-|:---------|:-----------------------------|:-------------|
-| `policy` | _minio.datatypes.PostPolicy_ | Post policy. |
+| Param | Type | Description |
+|:---------|:--------------------------|:-------------|
+| `policy` | _minio.models.PostPolicy_ | Post policy. |

__Return Value__

@@ -2191,7 +2400,7 @@ __Parameters__
| `request_date` | _Optional[datetime.datetime] = None_ | Request time instead of current time. |
| `version_id` | _Optional[str] = None_ | Version ID of the object. |
| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

@@ -2237,33 +2446,33 @@ print(url)
-### upload_snowball_objects(self, *, bucket_name: str, objects: Iterable[SnowballObject], headers: Optional[HTTPHeaderDict] = None, user_metadata: Optional[HTTPHeaderDict] = None, sse: Optional[Sse] = None, tags: Optional[Tags] = None, retention: Optional[Retention] = None, legal_hold: bool = False, staging_filename: Optional[str] = None, compression: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> ObjectWriteResult
+### upload_snowball_objects(self, *, bucket_name: str, objects: Iterable[SnowballObject], headers: Optional[HTTPHeaderDict] = None, user_metadata: Optional[HTTPHeaderDict] = None, sse: Optional[Sse] = None, tags: Optional[Tags] = None, retention: Optional[Retention] = None, legal_hold: bool = False, staging_filename: Optional[str] = None, compression: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None) -> ObjectWriteResponse

Uploads multiple objects in a single put call. This is done by creating an intermediate TAR file, optionally compressed, which is uploaded to the S3 service.

__Parameters__

-| Param | Type | Description |
-|:---------------------|:------------------------------------------------|:---------------------------------------------------|
-| `bucket_name` | _str_ | Name of the bucket. |
-| `objects` | _Iterable[minio.commonconfig.SnowballObject]_ | An iterable contain snowball object. |
-| `headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Additional headers. |
-| `user_metadata` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | User metadata. |
-| `sse` | _Optional[minio.sse.Sse] = None_ | Server-side encryption. |
-| `tags` | _Optional[minio.commonconfig.Tags] = None_ | Tags for the object. |
-| `retention` | _Optional[minio.retention.Retention] = None_ | Retention configuration. |
-| `legal_hold` | _bool = False_ | Flag to set legal hold for the object. |
-| `staging_filename` | _Optional[str] = None_ | A staging filename to create intermediate tarball. |
-| `compression` | _bool = False_ | Flag to compress tarball. |
-| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
-| `extra_headers` | _Optional[minio.helpers.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
-| `extra_query_params` | _Optional[minio.helpers.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |
+| Param | Type | Description |
+|:---------------------|:-----------------------------------------------|:---------------------------------------------------|
+| `bucket_name` | _str_ | Name of the bucket. |
+| `objects` | _Iterable[minio.models.SnowballObject]_ | An iterable containing snowball objects. |
+| `headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Additional headers. |
+| `user_metadata` | _Optional[minio.compat.HTTPHeaderDict] = None_ | User metadata. |
+| `sse` | _Optional[minio.sse.Sse] = None_ | Server-side encryption. |
+| `tags` | _Optional[minio.models.Tags] = None_ | Tags for the object. |
+| `retention` | _Optional[minio.models.Retention] = None_ | Retention configuration. |
+| `legal_hold` | _bool = False_ | Flag to set legal hold for the object. |
+| `staging_filename` | _Optional[str] = None_ | A staging filename to create intermediate tarball. |
+| `compression` | _bool = False_ | Flag to compress tarball. |
+| `region` | _Optional[str] = None_ | Region of the bucket to skip auto probing. |
+| `extra_headers` | _Optional[minio.compat.HTTPHeaderDict] = None_ | Extra headers for advanced usage. |
+| `extra_query_params` | _Optional[minio.compat.HTTPQueryDict] = None_ | Extra query parameters for advanced usage. |

__Return Value__

-| Return |
-|:------------------------------------------|
-| _minio.helpers.ObjectWriteResult_ object. |
+| Return |
+|:-------------------------------------------|
+| _minio.models.ObjectWriteResponse_ object. |

__Example__

@@ -2293,6 +2502,6 @@ client.upload_snowball_objects(

## 5. Explore Further

-- [MinIO Golang Client SDK Quickstart Guide](https://min.io/docs/minio/linux/developers/go/minio-go.html)
-- [MinIO Java Client SDK Quickstart Guide](https://min.io/docs/minio/linux/developers/java/minio-java.html)
-- [MinIO JavaScript Client SDK Quickstart Guide](https://min.io/docs/minio/linux/developers/javascript/minio-javascript.html)
+- [MinIO Golang Client SDK Quickstart Guide](https://docs.min.io/enterprise/aistor-object-store/developers/sdk/go/)
+- [MinIO Java Client SDK Quickstart Guide](https://docs.min.io/enterprise/aistor-object-store/developers/sdk/java/)
+- [MinIO JavaScript Client SDK Quickstart Guide](https://docs.min.io/enterprise/aistor-object-store/developers/sdk/javascript/)
diff --git a/examples/append_object.py b/examples/append_object.py
index 90b4569d..1c362e6e 100644
--- a/examples/append_object.py
+++ b/examples/append_object.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2025 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/examples/bucket_exists.py b/examples/bucket_exists.py
index eb3bd269..ed965013 100644
--- a/examples/bucket_exists.py
+++ b/examples/bucket_exists.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/examples/compose_object.py b/examples/compose_object.py
index 9d092a44..253fa225 100644
--- a/examples/compose_object.py
+++ b/examples/compose_object.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2020 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,7 +15,7 @@ # limitations under the License.

from minio import Minio
-from minio.commonconfig import ComposeSource
+from minio.args import SourceObject
from minio.sse import SseS3

client = Minio(
@@ -25,13 +25,13 @@ )

sources = [
-    ComposeSource(
+    SourceObject(
        bucket_name="my-job-bucket", object_name="my-object-part-one",
    ),
-    ComposeSource(
+    SourceObject(
        bucket_name="my-job-bucket", object_name="my-object-part-two",
    ),
-    ComposeSource(
+    SourceObject(
        bucket_name="my-job-bucket", object_name="my-object-part-three",
    ),
]
diff --git a/examples/copy_object.py b/examples/copy_object.py
index 9314743b..66b3a48e 100644
--- a/examples/copy_object.py
+++ b/examples/copy_object.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2016-2020 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,7 +17,7 @@
from datetime import datetime, timezone

from minio import Minio
-from minio.commonconfig import REPLACE, CopySource
+from minio.args import Directive, SourceObject

client = Minio(
    endpoint="play.min.io",
@@ -29,7 +29,7 @@ )
result = client.copy_object(
    bucket_name="my-bucket", object_name="my-object",
-    source=CopySource(
+    source=SourceObject(
        bucket_name="my-sourcebucket", object_name="my-sourceobject",
    ),
)
@@ -39,7 +39,7 @@ result = client.copy_object(
    bucket_name="my-bucket", object_name="my-object",
-    source=CopySource(
+    source=SourceObject(
        bucket_name="my-sourcebucket", object_name="my-sourceobject",
        modified_since=datetime(2014, 4, 1, tzinfo=timezone.utc),
@@ -51,10 +51,10 @@ result = client.copy_object(
    bucket_name="my-bucket", object_name="my-object",
-    source=CopySource(
+    source=SourceObject(
        bucket_name="my-sourcebucket", object_name="my-sourceobject",
    ),
    user_metadata={"test_meta_key": "test_meta_value"},
-    metadata_directive=REPLACE,
+    metadata_directive=Directive.REPLACE,
)
print(result.object_name, result.version_id)
diff --git a/examples/delete_bucket_cors.py b/examples/delete_bucket_cors.py
new file mode 100644
index 00000000..54bdb965
--- /dev/null
+++ b/examples/delete_bucket_cors.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +from minio import Minio + +client = Minio( + endpoint="play.min.io", + access_key="Q3AM3UQ867SPQQA43P2F", + secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG", +) + +client.delete_bucket_cors(bucket_name="my-bucket") diff --git a/examples/delete_bucket_encryption.py b/examples/delete_bucket_encryption.py index c616d4de..4f4a914e 100644 --- a/examples/delete_bucket_encryption.py +++ b/examples/delete_bucket_encryption.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/delete_bucket_lifecycle.py b/examples/delete_bucket_lifecycle.py index 972ec411..6b93b3fa 100644 --- a/examples/delete_bucket_lifecycle.py +++ b/examples/delete_bucket_lifecycle.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/delete_bucket_notification.py b/examples/delete_bucket_notification.py index a8d2ebcc..91f81874 100644 --- a/examples/delete_bucket_notification.py +++ b/examples/delete_bucket_notification.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/delete_bucket_policy.py b/examples/delete_bucket_policy.py index c0a126ce..1f229150 100644 --- a/examples/delete_bucket_policy.py +++ b/examples/delete_bucket_policy.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/delete_bucket_replication.py b/examples/delete_bucket_replication.py index 2a3573fe..3c96bf0b 100644 --- a/examples/delete_bucket_replication.py +++ b/examples/delete_bucket_replication.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/delete_bucket_tags.py b/examples/delete_bucket_tags.py index 26414764..31beac8c 100644 --- a/examples/delete_bucket_tags.py +++ b/examples/delete_bucket_tags.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/delete_object_lock_config.py b/examples/delete_object_lock_config.py index 6274c7c6..d1304cad 100644 --- a/examples/delete_object_lock_config.py +++ b/examples/delete_object_lock_config.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/delete_object_tags.py b/examples/delete_object_tags.py index 6f92aefb..ff451056 100644 --- a/examples/delete_object_tags.py +++ b/examples/delete_object_tags.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/disable_object_legal_hold.py b/examples/disable_object_legal_hold.py index 92060fcd..fd6be14c 100644 --- a/examples/disable_object_legal_hold.py +++ b/examples/disable_object_legal_hold.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/enable_object_legal_hold.py b/examples/enable_object_legal_hold.py index ef29cafb..504e1c41 100644 --- a/examples/enable_object_legal_hold.py +++ b/examples/enable_object_legal_hold.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/fget_object.py b/examples/fget_object.py index 3245bac3..3fc4bd81 100644 --- a/examples/fget_object.py +++ b/examples/fget_object.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/fput_object.py b/examples/fput_object.py index cfe6d36f..51537bde 100644 --- a/examples/fput_object.py +++ b/examples/fput_object.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -18,8 +18,7 @@ from examples.progress import Progress from minio import Minio -from minio.commonconfig import GOVERNANCE, Tags -from minio.retention import Retention +from minio.models import Retention, Tags from minio.sse import SseCustomerKey, SseKMS, SseS3 client = Minio( @@ -110,7 +109,7 @@ object_name="my-object", file_path="my-filename", tags=tags, - retention=Retention(GOVERNANCE, date), + retention=Retention(Retention.GOVERNANCE, date), legal_hold=True, ) print( diff --git a/examples/get_bucket_cors.py b/examples/get_bucket_cors.py new file mode 100644 index 00000000..b0a660a6 --- /dev/null +++ b/examples/get_bucket_cors.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from minio import Minio + +client = Minio( + endpoint="play.min.io", + access_key="Q3AM3UQ867SPQQA43P2F", + secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG", +) + +config = client.get_bucket_cors(bucket_name="my-bucket") diff --git a/examples/get_bucket_encryption.py b/examples/get_bucket_encryption.py index 997863b4..50d451a5 100644 --- a/examples/get_bucket_encryption.py +++ b/examples/get_bucket_encryption.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/get_bucket_lifecycle.py b/examples/get_bucket_lifecycle.py index cc9e3381..32db0b79 100644 --- a/examples/get_bucket_lifecycle.py +++ b/examples/get_bucket_lifecycle.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/get_bucket_notification.py b/examples/get_bucket_notification.py index 226a8159..8b499bab 100644 --- a/examples/get_bucket_notification.py +++ b/examples/get_bucket_notification.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/get_bucket_policy.py b/examples/get_bucket_policy.py index 378f9d41..c75be4a2 100644 --- a/examples/get_bucket_policy.py +++ b/examples/get_bucket_policy.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. 
-# Copyright (C) 2016 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/get_bucket_replication.py b/examples/get_bucket_replication.py index ec2dc373..773e9f77 100644 --- a/examples/get_bucket_replication.py +++ b/examples/get_bucket_replication.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/get_bucket_tags.py b/examples/get_bucket_tags.py index 89aebeb2..b9f08844 100644 --- a/examples/get_bucket_tags.py +++ b/examples/get_bucket_tags.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/get_bucket_versioning.py b/examples/get_bucket_versioning.py index 20e1e2a7..58eb5a6c 100644 --- a/examples/get_bucket_versioning.py +++ b/examples/get_bucket_versioning.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/get_object.py b/examples/get_object.py index 0a247a7b..59b166dd 100644 --- a/examples/get_object.py +++ b/examples/get_object.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -34,7 +34,6 @@ finally: if response: response.close() - response.release_conn() # Get data of an object of version-ID. response = None @@ -48,7 +47,6 @@ finally: if response: response.close() - response.release_conn() # Get data of an object from offset and length. response = None @@ -63,7 +61,6 @@ finally: if response: response.close() - response.release_conn() # Get data of an SSE-C encrypted object. response = None @@ -77,4 +74,3 @@ finally: if response: response.close() - response.release_conn() diff --git a/examples/get_object_acl.py b/examples/get_object_acl.py new file mode 100644 index 00000000..312120a5 --- /dev/null +++ b/examples/get_object_acl.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from minio import Minio + +client = Minio( + endpoint="play.min.io", + access_key="Q3AM3UQ867SPQQA43P2F", + secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG", +) + +response = client.get_object_acl( + bucket_name="my-bucket", + object_name="my-object", +) diff --git a/examples/get_object_attributes.py b/examples/get_object_attributes.py new file mode 100644 index 00000000..f4ac6c5a --- /dev/null +++ b/examples/get_object_attributes.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from minio import Minio + +client = Minio( + endpoint="play.min.io", + access_key="Q3AM3UQ867SPQQA43P2F", + secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG", +) + +response = client.get_object_attributes( + bucket_name="my-bucket", + object_name="my-object", +) diff --git a/examples/get_object_lock_config.py b/examples/get_object_lock_config.py index 8a4493c4..61b6efa9 100644 --- a/examples/get_object_lock_config.py +++ b/examples/get_object_lock_config.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/get_object_retention.py b/examples/get_object_retention.py index 5a6d786c..550e4800 100644 --- a/examples/get_object_retention.py +++ b/examples/get_object_retention.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/get_object_tags.py b/examples/get_object_tags.py index 47b030c9..6e274b13 100644 --- a/examples/get_object_tags.py +++ b/examples/get_object_tags.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
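Note on the `get_object.py` hunks above: every `response.release_conn()` call is removed, leaving `response.close()` as the only cleanup step. The following is a minimal sketch of the resulting read pattern, assuming `close()` on the returned response now also releases the underlying connection, as the updated example implies:

from minio import Minio

client = Minio(
    endpoint="play.min.io",
    access_key="Q3AM3UQ867SPQQA43P2F",
    secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG",
)

# Read the full object body; after this change, close() alone is
# assumed to return the connection to the pool.
response = None
try:
    response = client.get_object(
        bucket_name="my-bucket",
        object_name="my-object",
    )
    data = response.read()
    print(len(data))
finally:
    if response:
        response.close()

The same simplified cleanup applies to the `prompt_object.py` hunk later in this patch, which drops its `release_conn()` call as well.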
diff --git a/examples/get_presigned_url.py b/examples/get_presigned_url.py index 545c300e..42abfe7e 100644 --- a/examples/get_presigned_url.py +++ b/examples/get_presigned_url.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/is_object_legal_hold_enabled.py b/examples/is_object_legal_hold_enabled.py index c22b3aa5..6821b2ee 100644 --- a/examples/is_object_legal_hold_enabled.py +++ b/examples/is_object_legal_hold_enabled.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/list_buckets.py b/examples/list_buckets.py index 88bfa99c..4f62046f 100644 --- a/examples/list_buckets.py +++ b/examples/list_buckets.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/list_objects.py b/examples/list_objects.py index 498597cc..a1fb5bd1 100644 --- a/examples/list_objects.py +++ b/examples/list_objects.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/listen_bucket_notification.py b/examples/listen_bucket_notification.py index f48b2e2d..04817e05 100644 --- a/examples/listen_bucket_notification.py +++ b/examples/listen_bucket_notification.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2016 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/make_bucket.py b/examples/make_bucket.py index 63c42677..1bfcbbfe 100644 --- a/examples/make_bucket.py +++ b/examples/make_bucket.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -26,9 +26,9 @@ client.make_bucket(bucket_name="my-bucket") # Create bucket on specific region. 
-client.make_bucket(bucket_name="my-bucket", location="us-west-1") +client.make_bucket(bucket_name="my-bucket", region="us-west-1") # Create bucket with object-lock feature on specific region. client.make_bucket( - bucket_name="my-bucket", location="eu-west-2", object_lock=True, + bucket_name="my-bucket", region="eu-west-2", object_lock=True, ) diff --git a/examples/minio_with_assume_role_provider.py b/examples/minio_with_assume_role_provider.py index 9c539f5f..fd6499a4 100644 --- a/examples/minio_with_assume_role_provider.py +++ b/examples/minio_with_assume_role_provider.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/minio_with_aws_config_provider.py b/examples/minio_with_aws_config_provider.py index 2315e098..a7f48842 100644 --- a/examples/minio_with_aws_config_provider.py +++ b/examples/minio_with_aws_config_provider.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/minio_with_certificate_identity_provider.py b/examples/minio_with_certificate_identity_provider.py index a9229f74..53e6ef22 100644 --- a/examples/minio_with_certificate_identity_provider.py +++ b/examples/minio_with_certificate_identity_provider.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2022 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/minio_with_chained_provider.py b/examples/minio_with_chained_provider.py index 3886693f..b0dcff7a 100644 --- a/examples/minio_with_chained_provider.py +++ b/examples/minio_with_chained_provider.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/minio_with_client_grants_provider.py b/examples/minio_with_client_grants_provider.py index 5045e666..66015f99 100644 --- a/examples/minio_with_client_grants_provider.py +++ b/examples/minio_with_client_grants_provider.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
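Note on the `make_bucket.py` hunk above: the `location` keyword is renamed to `region`, matching the parameter name used throughout the rest of the API, while the `object_lock` flag is unchanged. A minimal migration sketch, assuming `bucket_exists` keeps the keyword-argument form used in the existing examples:

from minio import Minio

client = Minio(
    endpoint="play.min.io",
    access_key="Q3AM3UQ867SPQQA43P2F",
    secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG",
)

# Pass region= where callers previously passed location=.
if not client.bucket_exists(bucket_name="my-bucket"):
    client.make_bucket(
        bucket_name="my-bucket",
        region="us-west-1",
        object_lock=True,
    )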
diff --git a/examples/minio_with_env_aws_provider.py b/examples/minio_with_env_aws_provider.py index 32dfe32b..01d88066 100644 --- a/examples/minio_with_env_aws_provider.py +++ b/examples/minio_with_env_aws_provider.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/minio_with_env_minio_provider.py b/examples/minio_with_env_minio_provider.py index a31d67d4..60fa8934 100644 --- a/examples/minio_with_env_minio_provider.py +++ b/examples/minio_with_env_minio_provider.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/minio_with_iam_aws_provider.py b/examples/minio_with_iam_aws_provider.py index 6b745364..8fde866a 100644 --- a/examples/minio_with_iam_aws_provider.py +++ b/examples/minio_with_iam_aws_provider.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/minio_with_ldap_identity_provider.py b/examples/minio_with_ldap_identity_provider.py index 1ccdb82e..f22945d4 100644 --- a/examples/minio_with_ldap_identity_provider.py +++ b/examples/minio_with_ldap_identity_provider.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/minio_with_minio_client_config_provider.py b/examples/minio_with_minio_client_config_provider.py index 9ae5d9f1..e7cab394 100644 --- a/examples/minio_with_minio_client_config_provider.py +++ b/examples/minio_with_minio_client_config_provider.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/minio_with_web_identity_provider.py b/examples/minio_with_web_identity_provider.py index 57d624b1..db09b456 100644 --- a/examples/minio_with_web_identity_provider.py +++ b/examples/minio_with_web_identity_provider.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/presigned_get_object.py b/examples/presigned_get_object.py index 7ce66e7c..293aa044 100644 --- a/examples/presigned_get_object.py +++ b/examples/presigned_get_object.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/presigned_post_policy.py b/examples/presigned_post_policy.py index 8decea86..a643305a 100644 --- a/examples/presigned_post_policy.py +++ b/examples/presigned_post_policy.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 to 2023 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,7 +17,7 @@ from datetime import datetime, timedelta from minio import Minio -from minio.datatypes import PostPolicy +from minio.models import PostPolicy client = Minio( endpoint="play.min.io", diff --git a/examples/presigned_put_object.py b/examples/presigned_put_object.py index 0ddd2717..51df4ff9 100644 --- a/examples/presigned_put_object.py +++ b/examples/presigned_put_object.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/progress.py b/examples/progress.py index f4551e44..0ce7dedb 100644 --- a/examples/progress.py +++ b/examples/progress.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2018 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,13 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -This module implements a progress printer while communicating with MinIO server - -:copyright: (c) 2018 by MinIO, Inc. -:license: Apache 2.0, see LICENSE for more details. - -""" +"""A sample progress implementation.""" import sys import time diff --git a/examples/prompt_object.py b/examples/prompt_object.py index 0ffd723d..edae2779 100644 --- a/examples/prompt_object.py +++ b/examples/prompt_object.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2025 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -33,4 +33,3 @@ finally: if response: response.close() - response.release_conn() diff --git a/examples/put_object.py b/examples/put_object.py index afab26f4..1c543744 100644 --- a/examples/put_object.py +++ b/examples/put_object.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -20,8 +20,7 @@ from examples.progress import Progress from minio import Minio -from minio.commonconfig import GOVERNANCE, Tags -from minio.retention import Retention +from minio.models import Retention, Tags from minio.sse import SseCustomerKey, SseKMS, SseS3 client = Minio( @@ -135,7 +134,7 @@ data=io.BytesIO(b"hello"), length=5, tags=tags, - retention=Retention(GOVERNANCE, date), + retention=Retention(Retention.GOVERNANCE, date), legal_hold=True, ) print( diff --git a/examples/put_object_fan_out.py b/examples/put_object_fan_out.py new file mode 100644 index 00000000..df401d52 --- /dev/null +++ b/examples/put_object_fan_out.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io + +from minio import Minio +from minio.args import PutObjectFanOutEntry + +client = Minio( + endpoint="play.min.io", + access_key="Q3AM3UQ867SPQQA43P2F", + secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG", +) + +response = client.put_object_fan_out( + bucket_name="my-bucket", + data=io.BytesIO(b"hello"), + length=5, + entries=[ + PutObjectFanOutEntry(key="fan-out.0"), + PutObjectFanOutEntry( + key="fan-out.1", + tags={"Project": "Project One", "User": "jsmith"}, + ), + ], +) +for result in response.results: + print( + f"created {result.key} object; etag: {result.etag}, " + f"version-id: {result.version_id}, ", + f"error: {result.error}", + ) diff --git a/examples/remove_bucket.py b/examples/remove_bucket.py index bbe4fd74..55059567 100644 --- a/examples/remove_bucket.py +++ b/examples/remove_bucket.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/remove_object.py b/examples/remove_object.py index aff25033..39af82c5 100644 --- a/examples/remove_object.py +++ b/examples/remove_object.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/remove_objects.py b/examples/remove_objects.py index 60d1b7f3..9d08aa3a 100644 --- a/examples/remove_objects.py +++ b/examples/remove_objects.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +15,7 @@ # limitations under the License. from minio import Minio -from minio.deleteobjects import DeleteObject +from minio.models import DeleteRequest client = Minio( endpoint="play.min.io", @@ -26,10 +26,10 @@ # Remove list of objects. errors = client.remove_objects( bucket_name="my-bucket", - delete_object_list=[ - DeleteObject(name="my-object1"), - DeleteObject(name="my-object2"), - DeleteObject( + objects=[ + DeleteRequest.Object(name="my-object1"), + DeleteRequest.Object(name="my-object2"), + DeleteRequest.Object( name="my-object3", version_id="13f88b18-8dcd-4c83-88f2-8631fdb6250c", ), @@ -40,7 +40,7 @@ # Remove a prefix recursively. delete_object_list = map( - lambda x: DeleteObject(x.object_name), + lambda x: DeleteRequest.Object(x.object_name), client.list_objects( bucket_name="my-bucket", prefix="my/prefix/", @@ -49,7 +49,7 @@ ) errors = client.remove_objects( bucket_name="my-bucket", - delete_object_list=delete_object_list, + objects=delete_object_list, ) for error in errors: print("error occurred when deleting object", error) diff --git a/examples/select_object_content.py b/examples/select_object_content.py index 3e6fff75..4a060647 100644 --- a/examples/select_object_content.py +++ b/examples/select_object_content.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2019 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,8 +16,7 @@ from minio import Minio -from minio.select import (CSVInputSerialization, CSVOutputSerialization, - SelectRequest) +from minio.models import SelectObjectContentRequest client = Minio( endpoint="play.min.io", @@ -28,10 +27,14 @@ with client.select_object_content( bucket_name="my-bucket", object_name="my-object.csv", - request=SelectRequest( + request=SelectObjectContentRequest( expression="select * from S3Object", - input_serialization=CSVInputSerialization(), - output_serialization=CSVOutputSerialization(), + input_serialization=( + SelectObjectContentRequest.CSVInputSerialization() + ), + output_serialization=( + SelectObjectContentRequest.CSVOutputSerialization() + ), request_progress=True, ), ) as result: diff --git a/examples/set_bucket_cors.py b/examples/set_bucket_cors.py new file mode 100644 index 00000000..ec7d092c --- /dev/null +++ b/examples/set_bucket_cors.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from minio import Minio +from minio.models import CORSConfig + +client = Minio( + endpoint="play.min.io", + access_key="Q3AM3UQ867SPQQA43P2F", + secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG", +) + +config = CORSConfig( + rules=[ + CORSConfig.CORSRule( + allowed_headers=["*"], + allowed_methods=["PUT", "POST", "DELETE"], + allowed_origins=["http://www.example.com"], + expose_headers=["x-amz-server-side-encryption"], + max_age_seconds=3000, + ), + CORSConfig.CORSRule( + allowed_methods=["GET"], + allowed_origins=["*"], + ), + ], +) + +client.set_bucket_cors(bucket_name="my-bucket", config=config) diff --git a/examples/set_bucket_encryption.py b/examples/set_bucket_encryption.py index 33652684..1dcd7757 100644 --- a/examples/set_bucket_encryption.py +++ b/examples/set_bucket_encryption.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +15,7 @@ # limitations under the License. from minio import Minio -from minio.sseconfig import Rule, SSEConfig +from minio.models import SSEConfig client = Minio( endpoint="play.min.io", @@ -24,5 +24,7 @@ ) client.set_bucket_encryption( - bucket_name="my-bucket", config=SSEConfig(Rule.new_sse_s3_rule()), + bucket_name="my-bucket", config=SSEConfig( + SSEConfig.Rule.new_sse_s3_rule(), + ), ) diff --git a/examples/set_bucket_lifecycle.py b/examples/set_bucket_lifecycle.py index 4ea26e52..ed56f285 100644 --- a/examples/set_bucket_lifecycle.py +++ b/examples/set_bucket_lifecycle.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,8 +15,7 @@ # limitations under the License. 
from minio import Minio -from minio.commonconfig import ENABLED, Filter -from minio.lifecycleconfig import Expiration, LifecycleConfig, Rule, Transition +from minio.models import Filter, LifecycleConfig, Status client = Minio( endpoint="play.min.io", @@ -26,17 +25,19 @@ config = LifecycleConfig( [ - Rule( - status=ENABLED, + LifecycleConfig.Rule( + status=Status.ENABLED, rule_filter=Filter(prefix="documents/"), rule_id="rule1", - transition=Transition(days=30, storage_class="GLACIER"), + transition=LifecycleConfig.Transition( + days=30, storage_class="GLACIER", + ), ), - Rule( - status=ENABLED, + LifecycleConfig.Rule( + status=Status.ENABLED, rule_filter=Filter(prefix="logs/"), rule_id="rule2", - expiration=Expiration(days=365), + expiration=LifecycleConfig.Expiration(days=365), ), ], ) diff --git a/examples/set_bucket_notification.py b/examples/set_bucket_notification.py index 7d20ab75..62526db2 100644 --- a/examples/set_bucket_notification.py +++ b/examples/set_bucket_notification.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,8 +15,7 @@ # limitations under the License. from minio import Minio -from minio.notificationconfig import (NotificationConfig, PrefixFilterRule, - QueueConfig) +from minio.models import NotificationConfig client = Minio( endpoint="play.min.io", @@ -26,11 +25,11 @@ config = NotificationConfig( queue_config_list=[ - QueueConfig( + NotificationConfig.QueueConfig( queue="QUEUE-ARN-OF-THIS-BUCKET", events=["s3:ObjectCreated:*"], config_id="1", - prefix_filter_rule=PrefixFilterRule("abc"), + prefix_filter_rule=NotificationConfig.PrefixFilterRule("abc"), ), ], ) diff --git a/examples/set_bucket_policy.py b/examples/set_bucket_policy.py index bc25b73e..03d304c3 100644 --- a/examples/set_bucket_policy.py +++ b/examples/set_bucket_policy.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2016 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/set_bucket_replication.py b/examples/set_bucket_replication.py index 76b6025c..d3a69600 100644 --- a/examples/set_bucket_replication.py +++ b/examples/set_bucket_replication.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,9 +15,7 @@ # limitations under the License. 
from minio import Minio -from minio.commonconfig import DISABLED, ENABLED, AndOperator, Filter -from minio.replicationconfig import (DeleteMarkerReplication, Destination, - ReplicationConfig, Rule) +from minio.models import Filter, ReplicationConfig, Status client = Minio( endpoint="play.min.io", @@ -28,16 +26,16 @@ config = ReplicationConfig( role="REPLACE-WITH-ACTUAL-ROLE", rules=[ - Rule( - destination=Destination( + ReplicationConfig.Rule( + destination=ReplicationConfig.Destination( "REPLACE-WITH-ACTUAL-DESTINATION-BUCKET-ARN", ), - status=ENABLED, - delete_marker_replication=DeleteMarkerReplication( - DISABLED, + status=Status.ENABLED, + delete_marker_replication=ReplicationConfig.DeleteMarkerReplication( + Status.DISABLED, ), rule_filter=Filter( - AndOperator( + Filter.And( "TaxDocs", {"key1": "value1", "key2": "value2"}, ), diff --git a/examples/set_bucket_tags.py b/examples/set_bucket_tags.py index af513667..ed5b4f39 100644 --- a/examples/set_bucket_tags.py +++ b/examples/set_bucket_tags.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +15,7 @@ # limitations under the License. from minio import Minio -from minio.commonconfig import Tags +from minio.models import Tags client = Minio( endpoint="play.min.io", diff --git a/examples/set_bucket_versioning.py b/examples/set_bucket_versioning.py index 5f164e82..1afd54be 100644 --- a/examples/set_bucket_versioning.py +++ b/examples/set_bucket_versioning.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,8 +15,7 @@ # limitations under the License. from minio import Minio -from minio.commonconfig import ENABLED -from minio.versioningconfig import VersioningConfig +from minio.models import VersioningConfig client = Minio( endpoint="play.min.io", @@ -26,5 +25,5 @@ client.set_bucket_versioning( bucket_name="my-bucket", - config=VersioningConfig(ENABLED), + config=VersioningConfig(VersioningConfig.ENABLED), ) diff --git a/examples/set_object_lock_config.py b/examples/set_object_lock_config.py index 317bc4a4..274820cb 100644 --- a/examples/set_object_lock_config.py +++ b/examples/set_object_lock_config.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,8 +15,7 @@ # limitations under the License. 
from minio import Minio -from minio.commonconfig import GOVERNANCE -from minio.objectlockconfig import DAYS, ObjectLockConfig +from minio.models import ObjectLockConfig client = Minio( endpoint="play.min.io", @@ -24,5 +23,9 @@ secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG", ) -config = ObjectLockConfig(mode=GOVERNANCE, duration=15, duration_unit=DAYS) +config = ObjectLockConfig( + mode=ObjectLockConfig.GOVERNANCE, + duration=15, + duration_unit=ObjectLockConfig.DAYS, +) client.set_object_lock_config(bucket_name="my-bucket", config=config) diff --git a/examples/set_object_retention.py b/examples/set_object_retention.py index 189fad04..c061b671 100644 --- a/examples/set_object_retention.py +++ b/examples/set_object_retention.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,8 +17,7 @@ from datetime import datetime, timedelta from minio import Minio -from minio.commonconfig import GOVERNANCE -from minio.retention import Retention +from minio.models import Retention client = Minio( endpoint="play.min.io", @@ -26,7 +25,8 @@ secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG", ) -config = Retention(GOVERNANCE, datetime.utcnow() + timedelta(days=10)) +config = Retention(Retention.GOVERNANCE, + datetime.utcnow() + timedelta(days=10)) client.set_object_retention( bucket_name="my-bucket", object_name="my-object", diff --git a/examples/set_object_tags.py b/examples/set_object_tags.py index 36e9dbe2..fe994df6 100644 --- a/examples/set_object_tags.py +++ b/examples/set_object_tags.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage. -# Copyright (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +15,7 @@ # limitations under the License. from minio import Minio -from minio.commonconfig import Tags +from minio.models import Tags client = Minio( endpoint="play.min.io", diff --git a/examples/stat_object.py b/examples/stat_object.py index 707685cc..b2b62e85 100644 --- a/examples/stat_object.py +++ b/examples/stat_object.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/examples/upload_snowball_objects.py b/examples/upload_snowball_objects.py index 6270a2d4..b5011da8 100644 --- a/examples/upload_snowball_objects.py +++ b/examples/upload_snowball_objects.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2023 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -18,7 +18,7 @@ from datetime import datetime from minio import Minio -from minio.commonconfig import SnowballObject +from minio.args import SnowballObject client = Minio( endpoint="play.min.io", diff --git a/minio/__init__.py b/minio/__init__.py index 99facfbf..1c653e28 100644 --- a/minio/__init__.py +++ b/minio/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015, 2016, 2017 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +15,7 @@ # limitations under the License. """ -minio - MinIO Python SDK for Amazon S3 Compatible Cloud Storage +MinIO Python SDK for Amazon S3 Compatible Cloud Storage >>> from minio import Minio >>> client = Minio( @@ -26,20 +26,15 @@ >>> buckets = client.list_buckets() >>> for bucket in buckets: ... print(bucket.name, bucket.creation_date) - -:copyright: (C) 2015-2020 MinIO, Inc. -:license: Apache 2.0, see LICENSE for more details. """ __title__ = "minio-py" __author__ = "MinIO, Inc." __version__ = "7.2.20" __license__ = "Apache 2.0" -__copyright__ = "Copyright 2015, 2016, 2017, 2018, 2019, 2020 MinIO, Inc." +__copyright__ = "Copyright [2014] - [2025] MinIO, Inc." -# pylint: disable=unused-import,useless-import-alias -from .api import Minio as Minio -from .error import InvalidResponseError as InvalidResponseError -from .error import S3Error as S3Error -from .error import ServerError as ServerError -from .minioadmin import MinioAdmin as MinioAdmin +# pylint: disable=unused-import +from .error import InvalidResponseError, S3Error, ServerError +from .minio import Minio +from .minioadmin import MinioAdmin diff --git a/minio/args.py b/minio/args.py new file mode 100644 index 00000000..e50080c4 --- /dev/null +++ b/minio/args.py @@ -0,0 +1,207 @@ +# -*- coding: utf-8 -*- +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# pylint: disable=invalid-name + +"""Argument classes for APIs.""" + +from __future__ import annotations + +import json +from dataclasses import dataclass +from datetime import datetime +from enum import Enum +from typing import IO, Any, Optional + +from typing_extensions import Protocol + +from .compat import quote +from .models import Retention +from .sse import SseCustomerKey +from .time import to_http_header, to_iso8601utc + + +class Directive(str, Enum): + """metadata and tagging directive.""" + COPY = "COPY" + REPLACE = "REPLACE" + + +class ProgressType(Protocol): + """typing stub for Put/Get object progress.""" + + def set_meta(self, object_name: str, total_length: int): + """Set process meta information.""" + + def update(self, length: int): + """Set current progress length.""" + + +@dataclass(frozen=True) +class PutObjectFanOutEntry: + """PutObjectFanOut entry.""" + key: str + user_metadata: Optional[dict[str, str]] = None + tags: Optional[dict[str, str]] = None + content_type: Optional[str] = None + content_encoding: Optional[str] = None + content_disposition: Optional[str] = None + content_language: Optional[str] = None + cache_control: Optional[str] = None + retention: Optional[Retention] = None + + def to_json(self) -> str: + """Convert the entry to JSON string.""" + mapping: dict[str, Any] = {"key": self.key} + if self.user_metadata: + normalized = {} + for key, value in self.user_metadata.items(): + key = ( + key if key.lower().startswith("x-amz-meta-") + else ("x-amz-meta-" + key) + ) + normalized[key] = value + mapping["metadata"] = normalized + if self.tags: + mapping["tags"] = self.tags + if self.content_type: + mapping["contentType"] = self.content_type + if self.content_encoding: + mapping["contentEncoding"] = self.content_encoding + if self.content_disposition: + mapping["contentDisposition"] = self.content_disposition + if self.content_language: + mapping["contentLanguage"] = self.content_language + if self.cache_control: + mapping["cacheControl"] = self.cache_control + if self.retention: + mapping["retention"] = self.retention.mode + mapping["retainUntil"] = to_iso8601utc( + self.retention.retain_until_date, + ) + return json.dumps(mapping) + + +@dataclass(frozen=True) +class SnowballObject: + """A source object definition for upload_snowball_objects method.""" + object_name: str + filename: Optional[str] = None + data: Optional[IO[bytes]] = None + length: Optional[int] = None + mod_time: Optional[datetime] = None + + def __post_init__(self): + if not (self.filename is not None) ^ (self.data is not None): + raise ValueError("only one of filename or data must be provided") + if self.data is not None and self.length is None: + raise ValueError("length must be provided for data") + + +@dataclass(frozen=True) +class SourceObject: + """Source object for copy and compose object.""" + bucket_name: str + object_name: str + region: Optional[str] = None + version_id: Optional[str] = None + ssec: Optional[SseCustomerKey] = None + offset: Optional[int] = None + length: Optional[int] = None + match_etag: Optional[str] = None + not_match_etag: Optional[str] = None + modified_since: Optional[datetime] = None + unmodified_since: Optional[datetime] = None + object_size: Optional[int] = None + + def __post_init__(self): + if self.offset is not None and self.offset < 0: + raise ValueError("offset should be zero or greater") + if self.length is not None and self.length <= 0: + raise ValueError("length should be greater than zero") + if self.match_etag is not None and self.match_etag 
== "": + raise ValueError("match_etag must not be empty") + if self.not_match_etag is not None and self.not_match_etag == "": + raise ValueError("not_match_etag must not be empty") + + @property + def headers(self) -> dict[str, str]: + """Generate copy source headers.""" + copy_source = quote("/" + self.bucket_name + "/" + self.object_name) + if self.version_id: + copy_source += "?versionId=" + quote(self.version_id) + + headers = {"x-amz-copy-source": copy_source} + if self.ssec: + headers.update(self.ssec.copy_headers()) + if self.match_etag: + headers["x-amz-copy-source-if-match"] = self.match_etag + if self.not_match_etag: + headers["x-amz-copy-source-if-none-match"] = self.not_match_etag + if self.modified_since: + headers["x-amz-copy-source-if-modified-since"] = ( + to_http_header(self.modified_since) + ) + if self.unmodified_since: + headers["x-amz-copy-source-if-unmodified-since"] = ( + to_http_header(self.unmodified_since) + ) + return headers + + def of( + self, + object_size: int, + etag: str, + ) -> SourceObject: + """Create source object with object size and etag.""" + if self.match_etag is None: + if etag is None: + raise ValueError("etag must be provided") + elif etag is not None and self.match_etag != etag: + raise ValueError( + f"match_etag {self.match_etag} and passed etag {etag} does not " + "match", + ) + version = f"?versionId={self.version_id}" if self.version_id else "" + + def _raise_error(arg: int, arg_name: str): + raise ValueError( + f"source {self.bucket_name}/{self.object_name}{version}: " + f"{arg_name} {arg} is beyond object size {object_size}", + ) + if self.offset is not None and self.offset >= object_size: + _raise_error(self.offset, "offset") + if self.length is not None: + if self.length > object_size: + _raise_error(self.length, "length") + compose_size = (self.offset or 0) + self.length + if compose_size > object_size: + _raise_error(compose_size, "compose size") + + return SourceObject( + bucket_name=self.bucket_name, + object_name=self.object_name, + region=self.region, + version_id=self.version_id, + ssec=self.ssec, + offset=self.offset, + length=self.length, + match_etag=self.match_etag or etag, + not_match_etag=self.not_match_etag, + modified_since=self.modified_since, + unmodified_since=self.unmodified_since, + object_size=object_size, + ) diff --git a/minio/checksum.py b/minio/checksum.py index e86b33fb..2605a464 100644 --- a/minio/checksum.py +++ b/minio/checksum.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2025 MinIO, Inc. +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,7 +16,7 @@ """Checksum functions.""" -from __future__ import absolute_import, annotations, division, unicode_literals +from __future__ import annotations import base64 import binascii @@ -35,6 +35,33 @@ UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD" +def md5sum_hash(data: Optional[str | bytes]) -> Optional[str]: + """Compute MD5 of data and return hash as Base64 encoded value.""" + if data is None: + return None + + # indicate md5 hashing algorithm is not used in a security context. + # Refer https://bugs.python.org/issue9216 for more information. 
+ hasher = hashlib.new( # type: ignore[call-arg] + "md5", + usedforsecurity=False, + ) + hasher.update(data.encode() if isinstance(data, str) else data) + md5sum = base64.b64encode(hasher.digest()) + return md5sum.decode() if isinstance(md5sum, bytes) else md5sum + + +def sha256_hash(data: Optional[str | bytes]) -> str: + """Compute SHA-256 of data and return hash as hex encoded value.""" + data = data or b"" + hasher = hashlib.sha256() + hasher.update(data.encode() if isinstance(data, str) else data) + sha256sum = hasher.hexdigest() + if isinstance(sha256sum, bytes): + return sha256sum.decode() + return sha256sum + + def base64_string(data: bytes) -> str: """Encodes the specified bytes to Base64 string.""" return base64.b64encode(data).decode("ascii") diff --git a/minio/commonconfig.py b/minio/commonconfig.py deleted file mode 100644 index 993e6967..00000000 --- a/minio/commonconfig.py +++ /dev/null @@ -1,522 +0,0 @@ -# -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Common request/response configuration of S3 APIs.""" -# pylint: disable=invalid-name - -from __future__ import absolute_import, annotations - -from abc import ABC, abstractmethod -from dataclasses import dataclass, field -from datetime import datetime -from typing import IO, Optional, Type, TypeVar, cast -from xml.etree import ElementTree as ET - -from .error import MinioException -from .helpers import quote -from .sse import SseCustomerKey -from .time import to_http_header -from .xml import SubElement, find, findall, findtext - -COPY = "COPY" -REPLACE = "REPLACE" -DISABLED = "Disabled" -ENABLED = "Enabled" -GOVERNANCE = "GOVERNANCE" -COMPLIANCE = "COMPLIANCE" -_MAX_KEY_LENGTH = 128 -_MAX_VALUE_LENGTH = 256 -_MAX_OBJECT_TAG_COUNT = 10 -_MAX_TAG_COUNT = 50 - -A = TypeVar("A", bound="Tags") - - -class Tags(dict): - """dict extended to bucket/object tags.""" - - def __init__(self, for_object: bool = False): - self._for_object = for_object - super().__init__() - - def __setitem__(self, key: str, value: str): - limit = _MAX_OBJECT_TAG_COUNT if self._for_object else _MAX_TAG_COUNT - if len(self) == limit: - tag_type = "object" if self._for_object else "bucket" - raise ValueError(f"only {limit} {tag_type} tags are allowed") - if not key or len(key) > _MAX_KEY_LENGTH or "&" in key: - raise ValueError(f"invalid tag key '{key}'") - if value is None or len(value) > _MAX_VALUE_LENGTH or "&" in value: - raise ValueError(f"invalid tag value '{value}'") - super().__setitem__(key, value) - - @classmethod - def new_bucket_tags(cls: Type[A]) -> A: - """Create new bucket tags.""" - return cls() - - @classmethod - def new_object_tags(cls: Type[A]) -> A: - """Create new object tags.""" - return cls(True) - - @classmethod - def fromxml(cls: Type[A], element: ET.Element) -> A: - """Create new object with values from XML element.""" - elements = findall(element, "Tag") - obj = cls() - for tag in elements: - key = cast(str, findtext(tag, 
"Key", True)) - value = cast(str, findtext(tag, "Value", True)) - obj[key] = value - return obj - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - for key, value in self.items(): - tag = SubElement(element, "Tag") - SubElement(tag, "Key", key) - SubElement(tag, "Value", value) - return element - - -B = TypeVar("B", bound="Tag") - - -@dataclass(frozen=True) -class Tag: - """Tag.""" - - key: str - value: str - - def __post_init__(self): - if not self.key: - raise ValueError("key must be provided") - if self.value is None: - raise ValueError("value must be provided") - - @classmethod - def fromxml(cls: Type[B], element: ET.Element) -> B: - """Create new object with values from XML element.""" - element = cast(ET.Element, find(element, "Tag", True)) - key = cast(str, findtext(element, "Key", True)) - value = cast(str, findtext(element, "Value", True)) - return cls(key, value) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "Tag") - SubElement(element, "Key", self.key) - SubElement(element, "Value", self.value) - return element - - -C = TypeVar("C", bound="AndOperator") - - -@dataclass(frozen=True) -class AndOperator: - """AND operator.""" - - prefix: Optional[str] = None - tags: Optional[Tags] = None - - def __post_init__(self): - if self.prefix is None and not self.tags: - raise ValueError("at least prefix or tags must be provided") - - @classmethod - def fromxml(cls: Type[C], element: ET.Element) -> C: - """Create new object with values from XML element.""" - element = cast(ET.Element, find(element, "And", True)) - prefix = findtext(element, "Prefix") - tags = ( - None if find(element, "Tag") is None - else Tags.fromxml(element) - ) - return cls(prefix, tags) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "And") - if self.prefix is not None: - SubElement(element, "Prefix", self.prefix) - if self.tags is not None: - self.tags.toxml(element) - return element - - -D = TypeVar("D", bound="Filter") - - -@dataclass(frozen=True) -class Filter: - """Lifecycle rule filter.""" - - and_operator: Optional[AndOperator] = None - prefix: Optional[str] = None - tag: Optional[Tag] = None - - def __post_init__(self): - valid = ( - (self.and_operator is not None) ^ - (self.prefix is not None) ^ - (self.tag is not None) - ) - if not valid: - raise ValueError("only one of and, prefix or tag must be provided") - - @classmethod - def fromxml(cls: Type[D], element: ET.Element) -> D: - """Create new object with values from XML element.""" - element = cast(ET.Element, find(element, "Filter", True)) - and_operator = ( - None if find(element, "And") is None - else AndOperator.fromxml(element) - ) - prefix = findtext(element, "Prefix") - tag = None if find(element, "Tag") is None else Tag.fromxml(element) - return cls(and_operator, prefix, tag) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "Filter") - if self.and_operator: - self.and_operator.toxml(element) - if self.prefix is not None: - SubElement(element, "Prefix", self.prefix) - if self.tag is not None: - self.tag.toxml(element) - 
return element - - -@dataclass(frozen=True) -class BaseRule(ABC): - """Base rule class for Replication and Lifecycle.""" - status: str - rule_filter: Optional[Filter] = None - rule_id: Optional[str] = None - - def __post_init__(self): - check_status(self.status) - if self.rule_id is not None: - self.rule_id = self.rule_id.strip() - if not self.rule_id: - raise ValueError("rule ID must be non-empty string") - if len(self.rule_id) > 255: - raise ValueError("rule ID must not exceed 255 characters") - - @abstractmethod - def _require_subclass_implementation(self) -> None: - """Dummy abstract method to enforce abstract class behavior.""" - - @staticmethod - def parsexml( - element: ET.Element, - ) -> tuple[str, Optional[Filter], Optional[str]]: - """Parse XML and return filter and ID.""" - return ( - cast(str, findtext(element, "Status", True)), - ( - None if find(element, "Filter") is None - else Filter.fromxml(element) - ), - findtext(element, "ID"), - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - SubElement(element, "Status", self.status) - if self.rule_filter: - self.rule_filter.toxml(element) - if self.rule_id is not None: - SubElement(element, "ID", self.rule_id) - return element - - -def check_status(status: str): - """Validate status.""" - if status not in [ENABLED, DISABLED]: - raise ValueError("status must be 'Enabled' or 'Disabled'") - - -@dataclass(frozen=True) -class SourceObject: - """Source object for copy and compose object.""" - bucket_name: str - object_name: str - version_id: Optional[str] = None - ssec: Optional[SseCustomerKey] = None - offset: int = 0 - length: int = 0 - match_etag: Optional[str] = None - not_match_etag: Optional[str] = None - modified_since: Optional[datetime] = None - unmodified_since: Optional[datetime] = None - fetch_checksum: bool = False - region: Optional[str] = None - - def __post_init__(self): - if ( - self.ssec is not None and - not isinstance(self.ssec, SseCustomerKey) - ): - raise ValueError("ssec must be SseCustomerKey type") - if self.offset < 0: - raise ValueError("offset should be zero or greater") - if self.length <= 0: - raise ValueError("length should be greater than zero") - if self.match_etag is not None and self.match_etag == "": - raise ValueError("match_etag must not be empty") - if self.not_match_etag is not None and self.not_match_etag == "": - raise ValueError("not_match_etag must not be empty") - if ( - self.modified_since is not None and - not isinstance(self.modified_since, datetime) - ): - raise ValueError("modified_since must be datetime type") - if ( - self.unmodified_since is not None and - not isinstance(self.unmodified_since, datetime) - ): - raise ValueError("unmodified_since must be datetime type") - - def gen_copy_headers(self) -> dict[str, str]: - """Generate copy source headers.""" - copy_source = quote("/" + self.bucket_name + "/" + self.object_name) - if self.version_id: - copy_source += "?versionId=" + quote(self.version_id) - - headers = {"x-amz-copy-source": copy_source} - if self.ssec: - headers.update(self.ssec.copy_headers()) - if self.match_etag: - headers["x-amz-copy-source-if-match"] = self.match_etag - if self.not_match_etag: - headers["x-amz-copy-source-if-none-match"] = self.not_match_etag - if self.modified_since: - headers["x-amz-copy-source-if-modified-since"] = ( - to_http_header(self.modified_since) - ) - if self.unmodified_since: - headers["x-amz-copy-source-if-unmodified-since"] = 
( - to_http_header(self.unmodified_since) - ) - return headers - - -@dataclass -class ObjectConditionalReadArgs(ABC): - """Base argument class holds condition properties for reading object.""" - bucket_name: str - object_name: str - region: Optional[str] = None - version_id: Optional[str] = None - ssec: Optional[SseCustomerKey] = None - offset: Optional[int] = None - length: Optional[int] = None - match_etag: Optional[str] = None - not_match_etag: Optional[str] = None - modified_since: Optional[datetime] = None - unmodified_since: Optional[datetime] = None - - def __post_init__(self): - if ( - self.ssec is not None and - not isinstance(self.ssec, SseCustomerKey) - ): - raise ValueError("ssec must be SseCustomerKey type") - if self.offset is not None and self.offset < 0: - raise ValueError("offset should be zero or greater") - if self.length is not None and self.length <= 0: - raise ValueError("length should be greater than zero") - if self.match_etag is not None and self.match_etag == "": - raise ValueError("match_etag must not be empty") - if self.not_match_etag is not None and self.not_match_etag == "": - raise ValueError("not_match_etag must not be empty") - if ( - self.modified_since is not None and - not isinstance(self.modified_since, datetime) - ): - raise ValueError("modified_since must be datetime type") - if ( - self.unmodified_since is not None and - not isinstance(self.unmodified_since, datetime) - ): - raise ValueError("unmodified_since must be datetime type") - - @abstractmethod - def _require_subclass_implementation(self) -> None: - """Dummy abstract method to enforce abstract class behavior.""" - - def gen_copy_headers(self) -> dict[str, str]: - """Generate copy source headers.""" - copy_source = quote("/" + self.bucket_name + "/" + self.object_name) - if self.version_id: - copy_source += "?versionId=" + quote(self.version_id) - - headers = {"x-amz-copy-source": copy_source} - if self.ssec: - headers.update(self.ssec.copy_headers()) - if self.match_etag: - headers["x-amz-copy-source-if-match"] = self.match_etag - if self.not_match_etag: - headers["x-amz-copy-source-if-none-match"] = self.not_match_etag - if self.modified_since: - headers["x-amz-copy-source-if-modified-since"] = ( - to_http_header(self.modified_since) - ) - if self.unmodified_since: - headers["x-amz-copy-source-if-unmodified-since"] = ( - to_http_header(self.unmodified_since) - ) - return headers - - -E = TypeVar("E", bound="CopySource") - - -@dataclass -class CopySource(ObjectConditionalReadArgs): - """A source object definition for copy_object method.""" - - def _require_subclass_implementation(self) -> None: - """Dummy abstract method to enforce abstract class behavior.""" - - @classmethod - def of(cls: Type[E], src: ObjectConditionalReadArgs) -> E: - """Create CopySource from another source.""" - return cls( - bucket_name=src.bucket_name, - object_name=src.object_name, - region=src.region, - version_id=src.version_id, - ssec=src.ssec, - offset=src.offset, - length=src.length, - match_etag=src.match_etag, - not_match_etag=src.not_match_etag, - modified_since=src.modified_since, - unmodified_since=src.unmodified_since, - ) - - -F = TypeVar("F", bound="ComposeSource") - - -@dataclass -class ComposeSource(ObjectConditionalReadArgs): - """A source object definition for compose_object method.""" - _object_size: Optional[int] = field(default=None, init=False) - _headers: Optional[dict[str, str]] = field(default=None, init=False) - - def _require_subclass_implementation(self) -> None: - """Dummy abstract method 
to enforce abstract class behavior.""" - - def _validate_size(self, object_size: int): - """Validate object size with offset and length.""" - def make_error(name, value): - ver = ("?versionId="+self.version_id) if self.version_id else "" - return ValueError( - f"Source {self.bucket_name}/{self.object_name}{ver}: " - f"{name} {value} is beyond object size {object_size}" - ) - - if self.offset is not None and self.offset >= object_size: - raise make_error("offset", self.offset) - if self.length is not None: - if self.length > object_size: - raise make_error("length", self.length) - offset = self.offset or 0 - if offset+self.length > object_size: - raise make_error("compose size", offset+self.length) - - def build_headers(self, object_size: int, etag: str): - """Build headers.""" - self._validate_size(object_size) - self._object_size = object_size - headers = self.gen_copy_headers() - headers["x-amz-copy-source-if-match"] = self.match_etag or etag - self._headers = headers - - @property - def object_size(self) -> Optional[int]: - """Get object size.""" - if self.object_size is None: - raise MinioException( - "build_headers() must be called prior to " - "this method invocation", - ) - return self.object_size - - @property - def headers(self) -> dict[str, str]: - """Get headers.""" - if self.headers is None: - raise MinioException( - "build_headers() must be called prior to " - "this method invocation", - ) - return self.headers.copy() - - @classmethod - def of(cls: Type[F], src: ObjectConditionalReadArgs) -> F: - """Create ComposeSource from another source.""" - return cls( - bucket_name=src.bucket_name, - object_name=src.object_name, - region=src.region, - version_id=src.version_id, - ssec=src.ssec, - offset=src.offset, - length=src.length, - match_etag=src.match_etag, - not_match_etag=src.not_match_etag, - modified_since=src.modified_since, - unmodified_since=src.unmodified_since, - ) - - -@dataclass(frozen=True) -class SnowballObject: - """A source object definition for upload_snowball_objects method.""" - object_name: str - filename: Optional[str] = None - data: Optional[IO[bytes]] = None - length: Optional[int] = None - mod_time: Optional[datetime] = None - - def __post_init__(self): - if not (self.filename is not None) ^ (self.data is not None): - raise ValueError("only one of filename or data must be provided") - if self.data is not None and self.length is None: - raise ValueError("length must be provided for data") - if ( - self.mod_time is not None and - not isinstance(self.mod_time, datetime) - ): - raise ValueError("mod_time must be datetime type") diff --git a/minio/compat.py b/minio/compat.py new file mode 100644 index 00000000..b483a509 --- /dev/null +++ b/minio/compat.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+# pylint: disable=unused-import
+
+"""Compatibility types."""
+
+from __future__ import annotations
+
+import errno
+import os
+import urllib.parse
+from typing import Dict, Iterable, List, Mapping, Optional, Union
+
+from urllib3._collections import HTTPHeaderDict
+
+try:
+    from urllib3.response import \
+        BaseHTTPResponse as HTTPResponse  # type: ignore[attr-defined]
+except ImportError:
+    from urllib3.response import HTTPResponse
+
+JSONDecodeError: type[ValueError]
+try:
+    from json.decoder import JSONDecodeError
+except ImportError:
+    JSONDecodeError = ValueError
+
+
+class HTTPQueryDict(dict[str, List[str]]):
+    """Dictionary for HTTP query parameters with multiple values per key."""
+
+    def __init__(
+        self,
+        initial: Optional[
+            Union[
+                "HTTPQueryDict",
+                Mapping[str, Union[str, Iterable[str]]],
+            ]
+        ] = None
+    ):
+        super().__init__()
+        if initial:
+            if not isinstance(initial, Mapping):
+                raise TypeError(
+                    "HTTPQueryDict expects a mapping-like object, "
+                    f"got {type(initial).__name__}",
+                )
+            for key, value in initial.items():
+                if isinstance(value, (str, bytes)):
+                    self[key] = [value]
+                else:
+                    self[key] = list(value)
+
+    def __setitem__(self, key: str, value: Union[str, Iterable[str]]) -> None:
+        super().__setitem__(
+            key,
+            [value] if isinstance(value, (str, bytes)) else list(value),
+        )
+
+    def copy(self) -> "HTTPQueryDict":
+        return HTTPQueryDict(self)
+
+    def extend(
+        self,
+        other: Optional[
+            Union[
+                "HTTPQueryDict",
+                Mapping[str, Union[str, Iterable[str]]],
+            ]
+        ],
+    ) -> "HTTPQueryDict":
+        """Merge the other mapping's keys and values into this dictionary."""
+        if other is None:
+            return self
+        if not isinstance(other, Mapping):
+            raise TypeError(
+                "extend() expects a mapping-like object, "
+                f"got {type(other).__name__}",
+            )
+        for key, value in other.items():
+            normalized = (
+                [value] if isinstance(value, (str, bytes)) else list(value)
+            )
+            if key in self:
+                self[key] += normalized
+            else:
+                self[key] = normalized
+        return self
+
+    def __str__(self) -> str:
+        """Convert dictionary to a URL-encoded query string."""
+        query_list = [(k, v) for k, values in self.items() for v in values]
+        query_list.sort(key=lambda x: (x[0], x[1]))  # Sort by key, then value
+        return urllib.parse.urlencode(query_list, quote_via=urllib.parse.quote)
+
+
+def quote(resource: str, safe: str = "/") -> str:
+    """
+    Wrapper to urllib.parse.quote() that restores '~' characters, which
+    older Python versions percent-encode.
+    """
+    return urllib.parse.quote(
+        resource, safe=safe, encoding=None, errors=None,
+    ).replace("%7E", "~")
+
+
+def queryencode(query: str) -> str:
+    """Encode query parameter value."""
+    return quote(query, safe="")
+
+
+def makedirs(path: str):
+    """Wrapper of os.makedirs() that ignores errno.EEXIST."""
+    try:
+        if path:
+            os.makedirs(path)
+    except OSError as exc:
+        if exc.errno != errno.EEXIST:
+            raise
+
+        if not os.path.isdir(path):
+            raise ValueError(f"path {path} is not a directory") from exc
diff --git a/minio/credentials/__init__.py b/minio/credentials/__init__.py
index 4adc1ada..31146991 100644
--- a/minio/credentials/__init__.py
+++ b/minio/credentials/__init__.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2020 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
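
A short usage sketch of the HTTPQueryDict class added in minio/compat.py above. The behavior shown follows that hunk as written; the printed query string is illustrative:

from minio.compat import HTTPQueryDict

params = HTTPQueryDict({"prefix": "logs/", "versions": ""})
params["list-type"] = "2"                # a bare string is stored as ["2"]
params.extend({"prefix": ["archive/"]})  # appends to the existing key

# __str__ renders a key/value-sorted, percent-encoded query string
# (urlencode is called with quote_via=quote, so "/" becomes "%2F").
print(params)
# list-type=2&prefix=archive%2F&prefix=logs%2F&versions=
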
@@ -14,9 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Credential module.""" - # pylint: disable=unused-import + +"""Credential definitions and providers.""" + from .credentials import Credentials from .providers import (AssumeRoleProvider, AWSConfigProvider, CertificateIdentityProvider, ChainedProvider, diff --git a/minio/credentials/credentials.py b/minio/credentials/credentials.py index deac5ee7..85a4d99d 100644 --- a/minio/credentials/credentials.py +++ b/minio/credentials/credentials.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,6 +15,7 @@ # limitations under the License. """Credential definitions to access S3 service.""" + from __future__ import annotations from dataclasses import dataclass diff --git a/minio/credentials/providers.py b/minio/credentials/providers.py index 002f7326..9ab746ef 100644 --- a/minio/credentials/providers.py +++ b/minio/credentials/providers.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -33,23 +33,17 @@ from pathlib import Path from typing import Callable, Optional, cast from urllib.parse import urlencode, urlsplit, urlunsplit -from xml.etree import ElementTree as ET import certifi -from urllib3._collections import HTTPHeaderDict from urllib3.poolmanager import PoolManager - -try: - from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined] -except ImportError: - from urllib3.response import HTTPResponse as BaseHTTPResponse - from urllib3.util import Retry, parse_url -from minio.helpers import sha256_hash, url_replace +from minio.checksum import sha256_hash +from minio.compat import HTTPHeaderDict, HTTPResponse +from minio.helpers import url_replace from minio.signer import sign_v4_sts from minio.time import from_iso8601utc, to_amz_date, utcnow -from minio.xml import find, findtext +from minio.xml import ET, find, findtext from .credentials import Credentials @@ -78,7 +72,7 @@ def _urlopen( url: str, body: Optional[str | bytes] = None, headers: Optional[HTTPHeaderDict] = None, -) -> BaseHTTPResponse: +) -> HTTPResponse: """Wrapper of urlopen() handles HTTP status code.""" res = http_client.urlopen(method, url, body=body, headers=headers) if res.status not in [200, 204, 206]: diff --git a/minio/crypto.py b/minio/crypto.py index 614f856c..5b82e642 100644 --- a/minio/crypto.py +++ b/minio/crypto.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2015, 2016, 2017 MinIO, Inc. +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,9 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
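
The providers module now takes sha256_hash from the new minio/checksum.py (see the import change above). A quick sketch of the two digest helpers that module gains; the example digests are standard test vectors:

from minio.checksum import md5sum_hash, sha256_hash

# sha256_hash returns a hex digest and treats None as an empty payload.
assert sha256_hash(None) == sha256_hash(b"")
print(sha256_hash("hello"))
# 2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824

# md5sum_hash returns a Base64-encoded digest, or None for None input,
# and creates the hasher with usedforsecurity=False (checksums only).
print(md5sum_hash(b"hello"))  # XUFAKrxLKna5cZ2REBfFkg==
assert md5sum_hash(None) is None
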
-"""Cryptography to read and write encrypted MinIO Admin payload""" +"""Cryptography to read and write encrypted MinIO Admin payload.""" -from __future__ import absolute_import, annotations +from __future__ import annotations import os @@ -25,10 +25,7 @@ from Crypto.Cipher._mode_gcm import GcmMode from Crypto.Cipher.ChaCha20_Poly1305 import ChaCha20Poly1305Cipher -try: - from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined] -except ImportError: - from urllib3.response import HTTPResponse as BaseHTTPResponse +from .compat import HTTPResponse # # Encrypted Message Format: @@ -142,7 +139,7 @@ class DecryptReader: APIs. """ - def __init__(self, response: BaseHTTPResponse, secret: bytes): + def __init__(self, response: HTTPResponse, secret: bytes): self._response = response self._secret = secret self._payload = None @@ -242,7 +239,7 @@ def stream(self, num_bytes=32*1024): yield result -def decrypt(response: BaseHTTPResponse, secret_key: str) -> bytes: +def decrypt(response: HTTPResponse, secret_key: str) -> bytes: """Decrypt response data.""" result = b"" with DecryptReader(response, secret_key.encode()) as reader: diff --git a/minio/datatypes.py b/minio/datatypes.py deleted file mode 100644 index 6684df0f..00000000 --- a/minio/datatypes.py +++ /dev/null @@ -1,817 +0,0 @@ -# -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# pylint: disable=too-many-lines - -""" -Response of ListBuckets, ListObjects, ListObjectsV2 and ListObjectVersions API. 
-""" - -from __future__ import absolute_import, annotations - -import base64 -import json -from collections import OrderedDict -from dataclasses import dataclass, field -from datetime import datetime -from enum import Enum -from typing import Any, List, Optional, Tuple, Type, TypeVar, Union, cast -from urllib.parse import unquote_plus -from xml.etree import ElementTree as ET - -from urllib3._collections import HTTPHeaderDict - -try: - from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined] -except ImportError: - from urllib3.response import HTTPResponse as BaseHTTPResponse - -from .commonconfig import Tags -from .credentials import Credentials -from .helpers import HTTPQueryDict, check_bucket_name -from .signer import get_credential_string, post_presign_v4 -from .time import from_iso8601utc, to_amz_date, to_iso8601utc -from .xml import find, findall, findtext - -JSONDecodeError: type[ValueError] -try: - from json.decoder import JSONDecodeError -except ImportError: - JSONDecodeError = ValueError - - -@dataclass(frozen=True) -class Bucket: - """Bucket information.""" - name: str - creation_date: Optional[datetime] - - -A = TypeVar("A", bound="ListAllMyBucketsResult") - - -@dataclass(frozen=True) -class ListAllMyBucketsResult: - """LissBuckets API result.""" - buckets: list[Bucket] - prefix: Optional[str] - continuation_token: Optional[str] - owner_id: Optional[str] = None - owner_name: Optional[str] = None - - @classmethod - def fromxml(cls: Type[A], element: ET.Element) -> A: - """Create new object with values from XML element.""" - prefix = findtext(element, "Prefix") - continuation_token = findtext(element, "ContinuationToken") - owner = find(element, "Owner") - owner_id = None if owner is None else findtext(owner, "ID") - owner_name = None if owner is None else findtext(owner, "DisplayName") - element = cast(ET.Element, find(element, "Buckets", True)) - buckets = [] - elements = findall(element, "Bucket") - for bucket in elements: - name = cast(str, findtext(bucket, "Name", True)) - creation_date = findtext(bucket, "CreationDate") - buckets.append(Bucket( - name, - from_iso8601utc(creation_date) if creation_date else None, - )) - return cls( - buckets=buckets, - prefix=prefix, - continuation_token=continuation_token, - owner_id=owner_id, - owner_name=owner_name, - ) - - -B = TypeVar("B", bound="Object") - - -@dataclass(frozen=True) -class Object: - """Object information.""" - bucket_name: str - object_name: Optional[str] - last_modified: Optional[datetime] = None - etag: Optional[str] = None - size: Optional[int] = None - metadata: Optional[Union[dict[str, str], HTTPHeaderDict]] = None - version_id: Optional[str] = None - is_latest: Optional[str] = None - storage_class: Optional[str] = None - owner_id: Optional[str] = None - owner_name: Optional[str] = None - content_type: Optional[str] = None - is_delete_marker: bool = False - tags: Optional[Tags] = None - is_dir: bool = field(default=False, init=False) - - def __post_init__(self): - object.__setattr__( - self, - "is_dir", - bool(self.object_name and self.object_name.endswith("/")), - ) - - @classmethod - def fromxml( - cls: Type[B], - element: ET.Element, - bucket_name: str, - is_delete_marker: bool = False, - encoding_type: Optional[str] = None, - ) -> B: - """Create new object with values from XML element.""" - tag = findtext(element, "LastModified") - last_modified = None if tag is None else from_iso8601utc(tag) - - tag = findtext(element, "ETag") - etag = None if tag is None else tag.replace('"', "") - - tag = 
findtext(element, "Size") - size = None if tag is None else int(tag) - - elem = find(element, "Owner") - owner_id, owner_name = ( - (None, None) if elem is None - else (findtext(elem, "ID"), findtext(elem, "DisplayName")) - ) - - elems: ET.Element | list = find(element, "UserMetadata") or [] - metadata: dict[str, str] = {} - for child in elems: - key = child.tag.split("}")[1] if "}" in child.tag else child.tag - metadata[key] = child.text or "" - - object_name = cast(str, findtext(element, "Key", True)) - if encoding_type == "url": - object_name = unquote_plus(object_name) - - tags_text = findtext(element, "UserTags") - tags: Optional[Tags] = None - if tags_text: - tags = Tags.new_object_tags() - tags.update( - cast( - List[Tuple[Any, Any]], - [tokens.split("=") for tokens in tags_text.split("&")], - ), - ) - - return cls( - bucket_name=bucket_name, - object_name=object_name, - last_modified=last_modified, - etag=etag, - size=size, - version_id=findtext(element, "VersionId"), - is_latest=findtext(element, "IsLatest"), - storage_class=findtext(element, "StorageClass"), - owner_id=owner_id, - owner_name=owner_name, - metadata=metadata, - is_delete_marker=is_delete_marker, - tags=tags - ) - - -def parse_list_objects( - response: BaseHTTPResponse, - bucket_name: Optional[str] = None, -) -> tuple[list[Object], bool, Optional[str], Optional[str]]: - """Parse ListObjects/ListObjectsV2/ListObjectVersions response.""" - element = ET.fromstring(response.data.decode()) - bucket_name = cast(str, findtext(element, "Name", True)) - encoding_type = findtext(element, "EncodingType") - elements = findall(element, "Contents") - objects = [ - Object.fromxml(tag, bucket_name, encoding_type=encoding_type) - for tag in elements - ] - marker = objects[-1].object_name if objects else None - - elements = findall(element, "Version") - objects += [ - Object.fromxml(tag, bucket_name, encoding_type=encoding_type) - for tag in elements - ] - - elements = findall(element, "CommonPrefixes") - objects += [ - Object( - bucket_name, unquote_plus(findtext(tag, "Prefix", True) or "") - if encoding_type == "url" else findtext(tag, "Prefix", True) - ) for tag in elements - ] - - elements = findall(element, "DeleteMarker") - objects += [ - Object.fromxml(tag, bucket_name, is_delete_marker=True, - encoding_type=encoding_type) - for tag in elements - ] - - is_truncated = (findtext(element, "IsTruncated") or "").lower() == "true" - key_marker = findtext(element, "NextKeyMarker") - if key_marker and encoding_type == "url": - key_marker = unquote_plus(key_marker) - version_id_marker = findtext(element, "NextVersionIdMarker") - continuation_token = findtext(element, "NextContinuationToken") - if key_marker is not None: - continuation_token = key_marker - if continuation_token is None: - continuation_token = findtext(element, "NextMarker") - if continuation_token and encoding_type == "url": - continuation_token = unquote_plus(continuation_token) - if continuation_token is None and is_truncated: - continuation_token = marker - return objects, is_truncated, continuation_token, version_id_marker - - -@dataclass(frozen=True) -class CompleteMultipartUploadResult: - """CompleteMultipartUpload API result.""" - - headers: HTTPHeaderDict - bucket_name: Optional[str] = None - object_name: Optional[str] = None - location: Optional[str] = None - etag: Optional[str] = None - version_id: Optional[str] = None - - def __init__(self, response: BaseHTTPResponse): - object.__setattr__(self, "headers", response.headers) - element = 
ET.fromstring(response.data.decode()) - object.__setattr__(self, "bucket_name", findtext(element, "Bucket")) - object.__setattr__(self, "object_name", findtext(element, "Key")) - object.__setattr__(self, "location", findtext(element, "Location")) - etag = findtext(element, "ETag") - if etag: - object.__setattr__( - self, - "etag", - cast(str, etag).replace('"', ""), - ) - object.__setattr__( - self, - "version_id", - response.headers.get("x-amz-version-id"), - ) - - -C = TypeVar("C", bound="Part") - - -@dataclass(frozen=True) -class Part: - """Part information of a multipart upload.""" - part_number: int - etag: str - last_modified: Optional[datetime] = None - size: Optional[int] = None - checksum_crc32: Optional[str] = None - checksum_crc32c: Optional[str] = None - checksum_sha1: Optional[str] = None - checksum_sha256: Optional[str] = None - - @classmethod - def fromxml(cls: Type[C], element: ET.Element) -> C: - """Create new object with values from XML element.""" - part_number = int(cast(str, findtext(element, "PartNumber", True))) - etag = cast(str, findtext(element, "ETag", True)) - etag = etag.replace('"', "") - tag = findtext(element, "LastModified") - last_modified = None if tag is None else from_iso8601utc(tag) - size = findtext(element, "Size") - return cls( - part_number=part_number, - etag=etag, - last_modified=last_modified, - size=int(size) if size else None, - ) - - -@dataclass(frozen=True) -class ListPartsResult: - """ListParts API result.""" - - bucket_name: Optional[str] = None - object_name: Optional[str] = None - initiator_id: Optional[str] = None - initiator_name: Optional[str] = None - owner_id: Optional[str] = None - owner_name: Optional[str] = None - storage_class: Optional[str] = None - part_number_marker: Optional[str] = None - next_part_number_marker: Optional[str] = None - max_parts: Optional[int] = None - is_truncated: bool = False - parts: list[Part] = field(default_factory=list) - - def __init__(self, response: BaseHTTPResponse): - element = ET.fromstring(response.data.decode()) - object.__setattr__(self, "bucket_name", findtext(element, "Bucket")) - object.__setattr__(self, "object_name", findtext(element, "Key")) - tag = find(element, "Initiator") - object.__setattr__( - self, - "initiator_id", - None if tag is None else findtext(tag, "ID"), - ) - object.__setattr__( - self, - "initiator_name", - None if tag is None else findtext(tag, "DisplayName"), - ) - tag = find(element, "Owner") - object.__setattr__( - self, - "owner_id", - None if tag is None else findtext(tag, "ID") - ) - object.__setattr__( - self, - "owner_name", - None if tag is None else findtext(tag, "DisplayName"), - ) - object.__setattr__( - self, - "storage_class", - findtext(element, "StorageClass"), - ) - object.__setattr__( - self, - "part_number_marker", - findtext(element, "PartNumberMarker"), - ) - object.__setattr__( - self, - "next_part_number_marker", - findtext(element, "NextPartNumberMarker"), - ) - max_parts = findtext(element, "MaxParts") - object.__setattr__( - self, - "max_parts", - int(max_parts) if max_parts else None, - ) - is_truncated = findtext(element, "IsTruncated") - object.__setattr__( - self, - "is_truncated", - is_truncated is not None and is_truncated.lower() == "true", - ) - object.__setattr__( - self, - "parts", - [Part.fromxml(tag) for tag in findall(element, "Part")], - ) - - -@dataclass(frozen=True) -class Upload: - """ Upload information of a multipart upload.""" - - object_name: str - encoding_type: Optional[str] = None - upload_id: Optional[str] = None - 
initiator_id: Optional[str] = None - initiator_name: Optional[str] = None - owner_id: Optional[str] = None - owner_name: Optional[str] = None - storage_class: Optional[str] = None - initiated_time: Optional[datetime] = None - - def __init__( - self, element: ET.Element, encoding_type: Optional[str] = None, - ): - object_name = cast(str, findtext(element, "Key", True)) - object.__setattr__( - self, - "object_name", - unquote_plus(object_name) if encoding_type == "url" - else object_name, - ) - object.__setattr__(self, "encoding_type", encoding_type) - object.__setattr__(self, "upload_id", findtext(element, "UploadId")) - tag = find(element, "Initiator") - object.__setattr__( - self, - "initiator_id", - None if tag is None else findtext(tag, "ID"), - ) - object.__setattr__( - self, - "initiator_name", - None if tag is None else findtext(tag, "DisplayName"), - ) - tag = find(element, "Owner") - object.__setattr__( - self, - "owner_id", - None if tag is None else findtext(tag, "ID"), - ) - object.__setattr__( - self, - "owner_name", - None if tag is None else findtext(tag, "DisplayName"), - ) - object.__setattr__( - self, - "storage_class", - findtext(element, "StorageClass"), - ) - initiated_time = findtext(element, "Initiated") - object.__setattr__( - self, - "initiated_time", - from_iso8601utc(initiated_time) if initiated_time else None, - ) - - -@dataclass(frozen=True) -class ListMultipartUploadsResult: - """ListMultipartUploads API result.""" - - encoding_type: Optional[str] = None - bucket_name: Optional[str] = None - key_marker: Optional[str] = None - upload_id_marker: Optional[str] = None - next_key_marker: Optional[str] = None - next_upload_id_marker: Optional[str] = None - max_uploads: Optional[int] = None - is_truncated: bool = False - uploads: list[Upload] = field(default_factory=list) - - def __init__(self, response: BaseHTTPResponse): - element = ET.fromstring(response.data.decode()) - encoding_type = findtext(element, "EncodingType") - object.__setattr__(self, "encoding_type", encoding_type) - object.__setattr__( - self, - "bucket_name", - findtext(element, "Bucket"), - ) - value = findtext(element, "KeyMarker") - if value is not None and encoding_type == "url": - value = unquote_plus(value) - object.__setattr__(self, "key_marker", value) - object.__setattr__( - self, - "upload_id_marker", - findtext(element, "UploadIdMarker"), - ) - value = findtext(element, "NextKeyMarker") - if value is not None and encoding_type == "url": - value = unquote_plus(value) - object.__setattr__(self, "next_key_marker", value) - object.__setattr__( - self, - "self._next_upload_id_marker", - findtext(element, "NextUploadIdMarker"), - ) - max_uploads = findtext(element, "MaxUploads") - object.__setattr__( - self, - "max_uploads", - int(max_uploads) if max_uploads else None, - ) - is_truncated = findtext(element, "IsTruncated") - object.__setattr__( - self, - "is_truncated", - is_truncated is not None and is_truncated.lower() == "true", - ) - object.__setattr__( - self, - "uploads", - [ - Upload(tag, encoding_type) - for tag in findall(element, "Upload") - ], - ) - - -_RESERVED_ELEMENTS = ( - "bucket", - "x-amz-algorithm", - "x-amz-credential", - "x-amz-date", - "policy", - "x-amz-signature", -) -_EQ = "eq" -_STARTS_WITH = "starts-with" -_ALGORITHM = "AWS4-HMAC-SHA256" - - -def _trim_dollar(value: str) -> str: - """Trim dollar character if present.""" - return value[1:] if value.startswith("$") else value - - -class PostPolicy: - """ - Post policy information to be used to generate presigned post 
policy - form-data. Condition elements and respective condition for Post policy - is available at - https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-HTTPPOSTConstructPolicy.html#sigv4-PolicyConditions - """ - - def __init__(self, bucket_name: str, expiration: datetime): - check_bucket_name(bucket_name) - if not isinstance(expiration, datetime): - raise ValueError("expiration must be datetime type") - self._bucket_name = bucket_name - self._expiration = expiration - self._conditions: OrderedDict = OrderedDict() - self._conditions[_EQ] = OrderedDict() - self._conditions[_STARTS_WITH] = OrderedDict() - self._lower_limit: Optional[int] = None - self._upper_limit: Optional[int] = None - - def add_equals_condition(self, element: str, value: str): - """Add equals condition of an element and value.""" - if not element: - raise ValueError("condition element cannot be empty") - element = _trim_dollar(element) - if ( - element in [ - "success_action_redirect", - "redirect", - "content-length-range", - ] - ): - raise ValueError(element + " is unsupported for equals condition") - if element in _RESERVED_ELEMENTS: - raise ValueError(element + " cannot be set") - self._conditions[_EQ][element] = value - - def remove_equals_condition(self, element: str): - """Remove previously set equals condition of an element.""" - if not element: - raise ValueError("condition element cannot be empty") - self._conditions[_EQ].pop(element) - - def add_starts_with_condition(self, element: str, value: str): - """ - Add starts-with condition of an element and value. Value set to empty - string does matching any content condition. - """ - if not element: - raise ValueError("condition element cannot be empty") - element = _trim_dollar(element) - if ( - element in ["success_action_status", "content-length-range"] or - ( - element.startswith("x-amz-") and - not element.startswith("x-amz-meta-") - ) - ): - raise ValueError( - f"{element} is unsupported for starts-with condition", - ) - if element in _RESERVED_ELEMENTS: - raise ValueError(element + " cannot be set") - self._conditions[_STARTS_WITH][element] = value - - def remove_starts_with_condition(self, element: str): - """Remove previously set starts-with condition of an element.""" - if not element: - raise ValueError("condition element cannot be empty") - self._conditions[_STARTS_WITH].pop(element) - - def add_content_length_range_condition( # pylint: disable=invalid-name - self, lower_limit: int, upper_limit: int): - """Add content-length-range condition with lower and upper limits.""" - if lower_limit < 0: - raise ValueError("lower limit cannot be negative number") - if upper_limit < 0: - raise ValueError("upper limit cannot be negative number") - if lower_limit > upper_limit: - raise ValueError("lower limit cannot be greater than upper limit") - self._lower_limit = lower_limit - self._upper_limit = upper_limit - - def remove_content_length_range_condition( # pylint: disable=invalid-name - self): - """Remove previously set content-length-range condition.""" - self._lower_limit = None - self._upper_limit = None - - def form_data(self, creds: Credentials, region: str): - """ - Return form-data of this post policy. The returned dict contains - x-amz-algorithm, x-amz-credential, x-amz-security-token, x-amz-date, - policy and x-amz-signature. 
- """ - if not isinstance(creds, Credentials): - raise ValueError("credentials must be Credentials type") - if not region: - raise ValueError("region cannot be empty") - if ( - "key" not in self._conditions[_EQ] and - "key" not in self._conditions[_STARTS_WITH] - ): - raise ValueError("key condition must be set") - - policy: OrderedDict = OrderedDict() - policy["expiration"] = to_iso8601utc(self._expiration) - policy["conditions"] = [[_EQ, "$bucket", self._bucket_name]] - for cond_key, conditions in self._conditions.items(): - for key, value in conditions.items(): - policy["conditions"].append([cond_key, "$"+key, value]) - if self._lower_limit is not None and self._upper_limit is not None: - policy["conditions"].append( - ["content-length-range", self._lower_limit, self._upper_limit], - ) - utcnow = datetime.utcnow() - credential = get_credential_string(creds.access_key, utcnow, region) - amz_date = to_amz_date(utcnow) - policy["conditions"].append([_EQ, "$x-amz-algorithm", _ALGORITHM]) - policy["conditions"].append([_EQ, "$x-amz-credential", credential]) - if creds.session_token: - policy["conditions"].append( - [_EQ, "$x-amz-security-token", creds.session_token], - ) - policy["conditions"].append([_EQ, "$x-amz-date", amz_date]) - - policy_encoded = base64.b64encode( - json.dumps(policy).encode(), - ).decode("utf-8") - signature = post_presign_v4( - policy_encoded, creds.secret_key, utcnow, region, - ) - form_data = { - "x-amz-algorithm": _ALGORITHM, - "x-amz-credential": credential, - "x-amz-date": amz_date, - "policy": policy_encoded, - "x-amz-signature": signature, - } - if creds.session_token: - form_data["x-amz-security-token"] = creds.session_token - return form_data - - @property - def bucket_name(self) -> str: - """Get bucket name.""" - return self._bucket_name - - -def parse_copy_object( - response: BaseHTTPResponse, -) -> tuple[str, Optional[datetime]]: - """Parse CopyObject/UploadPartCopy response.""" - element = ET.fromstring(response.data.decode()) - etag = cast(str, findtext(element, "ETag", True)).replace('"', "") - last_modified = findtext(element, "LastModified") - return etag, from_iso8601utc(last_modified) if last_modified else None - - -class EventIterable: - """Context manager friendly event iterable.""" - - def __init__(self, func): - self._func = func - self._response = None - - def _close_response(self): - """Close response.""" - if self._response: - self._response.close() - self._response.release_conn() - self._response = None - - def __iter__(self): - return self - - def _get_records(self): - """Get event records from response stream.""" - try: - line = self._response.readline().strip() - if not line: - return None - if hasattr(line, 'decode'): - line = line.decode() - event = json.loads(line) - if event['Records']: - return event - except (StopIteration, JSONDecodeError): - self._close_response() - return None - - def __next__(self): - records = None - while not records: - if not self._response or self._response.closed: - self._response = self._func() - records = self._get_records() - return records - - def __enter__(self): - return self - - def __exit__(self, exc_type, value, traceback): - self._close_response() - - -@dataclass(frozen=True) -class PeerSite: - """Represents a cluster/site to be added to the set of replicated sites.""" - name: str - endpoint: str - access_key: str - secret_key: str - - def to_dict(self) -> dict[str, str]: - """Convert to dictionary.""" - return { - "name": self.name, - "endpoints": self.endpoint, - "accessKey": self.access_key, - 
"secretKey": self.secret_key, - } - - -@dataclass(frozen=True) -class SiteReplicationStatusOptions: - """Represents site replication status options.""" - ENTITY_TYPE = Enum( - "ENTITY_TYPE", - { - "BUCKET": "bucket", - "POLICY": "policy", - "USER": "user", - "GROUP": "group", - }, - ) - buckets: bool = False - policies: bool = False - users: bool = False - groups: bool = False - metrics: bool = False - show_deleted: bool = False - entity: Optional[str] = None - entity_value: Optional[str] = None - - def to_query_params(self) -> HTTPQueryDict: - """Convert this options to query parameters.""" - params = HTTPQueryDict() - params["buckets"] = str(self.buckets).lower() - params["policies"] = str(self.policies).lower() - params["users"] = str(self.users).lower() - params["groups"] = str(self.groups).lower() - params["metrics"] = str(self.metrics).lower() - params["showDeleted"] = str(self.show_deleted).lower() - if self.entity and self.entity_value: - params["entity"] = self.entity - params["entityvalue"] = self.entity_value - return params - - -@dataclass(frozen=True) -class PeerInfo: - """Site replication peer information.""" - deployment_id: str - endpoint: str - bucket_bandwidth_limit: str - bucket_bandwidth_set: str - name: Optional[str] = None - sync_status: Optional[str] = None - bucket_bandwidth_updated_at: Optional[datetime] = None - - def to_dict(self): - """Converts peer information to dictionary.""" - data = { - "endpoint": self.endpoint, - "deploymentID": self.deployment_id, - "defaultbandwidth": { - "bandwidthLimitPerBucket": self.bucket_bandwidth_limit, - "set": self.bucket_bandwidth_set, - }, - } - if self.name: - data["name"] = self.name - if self.sync_status is not None: - data["sync"] = "enable" if self.sync_status else "disable" - if self.bucket_bandwidth_updated_at: - data["defaultbandwidth"]["updatedAt"] = to_iso8601utc( - self.bucket_bandwidth_updated_at, - ) - return data diff --git a/minio/deleteobjects.py b/minio/deleteobjects.py deleted file mode 100644 index d323631b..00000000 --- a/minio/deleteobjects.py +++ /dev/null @@ -1,140 +0,0 @@ -# -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Request/response of DeleteObjects API.""" - -from __future__ import absolute_import, annotations - -from dataclasses import dataclass -from typing import Optional, Type, TypeVar, cast -from xml.etree import ElementTree as ET - -from .xml import Element, SubElement, findall, findtext - - -@dataclass(frozen=True) -class DeleteObject: - """Delete object request information.""" - - name: str - version_id: Optional[str] = None - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "Object") - SubElement(element, "Key", self.name) - if self.version_id is not None: - SubElement(element, "VersionId", self.version_id) - return element - - -@dataclass(frozen=True) -class DeleteRequest: - """Delete object request.""" - - object_list: list[DeleteObject] - quiet: bool = False - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - element = Element("Delete") - if self.quiet: - SubElement(element, "Quiet", "true") - for obj in self.object_list: - obj.toxml(element) - return element - - -A = TypeVar("A", bound="DeletedObject") - - -@dataclass(frozen=True) -class DeletedObject: - """Deleted object information.""" - - name: str - version_id: Optional[str] - delete_marker: bool - delete_marker_version_id: Optional[str] - - @classmethod - def fromxml(cls: Type[A], element: ET.Element) -> A: - """Create new object with values from XML element.""" - name = cast(str, findtext(element, "Key", True)) - version_id = findtext(element, "VersionId") - delete_marker = findtext(element, "DeleteMarker") - delete_marker_version_id = findtext(element, "DeleteMarkerVersionId") - return cls( - name=name, - version_id=version_id, - delete_marker=( - delete_marker is not None and delete_marker.title() == "True" - ), - delete_marker_version_id=delete_marker_version_id, - ) - - -B = TypeVar("B", bound="DeleteError") - - -@dataclass(frozen=True) -class DeleteError: - """Delete error information.""" - - code: str - message: Optional[str] - name: Optional[str] - version_id: Optional[str] - - @classmethod - def fromxml(cls: Type[B], element: ET.Element) -> B: - """Create new object with values from XML element.""" - code = cast(str, findtext(element, "Code", True)) - message = findtext(element, "Message") - name = findtext(element, "Key") - version_id = findtext(element, "VersionId") - return cls( - code=code, - message=message, - name=name, - version_id=version_id, - ) - - -C = TypeVar("C", bound="DeleteResult") - - -@dataclass(frozen=True) -class DeleteResult: - """Delete object result.""" - - object_list: list[DeletedObject] - error_list: list[DeleteError] - - @classmethod - def fromxml(cls: Type[C], element: ET.Element) -> C: - """Create new object with values from XML element.""" - elements = findall(element, "Deleted") - object_list = [] - for tag in elements: - object_list.append(DeletedObject.fromxml(tag)) - elements = findall(element, "Error") - error_list = [] - for tag in elements: - error_list.append(DeleteError.fromxml(tag)) - return cls(object_list=object_list, error_list=error_list) diff --git a/minio/error.py b/minio/error.py index fd169beb..d94b26e3 100644 --- a/minio/error.py +++ b/minio/error.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015-2019 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,29 +16,14 @@ # pylint: disable=too-many-lines -""" -minio.error -~~~~~~~~~~~~~~~~~~~ +"""SDK exceptions.""" -This module provides custom exception classes for MinIO library -and API specific errors. +from __future__ import annotations -:copyright: (c) 2015, 2016, 2017 by MinIO, Inc. -:license: Apache 2.0, see LICENSE for more details. +from typing import Optional, Type -""" - -from __future__ import absolute_import, annotations - -from typing import Optional, Type, TypeVar -from xml.etree import ElementTree as ET - -try: - from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined] -except ImportError: - from urllib3.response import HTTPResponse as BaseHTTPResponse - -from .xml import findtext +from .compat import HTTPResponse +from .xml import ET, findtext class MinioException(Exception): @@ -76,15 +61,12 @@ def status_code(self) -> int: return self._status_code -A = TypeVar("A", bound="S3Error") - - class S3Error(MinioException): """ Raised to indicate that error response is received when executing S3 operation. """ - response: BaseHTTPResponse + response: HTTPResponse code: Optional[str] message: Optional[str] resource: Optional[str] @@ -97,7 +79,7 @@ class S3Error(MinioException): def __init__( # pylint: disable=too-many-positional-arguments self, - response: BaseHTTPResponse, + response: HTTPResponse, code: Optional[str], message: Optional[str], resource: Optional[str], @@ -150,7 +132,7 @@ def __delattr__(self, name): object.__delattr__(self, name) @classmethod - def fromxml(cls: Type[A], response: BaseHTTPResponse) -> A: + def new(cls: Type[S3Error], response: HTTPResponse) -> S3Error: """Create new object with values from XML element.""" element = ET.fromstring(response.data.decode()) return cls( diff --git a/minio/helpers.py b/minio/helpers.py index aa0203c5..463aa8ba 100644 --- a/minio/helpers.py +++ b/minio/helpers.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2015, 2016, 2017 MinIO, Inc. +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,73 +14,80 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Helper functions.""" +"""Utility functions and classes.""" -from __future__ import absolute_import, annotations, division, unicode_literals +from __future__ import annotations -import base64 -import errno -import hashlib -import math -import os import platform import re import urllib.parse -from dataclasses import dataclass -from datetime import datetime from queue import Queue from threading import BoundedSemaphore, Lock, Thread -from typing import (BinaryIO, Dict, Iterable, List, Mapping, Optional, Type, - Union) - -from typing_extensions import Protocol -from urllib3._collections import HTTPHeaderDict +from typing import Mapping, Optional from . 
import __title__, __version__ -from .checksum import Algorithm, Hasher, reset_hashers, update_hashers -from .sse import Sse, SseCustomerKey +from .compat import HTTPHeaderDict, HTTPQueryDict, quote _DEFAULT_USER_AGENT = ( f"MinIO ({platform.system()}; {platform.machine()}) " f"{__title__}/{__version__}" ) -MAX_MULTIPART_COUNT = 10000 # 10000 parts +MAX_MULTIPART_COUNT = 10000 # 10,000 parts MAX_MULTIPART_OBJECT_SIZE = 5 * 1024 * 1024 * 1024 * 1024 # 5TiB MAX_PART_SIZE = 5 * 1024 * 1024 * 1024 # 5GiB MIN_PART_SIZE = 5 * 1024 * 1024 # 5MiB -_AWS_S3_PREFIX = (r'^(((bucket\.|accesspoint\.)' - r'vpce(-(?!_)[a-z_\d]+(? str: + """Get user agent header value for app name and version.""" + if default: + return _DEFAULT_USER_AGENT + if not (app_name and app_version): + raise ValueError("Application name and version must be provided.") + return f"{_DEFAULT_USER_AGENT} {app_name}/{app_version}" class RegionMap: @@ -106,102 +113,6 @@ def remove(self, bucket_name: str): self._map.pop(bucket_name, None) -class HTTPQueryDict(dict[str, List[str]]): - """Dictionary for HTTP query parameters with multiple values per key.""" - - def __init__( - self, - initial: Optional[ - Union[ - "HTTPQueryDict", - Mapping[str, Union[str, Iterable[str]]], - ] - ] = None - ): - super().__init__() - if initial: - if not isinstance(initial, Mapping): - raise TypeError( - "HTTPQueryDict expects a mapping-like object, " - f"got {type(initial).__name__}", - ) - for key, value in initial.items(): - if isinstance(value, (str, bytes)): - self[key] = [value] - else: - self[key] = list(value) - - def __setitem__(self, key: str, value: Union[str, Iterable[str]]) -> None: - super().__setitem__( - key, - [value] if isinstance(value, (str, bytes)) else list(value), - ) - - def copy(self) -> "HTTPQueryDict": - return HTTPQueryDict(self) - - def extend( - self, - other: Optional[ - Union[ - "HTTPQueryDict", - Mapping[str, Union[str, Iterable[str]]], - ] - ], - ) -> "HTTPQueryDict": - """Merges other keys and values.""" - if other is None: - return self - if not isinstance(other, Mapping): - raise TypeError( - "extend() expects a mapping-like object, " - f"got {type(other).__name__}", - ) - for key, value in other.items(): - normalized = ( - [value] if isinstance(value, (str, bytes)) else list(value) - ) - if key in self: - self[key] += normalized - else: - self[key] = normalized - return self - - def __str__(self) -> str: - """Convert dictionary to a URL-encoded query string.""" - query_list = [(k, v) for k, values in self.items() for v in values] - query_list.sort(key=lambda x: (x[0], x[1])) # Sort by key, then value - return urllib.parse.urlencode(query_list, quote_via=urllib.parse.quote) - - -def quote( - resource: str, - safe: str = "/", - encoding: Optional[str] = None, - errors: Optional[str] = None, -) -> str: - """ - Wrapper to urllib.parse.quote() replacing back to '~' for older python - versions. 
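The `quote()` wrapper being removed here existed to undo `%7E` escaping on old Python versions; since Python 3.7, urllib treats `~` as an RFC 3986 unreserved character, so the stdlib call suffices. A quick standalone check, with the removed `queryencode()`'s `safe=""` behavior shown for comparison:

```python
from urllib.parse import quote

# '~' stays unescaped; the default safe="/" keeps path separators.
print(quote("docs/~admin/report 1.txt"))
# docs/~admin/report%201.txt

# queryencode() was quote() with safe="", so '/' is escaped as well.
print(quote("docs/~admin/report 1.txt", safe=""))
# docs%2F~admin%2Freport%201.txt
```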
- """ - return urllib.parse.quote( - resource, - safe=safe, - encoding=encoding, - errors=errors, - ).replace("%7E", "~") - - -def queryencode( - query: str, - safe: str = "", - encoding: Optional[str] = None, - errors: Optional[str] = None, -) -> str: - """Encode query parameter value.""" - return quote(query, safe, encoding, errors) - - def headers_to_strings( headers: Mapping[str, str | list[str] | tuple[str]], titled_key: bool = False, @@ -214,119 +125,17 @@ def headers_to_strings( item = re.sub( r"Credential=([^/]+)", "Credential=*REDACTED*", - re.sub(r"Signature=([0-9a-f]+)", "Signature=*REDACTED*", item), + re.sub( + r"Signature=([0-9a-f]+)", + "Signature=*REDACTED*", + item, + flags=re.IGNORECASE, + ), ) if titled_key else item values.append(f"{key}: {item}") return "\n".join(values) -def _validate_sizes(object_size: int, part_size: int): - """Validate object and part size.""" - if part_size > 0: - if part_size < MIN_PART_SIZE: - raise ValueError( - f"part size {part_size} is not supported; minimum allowed 5MiB" - ) - if part_size > MAX_PART_SIZE: - raise ValueError( - f"part size {part_size} is not supported; maximum allowed 5GiB" - ) - - if object_size >= 0: - if object_size > MAX_MULTIPART_OBJECT_SIZE: - raise ValueError( - f"object size {object_size} is not supported; " - f"maximum allowed 5TiB" - ) - elif part_size <= 0: - raise ValueError( - "valid part size must be provided when object size is unknown", - ) - - -def _get_part_info(object_size: int, part_size: int): - """Compute part information for object and part size.""" - _validate_sizes(object_size, part_size) - - if object_size < 0: - return part_size, -1 - - if part_size > 0: - part_size = min(part_size, object_size) - return part_size, math.ceil(object_size / part_size) if part_size else 1 - - part_size = math.ceil( - math.ceil(object_size / MAX_MULTIPART_COUNT) / MIN_PART_SIZE, - ) * MIN_PART_SIZE - return part_size, math.ceil(object_size / part_size) if part_size else 1 - - -def get_part_info(object_size: int, part_size: int) -> tuple[int, int]: - """Compute part information for object and part size.""" - part_size, part_count = _get_part_info(object_size, part_size) - if part_count > MAX_MULTIPART_COUNT: - raise ValueError( - f"object size {object_size} and part size {part_size} " - f"make more than {MAX_MULTIPART_COUNT} parts for upload" - ) - return part_size, part_count - - -class ProgressType(Protocol): - """typing stub for Put/Get object progress.""" - - def set_meta(self, object_name: str, total_length: int): - """Set process meta information.""" - - def update(self, length: int): - """Set current progress length.""" - - -def read_part_data( - *, - stream: BinaryIO, - size: int, - part_data: bytes = b"", - progress: Optional[ProgressType] = None, - hashers: Optional[Dict[Algorithm, Hasher]] = None, -) -> bytes: - """Read part data of given size from stream.""" - reset_hashers(hashers) - initial_length = len(part_data) - size -= initial_length - if part_data: - update_hashers(hashers, part_data, initial_length) - while size: - data = stream.read(size) - if not data: - break # EOF reached - if not isinstance(data, bytes): - raise ValueError("read() must return 'bytes' object") - part_data += data - size -= len(data) - update_hashers( - hashers, - data, - len(data) - (initial_length if size == 0 else 0), - ) - if progress: - progress.update(len(data)) - return part_data - - -def makedirs(path: str): - """Wrapper of os.makedirs() ignores errno.EEXIST.""" - try: - if path: - os.makedirs(path) - except OSError as 
exc: # Python >2.5 - if exc.errno != errno.EEXIST: - raise - - if not os.path.isdir(path): - raise ValueError(f"path {path} is not a directory") from exc - - def check_bucket_name( bucket_name: str, strict: bool = False, @@ -336,43 +145,45 @@ def check_bucket_name( if strict: if not _BUCKET_NAME_REGEX.match(bucket_name): - raise ValueError(f'invalid bucket name {bucket_name}') + raise ValueError(f"invalid bucket name {bucket_name}") else: if not _OLD_BUCKET_NAME_REGEX.match(bucket_name): - raise ValueError(f'invalid bucket name {bucket_name}') + raise ValueError(f"invalid bucket name {bucket_name}") if _IPV4_REGEX.match(bucket_name): - raise ValueError(f'bucket name {bucket_name} must not be formatted ' - 'as an IP address') + raise ValueError( + f"bucket name {bucket_name} must not be formatted as an IP address", + ) unallowed_successive_chars = ['..', '.-', '-.'] if any(x in bucket_name for x in unallowed_successive_chars): - raise ValueError(f'bucket name {bucket_name} contains invalid ' - 'successive characters') + raise ValueError( + f"bucket name {bucket_name} contains invalid successive characters", + ) - if ( - s3_check and + if s3_check and ( bucket_name.startswith("xn--") or bucket_name.endswith("-s3alias") or bucket_name.endswith("--ol-s3") ): - raise ValueError(f"bucket name {bucket_name} must not start with " - "'xn--' and must not end with '--s3alias' or " - "'--ol-s3'") + raise ValueError( + f"bucket name {bucket_name} must not start with 'xn--' and " + f"must not end with '--s3alias' or '--ol-s3'" + ) -def check_non_empty_string(string: str | bytes): +def _check_non_empty_string(string: str | bytes, kind: str): """Check whether given string is not empty.""" try: if not string.strip(): - raise ValueError() + raise ValueError(f"{kind} must be a non-empty string or bytes") except AttributeError as exc: - raise TypeError() from exc + raise TypeError(f"{kind} must be a string or bytes") from exc def check_object_name(object_name: str): """Check whether given object name is valid.""" - check_non_empty_string(object_name) + _check_non_empty_string(object_name, "object name") tokens = object_name.split("/") if "." in tokens or ".." in tokens: raise ValueError( @@ -380,59 +191,9 @@ def check_object_name(object_name: str): ) -def is_valid_policy_type(policy: str | bytes): - """ - Validate if policy is type str - - :param policy: S3 style Bucket policy. - :return: True if policy parameter is of a valid type, 'string'. - Raise :exc:`TypeError` otherwise. - """ - if not isinstance(policy, (str, bytes)): - raise TypeError("policy must be str or bytes type") - - check_non_empty_string(policy) - - return True - - -def check_ssec(sse: Optional[SseCustomerKey]): - """Check sse is SseCustomerKey type or not.""" - if sse and not isinstance(sse, SseCustomerKey): - raise ValueError("SseCustomerKey type is required") - - -def check_sse(sse: Optional[Sse]): - """Check sse is Sse type or not.""" - if sse and not isinstance(sse, Sse): - raise ValueError("Sse type is required") - - -def md5sum_hash(data: Optional[str | bytes]) -> Optional[str]: - """Compute MD5 of data and return hash as Base64 encoded value.""" - if data is None: - return None - - # indicate md5 hashing algorithm is not used in a security context. - # Refer https://bugs.python.org/issue9216 for more information. 
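For context on the removed `md5sum_hash()`: it produced the base64-encoded Content-MD5 header value, the same value `_delete_objects` and `_complete_multipart_upload` still compute later in this diff via `base64_string(MD5.hash(body))`. A standalone equivalent (`content_md5` is an illustrative name, not SDK API):

```python
import base64
import hashlib

def content_md5(data: bytes) -> str:
    # usedforsecurity=False marks MD5 as an integrity checksum only,
    # per the bugs.python.org/issue9216 note above.
    hasher = hashlib.new("md5", usedforsecurity=False)
    hasher.update(data)
    return base64.b64encode(hasher.digest()).decode()

print(content_md5(b""))  # 1B2M2Y8AsgTpgAmY7PhCfg==
```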
- hasher = hashlib.new( # type: ignore[call-arg] - "md5", - usedforsecurity=False, - ) - hasher.update(data.encode() if isinstance(data, str) else data) - md5sum = base64.b64encode(hasher.digest()) - return md5sum.decode() if isinstance(md5sum, bytes) else md5sum - - -def sha256_hash(data: Optional[str | bytes]) -> str: - """Compute SHA-256 of data and return hash as hex encoded value.""" - data = data or b"" - hasher = hashlib.sha256() - hasher.update(data.encode() if isinstance(data, str) else data) - sha256sum = hasher.hexdigest() - if isinstance(sha256sum, bytes): - return sha256sum.decode() - return sha256sum +def check_policy(policy: str | bytes): + """Check whether given policy is valid.""" + _check_non_empty_string(policy, "policy") def url_replace( @@ -479,73 +240,15 @@ def normalize_headers(headers: Optional[HTTPHeaderDict]) -> HTTPHeaderDict: return normalized_headers -def _get_aws_info( - host: str, - https: bool, - region: Optional[str], -) -> tuple[Optional[dict], Optional[str]]: - """Extract AWS domain information. """ - - if not _HOSTNAME_REGEX.match(host): - return (None, None) - - if _AWS_ELB_ENDPOINT_REGEX.match(host): - region_in_host = host.split(".elb.amazonaws.com", 1)[0].split(".")[-1] - return (None, region or region_in_host) - - if not _AWS_ENDPOINT_REGEX.match(host): - return (None, None) - - if host.startswith("ec2-"): - return (None, None) - - if not _AWS_S3_ENDPOINT_REGEX.match(host): - raise ValueError(f"invalid Amazon AWS host {host}") - - matcher = _AWS_S3_PREFIX_REGEX.match(host) - end = matcher.end() if matcher else 0 - aws_s3_prefix = host[:end] - - if "s3-accesspoint" in aws_s3_prefix and not https: - raise ValueError(f"use HTTPS scheme for host {host}") - - tokens = host[end:].split(".") - dualstack = tokens[0] == "dualstack" - if dualstack: - tokens = tokens[1:] - region_in_host = "" - if tokens[0] not in ["vpce", "amazonaws"]: - region_in_host = tokens[0] - tokens = tokens[1:] - aws_domain_suffix = ".".join(tokens) - - if host in "s3-external-1.amazonaws.com": - region_in_host = "us-east-1" - - if host in ["s3-us-gov-west-1.amazonaws.com", - "s3-fips-us-gov-west-1.amazonaws.com"]: - region_in_host = "us-gov-west-1" - - if (aws_domain_suffix.endswith(".cn") and - not aws_s3_prefix.endswith("s3-accelerate.") and - not region_in_host and - not region): - raise ValueError( - f"region missing in Amazon S3 China endpoint {host}", - ) - - return ({"s3_prefix": aws_s3_prefix, - "domain_suffix": aws_domain_suffix, - "region": region or region_in_host or None, - "dualstack": dualstack}, None) - - -def _parse_url(endpoint: str) -> urllib.parse.SplitResult: +def parse_url(endpoint: str) -> urllib.parse.SplitResult: """Parse url string.""" url = urllib.parse.urlsplit(endpoint) host = url.hostname + if not host: + raise ValueError("hostname in endpoint is missing") + if url.scheme.lower() not in ["http", "https"]: raise ValueError("scheme in endpoint must be http or https") @@ -591,13 +294,13 @@ class BaseURL: _accelerate_host_flag: bool def __init__(self, endpoint: str, region: Optional[str]): - url = _parse_url(endpoint) + url = parse_url(endpoint) - if region and not _REGION_REGEX.match(region): + if region and not REGION_REGEX.match(region): raise ValueError(f"invalid region {region}") hostname = url.hostname or "" - self._aws_info, region_in_host = _get_aws_info( + self._aws_info, region_in_host = self._get_aws_info( hostname, url.scheme == "https", region) self._virtual_style_flag = ( self._aws_info is not None or hostname.endswith("aliyuncs.com") @@ -611,6 
+314,68 @@ def __init__(self, endpoint: str, region: Optional[str]): self._aws_info["s3_prefix"].endswith("s3-accelerate.") ) + @staticmethod + def _get_aws_info( + host: str, + https: bool, + region: Optional[str], + ) -> tuple[Optional[dict], Optional[str]]: + """Extract AWS domain information. """ + + if not _HOSTNAME_REGEX.match(host): + return (None, None) + + if _AWS_ELB_ENDPOINT_REGEX.match(host): + region_in_host = host.split( + ".elb.amazonaws.com", 1)[0].split(".")[-1] + return (None, region or region_in_host) + + if not _AWS_ENDPOINT_REGEX.match(host): + return (None, None) + + if host.startswith("ec2-"): + return (None, None) + + if not _AWS_S3_ENDPOINT_REGEX.match(host): + raise ValueError(f"invalid Amazon AWS host {host}") + + matcher = _AWS_S3_PREFIX_REGEX.match(host) + end = matcher.end() if matcher else 0 + aws_s3_prefix = host[:end] + + if "s3-accesspoint" in aws_s3_prefix and not https: + raise ValueError(f"use HTTPS scheme for host {host}") + + tokens = host[end:].split(".") + dualstack = tokens[0] == "dualstack" + if dualstack: + tokens = tokens[1:] + region_in_host = "" + if tokens[0] not in ["vpce", "amazonaws"]: + region_in_host = tokens[0] + tokens = tokens[1:] + aws_domain_suffix = ".".join(tokens) + + if host in "s3-external-1.amazonaws.com": + region_in_host = "us-east-1" + + if host in ["s3-us-gov-west-1.amazonaws.com", + "s3-fips-us-gov-west-1.amazonaws.com"]: + region_in_host = "us-gov-west-1" + + if (aws_domain_suffix.endswith(".cn") and + not aws_s3_prefix.endswith("s3-accelerate.") and + not region_in_host and + not region): + raise ValueError( + f"region missing in Amazon S3 China endpoint {host}", + ) + + return ({"s3_prefix": aws_s3_prefix, + "domain_suffix": aws_domain_suffix, + "region": region or region_in_host or None, + "dualstack": dualstack}, None) + @property def region(self) -> Optional[str]: """Get region.""" @@ -800,53 +565,6 @@ def build( return url_replace(url=url, netloc=netloc, path=path) -@dataclass(frozen=True) -class ObjectWriteResult: - """Result class of any APIs doing object creation.""" - headers: HTTPHeaderDict - bucket_name: str - object_name: str - etag: str - version_id: Optional[str] = None - last_modified: Optional[datetime] = None - location: Optional[str] = None - checksum_crc32: Optional[str] = None - checksum_crc32c: Optional[str] = None - checksum_crc64nvme: Optional[str] = None - checksum_sha1: Optional[str] = None - checksum_sha256: Optional[str] = None - checksum_type: Optional[str] = None - - @classmethod - def new( - cls: Type[ObjectWriteResult], - *, - headers: HTTPHeaderDict, - bucket_name: str, - object_name: str, - etag: Optional[str] = None, - version_id: Optional[str] = None, - last_modified: Optional[datetime] = None, - location: Optional[str] = None, - ) -> ObjectWriteResult: - """Creates object write result.""" - return cls( - headers=headers, - bucket_name=bucket_name, - object_name=object_name, - etag=etag or headers.get("etag", "").replace('"', ""), - version_id=version_id or headers.get("x-amz-version-id"), - last_modified=last_modified, - location=location, - checksum_crc32=headers.get("x-amz-checksum-crc32"), - checksum_crc32c=headers.get("x-amz-checksum-crc32c"), - checksum_crc64nvme=headers.get("x-amz-checksum-crc64nvme"), - checksum_sha1=headers.get("x-amz-checksum-sha1"), - checksum_sha256=headers.get("x-amz-checksum-sha256"), - checksum_type=headers.get("x-amz-checksum-type"), - ) - - class Worker(Thread): """ Thread executing tasks from a given tasks queue """ diff --git a/minio/legalhold.py 
b/minio/legalhold.py
deleted file mode 100644
index a5bb0d00..00000000
--- a/minio/legalhold.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
-# 2020 MinIO, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Request/response of PutObjectLegalHold and GetObjectLegalHold S3 APIs."""
-
-from __future__ import absolute_import, annotations
-
-from dataclasses import dataclass
-from typing import Optional, Type, TypeVar
-from xml.etree import ElementTree as ET
-
-from .xml import Element, SubElement, findtext
-
-A = TypeVar("A", bound="LegalHold")
-
-
-@dataclass(frozen=True)
-class LegalHold:
-    """Legal hold configuration."""
-
-    status: bool = False
-
-    @classmethod
-    def fromxml(cls: Type[A], element: ET.Element) -> A:
-        """Create new object with values from XML element."""
-        status = findtext(element, "Status")
-        return cls(status=status == "ON")
-
-    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
-        """Convert to XML."""
-        element = Element("LegalHold")
-        SubElement(element, "Status", "ON" if self.status is True else "OFF")
-        return element
diff --git a/minio/lifecycleconfig.py b/minio/lifecycleconfig.py
deleted file mode 100644
index 054287bd..00000000
--- a/minio/lifecycleconfig.py
+++ /dev/null
@@ -1,344 +0,0 @@
-# -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
-# 2015, 2016, 2017, 2018, 2019 MinIO, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Request/response of PutBucketLifecycleConfiguration and
-GetBucketLifecycleConfiguration APIs.
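The `LegalHold` class deleted above round-trips the two-state legal-hold document used by PutObjectLegalHold and GetObjectLegalHold; `minio/models.py` now provides it, per the imports later in this diff. The full wire format is small enough to show standalone (`legal_hold_xml` is an illustrative helper, not SDK API):

```python
from xml.etree import ElementTree as ET

def legal_hold_xml(status: bool) -> str:
    # Same document LegalHold.toxml() produced: ON when held, else OFF.
    root = ET.Element("LegalHold")
    ET.SubElement(root, "Status").text = "ON" if status else "OFF"
    return ET.tostring(root, encoding="unicode")

print(legal_hold_xml(True))   # <LegalHold><Status>ON</Status></LegalHold>
print(legal_hold_xml(False))  # <LegalHold><Status>OFF</Status></LegalHold>
```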
-""" -# pylint: disable=invalid-name - -from __future__ import absolute_import, annotations - -from abc import ABC -from dataclasses import dataclass -from datetime import datetime -from typing import Optional, Type, TypeVar, cast -from xml.etree import ElementTree as ET - -from .commonconfig import BaseRule -from .time import from_iso8601utc, to_iso8601utc -from .xml import Element, SubElement, find, findall, findtext - - -@dataclass(frozen=True) -class DateDays(ABC): - """Base class holds date and days of Transition and Expiration.""" - date: Optional[datetime] = None - days: Optional[int] = None - - @staticmethod - def parsexml( - element: ET.Element) -> tuple[Optional[datetime], Optional[int]]: - """Parse XML to date and days.""" - date = from_iso8601utc(findtext(element, "Date")) - days = findtext(element, "Days") - return date, int(days) if days else None - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - if self.date is not None: - SubElement( - element, "Date", to_iso8601utc(self.date), - ) - if self.days: - SubElement(element, "Days", str(self.days)) - return element - - -A = TypeVar("A", bound="Transition") - - -@dataclass(frozen=True) -class Transition(DateDays): - """Transition.""" - storage_class: Optional[str] = None - - @classmethod - def fromxml(cls: Type[A], element: ET.Element) -> A: - """Create new object with values from XML element.""" - element = cast(ET.Element, find(element, "Transition", True)) - date, days = cls.parsexml(element) - return cls(date, days, findtext(element, "StorageClass")) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "Transition") - super().toxml(element) - if self.storage_class: - SubElement(element, "StorageClass", self.storage_class) - return element - - -B = TypeVar("B", bound="NoncurrentVersionTransition") - - -@dataclass(frozen=True) -class NoncurrentVersionTransition: - """Noncurrent version transition.""" - noncurrent_days: Optional[int] = None - storage_class: Optional[str] = None - newer_noncurrent_versions: Optional[int] = None - - @classmethod - def fromxml(cls: Type[B], element: ET.Element) -> B: - """Create new object with values from XML element.""" - element = cast( - ET.Element, - find(element, "NoncurrentVersionTransition", True), - ) - noncurrent_days = findtext(element, "NoncurrentDays") - versions = findtext(element, "NewerNoncurrentVersions") - return cls( - int(noncurrent_days) if noncurrent_days else None, - findtext(element, "StorageClass"), - int(versions) if versions else None, - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "NoncurrentVersionTransition") - if self.noncurrent_days: - SubElement(element, "NoncurrentDays", str(self.noncurrent_days)) - if self.storage_class: - SubElement(element, "StorageClass", self.storage_class) - if self.newer_noncurrent_versions: - SubElement(element, "NewerNoncurrentVersions", - str(self.newer_noncurrent_versions)) - return element - - -C = TypeVar("C", bound="NoncurrentVersionExpiration") - - -@dataclass(frozen=True) -class NoncurrentVersionExpiration: - """Noncurrent version expiration.""" - noncurrent_days: Optional[int] = None - newer_noncurrent_versions: Optional[int] = None - - 
@classmethod - def fromxml(cls: Type[C], element: ET.Element) -> C: - """Create new object with values from XML element.""" - element = cast( - ET.Element, - find(element, "NoncurrentVersionExpiration", True), - ) - noncurrent_days = findtext(element, "NoncurrentDays") - versions = findtext(element, "NewerNoncurrentVersions") - return cls(int(noncurrent_days) if noncurrent_days else None, - int(versions) if versions else None) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "NoncurrentVersionExpiration") - if self.noncurrent_days: - SubElement(element, "NoncurrentDays", str(self.noncurrent_days)) - if self.newer_noncurrent_versions: - SubElement(element, "NewerNoncurrentVersions", - str(self.newer_noncurrent_versions)) - return element - - -D = TypeVar("D", bound="Expiration") - - -@dataclass(frozen=True) -class Expiration(DateDays): - """Expiration.""" - expired_object_delete_marker: Optional[bool] = None - - @classmethod - def fromxml(cls: Type[D], element: ET.Element) -> D: - """Create new object with values from XML element.""" - element = cast(ET.Element, find(element, "Expiration", True)) - date, days = cls.parsexml(element) - expired_object_delete_marker = findtext( - element, "ExpiredObjectDeleteMarker", - ) - if expired_object_delete_marker is None: - return cls(date, days, None) - - if expired_object_delete_marker.title() not in ["False", "True"]: - raise ValueError( - "value of ExpiredObjectDeleteMarker must be " - "'True' or 'False'", - ) - return cls(date, days, expired_object_delete_marker.title() == "True") - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "Expiration") - super().toxml(element) - if self.expired_object_delete_marker is not None: - SubElement( - element, - "ExpiredObjectDeleteMarker", - str(self.expired_object_delete_marker).lower(), - ) - return element - - -E = TypeVar("E", bound="AbortIncompleteMultipartUpload") - - -@dataclass(frozen=True) -class AbortIncompleteMultipartUpload: - """Abort incomplete multipart upload.""" - days_after_initiation: Optional[int] = None - - @classmethod - def fromxml(cls: Type[E], element: ET.Element) -> E: - """Create new object with values from XML element.""" - element = cast( - ET.Element, - find(element, "AbortIncompleteMultipartUpload", True), - ) - days_after_initiation = findtext(element, "DaysAfterInitiation") - return cls( - int(days_after_initiation) if days_after_initiation else None, - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "AbortIncompleteMultipartUpload") - if self.days_after_initiation: - SubElement( - element, - "DaysAfterInitiation", - str(self.days_after_initiation), - ) - return element - - -F = TypeVar("F", bound="Rule") - - -@dataclass(frozen=True) -class Rule(BaseRule): - """Lifecycle rule. 
""" - abort_incomplete_multipart_upload: Optional[ - AbortIncompleteMultipartUpload] = None - expiration: Optional[Expiration] = None - noncurrent_version_expiration: Optional[NoncurrentVersionExpiration] = None - noncurrent_version_transition: Optional[NoncurrentVersionTransition] = None - transition: Optional[Transition] = None - - def __post_init__(self): - if (not self.abort_incomplete_multipart_upload and not self.expiration - and not self.noncurrent_version_expiration - and not self.noncurrent_version_transition - and not self.transition): - raise ValueError( - "at least one of action (AbortIncompleteMultipartUpload, " - "Expiration, NoncurrentVersionExpiration, " - "NoncurrentVersionTransition or Transition) must be specified " - "in a rule") - - def _require_subclass_implementation(self) -> None: - """Dummy abstract method to enforce abstract class behavior.""" - - @classmethod - def fromxml(cls: Type[F], element: ET.Element) -> F: - """Create new object with values from XML element.""" - status, rule_filter, rule_id = cls.parsexml(element) - abort_incomplete_multipart_upload = ( - None if find(element, "AbortIncompleteMultipartUpload") is None - else AbortIncompleteMultipartUpload.fromxml(element) - ) - expiration = ( - None if find(element, "Expiration") is None - else Expiration.fromxml(element) - ) - noncurrent_version_expiration = ( - None if find(element, "NoncurrentVersionExpiration") is None - else NoncurrentVersionExpiration.fromxml(element) - ) - noncurrent_version_transition = ( - None if find(element, "NoncurrentVersionTransition") is None - else NoncurrentVersionTransition.fromxml(element) - ) - transition = ( - None if find(element, "Transition") is None - else Transition.fromxml(element) - ) - - return cls( - status=status, - rule_filter=rule_filter, - rule_id=rule_id, - abort_incomplete_multipart_upload=( - abort_incomplete_multipart_upload - ), - expiration=expiration, - noncurrent_version_expiration=noncurrent_version_expiration, - noncurrent_version_transition=noncurrent_version_transition, - transition=transition, - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "Rule") - super().toxml(element) - if self.abort_incomplete_multipart_upload: - self.abort_incomplete_multipart_upload.toxml(element) - if self.expiration: - self.expiration.toxml(element) - if self.noncurrent_version_expiration: - self.noncurrent_version_expiration.toxml(element) - if self.noncurrent_version_transition: - self.noncurrent_version_transition.toxml(element) - if self.transition: - self.transition.toxml(element) - return element - - -G = TypeVar("G", bound="LifecycleConfig") - - -@dataclass(frozen=True) -class LifecycleConfig: - """Lifecycle configuration.""" - rules: list[Rule] - - @classmethod - def fromxml(cls: Type[G], element: ET.Element) -> G: - """Create new object with values from XML element.""" - elements = findall(element, "Rule") - rules = [] - for tag in elements: - rules.append(Rule.fromxml(tag)) - return cls(rules) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - element = Element("LifecycleConfiguration") - for rule in self.rules: - rule.toxml(element) - return element diff --git a/minio/api.py b/minio/minio.py similarity index 81% rename from minio/api.py rename to minio/minio.py index fbc9d557..b02d4ccc 100644 --- a/minio/api.py +++ b/minio/minio.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 
-*- # MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2015, 2016, 2017 MinIO, Inc. +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -26,70 +26,64 @@ Simple Storage Service (aka S3) client to perform bucket and object operations. """ -from __future__ import absolute_import, annotations +from __future__ import annotations import io import itertools import json +import math import os import tarfile from collections.abc import Iterable from datetime import datetime, timedelta from io import BytesIO from random import random -from typing import Any, BinaryIO, Iterator, Optional, TextIO, Union, cast +from typing import Any, BinaryIO, Dict, Iterator, Optional, TextIO, Union, cast from urllib.parse import quote, urlencode, urlunsplit -from xml.etree import ElementTree as ET import certifi import urllib3 from urllib3 import Retry -from urllib3._collections import HTTPHeaderDict - -try: - from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined] -except ImportError: - from urllib3.response import HTTPResponse as BaseHTTPResponse - +from urllib3.filepost import encode_multipart_formdata from urllib3.util import Timeout from . import time +from .args import (Directive, ProgressType, PutObjectFanOutEntry, + SnowballObject, SourceObject) from .checksum import (MD5, SHA256, UNSIGNED_PAYLOAD, ZERO_MD5_HASH, - ZERO_SHA256_HASH, Algorithm, base64_string, + ZERO_SHA256_HASH, Algorithm, Hasher, base64_string, base64_string_to_sum, hex_string, make_headers, - new_hashers) -from .commonconfig import (COPY, REPLACE, ComposeSource, CopySource, - SnowballObject, Tags) + new_hashers, reset_hashers, update_hashers) +from .compat import (HTTPHeaderDict, HTTPQueryDict, HTTPResponse, makedirs, + queryencode) from .credentials import StaticProvider from .credentials.providers import Provider -from .datatypes import (Bucket, CompleteMultipartUploadResult, EventIterable, - ListAllMyBucketsResult, ListMultipartUploadsResult, - ListPartsResult, Object, Part, PostPolicy, - parse_copy_object, parse_list_objects) -from .deleteobjects import (DeleteError, DeleteObject, DeleteRequest, - DeleteResult) from .error import InvalidResponseError, S3Error, ServerError -from .helpers import (_DEFAULT_USER_AGENT, MAX_MULTIPART_COUNT, - MAX_MULTIPART_OBJECT_SIZE, MAX_PART_SIZE, MIN_PART_SIZE, - BaseURL, HTTPQueryDict, ObjectWriteResult, ProgressType, - RegionMap, ThreadPool, check_bucket_name, - check_object_name, check_sse, check_ssec, get_part_info, - headers_to_strings, is_valid_policy_type, makedirs, - normalize_headers, queryencode, read_part_data) -from .legalhold import LegalHold -from .lifecycleconfig import LifecycleConfig -from .notificationconfig import NotificationConfig -from .objectlockconfig import ObjectLockConfig -from .replicationconfig import ReplicationConfig -from .retention import Retention -from .select import SelectObjectReader, SelectRequest +from .helpers import (MAX_MULTIPART_COUNT, MAX_MULTIPART_OBJECT_SIZE, + MAX_PART_SIZE, MIN_PART_SIZE, BaseURL, RegionMap, + ThreadPool, check_bucket_name, check_object_name, + check_policy, get_user_agent, headers_to_strings, + normalize_headers) +from .models import (AbortMultipartUploadResponse, Checksum, + CompleteMultipartUploadResult, CopyObjectResult, + CORSConfig, CreateBucketConfiguration, + CreateMultipartUploadResponse, DeleteObjectsResponse, + DeleteRequest, DeleteResult, EventIterable, + GenericResponse, 
GetObjectAclResponse, + GetObjectAttributesResponse, GetObjectResponse, + HeadBucketResponse, HeadObjectResponse, LegalHold, + LifecycleConfig, ListAllMyBucketsResult, + ListBucketsResponse, ListMultipartUploadsResponse, + ListPartsResponse, NotificationConfig, Object, + ObjectLockConfig, ObjectWriteResponse, Part, PostPolicy, + PromptObjectResponse, PutObjectFanOutResponse, + ReplicationConfig, Retention, SelectObjectContentRequest, + SelectObjectResponse, SSEConfig, StatObjectResponse, + Tagging, Tags, UploadPartCopyResponse, UploadPartResponse, + VersioningConfig, parse_list_objects) from .signer import presign_v4, sign_v4_s3 from .sse import Sse, SseCustomerKey -from .sseconfig import SSEConfig -from .tagging import Tagging -from .time import to_http_header, to_iso8601utc -from .versioningconfig import VersioningConfig -from .xml import Element, SubElement, findtext, getbytes, marshal, unmarshal +from .xml import ET, Element, SubElement, getbytes, marshal, unmarshal class Minio: @@ -198,18 +192,14 @@ def __init__( ... ) """ # Validate http client has correct base class. - if http_client and not isinstance(http_client, urllib3.PoolManager): - raise TypeError( - "HTTP client should be urllib3.PoolManager like object, " - f"got {type(http_client).__name__}", - ) - self._region_map = RegionMap() self._base_url = BaseURL( ("https://" if secure else "http://") + endpoint, region, ) - self._user_agent = _DEFAULT_USER_AGENT + self._user_agent = get_user_agent( + app_name="", app_version="", default=True, + ) self._trace_stream = None if access_key: if secret_key is None: @@ -235,6 +225,97 @@ def __del__(self): if hasattr(self, "_http"): # Only required for unit test run self._http.clear() + @staticmethod + def _get_part_info(object_size: int, part_size: int) -> tuple[int, int]: + """Compute part information for object and part size.""" + def _calc_part_info(object_size: int, part_size: int): + """Compute part information for object and part size.""" + def _validate_sizes(): + """Validate object and part size.""" + if part_size > 0: + if part_size < MIN_PART_SIZE: + raise ValueError( + f"part size {part_size} is not supported; " + f"minimum allowed 5MiB" + ) + if part_size > MAX_PART_SIZE: + raise ValueError( + f"part size {part_size} is not supported; " + f"maximum allowed 5GiB" + ) + + if object_size >= 0: + if object_size > MAX_MULTIPART_OBJECT_SIZE: + raise ValueError( + f"object size {object_size} is not supported; " + f"maximum allowed 5TiB" + ) + elif part_size <= 0: + raise ValueError( + "valid part size must be provided when " + "object size is unknown", + ) + + _validate_sizes() + + if object_size < 0: + return part_size, -1 + + if part_size > 0: + part_size = min(part_size, object_size) + return ( + part_size, + math.ceil(object_size / part_size) if part_size else 1, + ) + + part_size = math.ceil( + math.ceil(object_size / MAX_MULTIPART_COUNT) / MIN_PART_SIZE, + ) * MIN_PART_SIZE + return ( + part_size, + math.ceil(object_size / part_size) if part_size else 1, + ) + + part_size, part_count = _calc_part_info(object_size, part_size) + if part_count > MAX_MULTIPART_COUNT: + raise ValueError( + f"object size {object_size} and part size {part_size} " + f"make more than {MAX_MULTIPART_COUNT} parts for upload" + ) + return part_size, part_count + + @staticmethod + def _read_part_data( + *, + stream: BinaryIO, + size: int, + part_data: bytes = b"", + progress: Optional[ProgressType] = None, + hashers: Optional[Dict[Algorithm, Hasher]] = None, + ) -> bytes: + """Read part data of given size 
from stream.""" + reset_hashers(hashers) + initial_length = len(part_data) + size -= initial_length + if part_data: + update_hashers(hashers, part_data, initial_length) + while size: + data = stream.read(size) + if not data: + break # EOF reached + if not isinstance(data, bytes): + raise ValueError("read() must return 'bytes' object") + part_data += data + size -= len(data) + update_hashers( + hashers, + data, + len(data) - (initial_length if size == 0 else 0), + ) + if progress: + progress.update(len(data)) + return part_data + @staticmethod def _gen_read_headers( *, @@ -259,9 +340,11 @@ def _gen_read_headers( if not_match_etag: headers["if-none-match"] = not_match_etag if modified_since: - headers["if-modified-since"] = to_http_header(modified_since) + headers["if-modified-since"] = time.to_http_header(modified_since) if unmodified_since: - headers["if-unmodified-since"] = to_http_header(unmodified_since) + headers["if-unmodified-since"] = ( + time.to_http_header(unmodified_since) + ) if fetch_checksum: headers["x-amz-checksum-mode"] = "ENABLED" return headers @@ -290,7 +373,7 @@ def _gen_write_headers( if retention and retention.mode: headers["x-amz-object-lock-mode"] = retention.mode headers["x-amz-object-lock-retain-until-date"] = cast( - str, to_iso8601utc(retention.retain_until_date), + str, time.to_iso8601utc(retention.retain_until_date), ) if legal_hold: headers["x-amz-object-lock-legal-hold"] = "ON" @@ -299,7 +382,7 @@ def _gen_write_headers( def _handle_redirect_response( self, method: str, - response: BaseHTTPResponse, + response: HTTPResponse, bucket_name: Optional[str] = None, retry: bool = False, ) -> tuple[Optional[str], Optional[str]]: @@ -338,7 +421,8 @@ def _url_open( no_body_trace: bool = False, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, - ) -> BaseHTTPResponse: + skip_signing: bool = False, + ) -> HTTPResponse: """Execute HTTP request.""" url = self._base_url.build( method=method, @@ -383,7 +467,7 @@ def _url_open( date = time.utcnow() headers["x-amz-date"] = time.to_amz_date(date) - if self._provider is not None: + if self._provider is not None and not skip_signing: creds = self._provider.retrieve() if creds.session_token: headers["X-Amz-Security-Token"] = creds.session_token @@ -473,7 +557,7 @@ def _url_open( None, ) - response_error = S3Error.fromxml(response) if response.data else None + response_error = S3Error.new(response) if response.data else None if self._trace_stream: self._trace_stream.write("----------END-HTTP----------\n") @@ -505,6 +589,13 @@ def _url_open( if bucket_name else ("ResourceConflict", "Request resource conflicts"), ), + 412: lambda: ( + ("PreconditionFailed", + "At least one of the preconditions you specified did not hold") + ), + 416: lambda: ( + "InvalidRange", "The requested range cannot be satisfied", + ), 501: lambda: ( "MethodNotAllowed", "The specified method is not allowed against this resource", @@ -536,6 +627,61 @@ def _url_open( raise response_error + def _get_region( + self, + *, + bucket_name: Optional[str] = None, + region: Optional[str] = None, + extra_headers: Optional[HTTPHeaderDict] = None, + extra_query_params: Optional[HTTPQueryDict] = None, + ) -> str: + """ + Return region of given bucket either from region cache or set in + constructor. 
+ """ + + if ( + region is not None and self._base_url.region is not None and + region != self._base_url.region + ): + raise ValueError( + f"region must be {self._base_url.region}, but passed {region}", + ) + + if region is not None: + return region + + if self._base_url.region is not None: + return self._base_url.region + + if not bucket_name or not self._provider: + return "us-east-1" + + region = self._region_map.get(bucket_name) + if region: + return region + + # Execute GetBucketLocation REST API to get region of the bucket. + response = self._url_open( + method="GET", + region="us-east-1", + bucket_name=bucket_name, + query_params=HTTPQueryDict({"location": ""}), + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + + element = ET.fromstring(response.data.decode()) + if not element.text: + region = "us-east-1" + elif element.text == "EU" and self._base_url.is_aws_host: + region = "eu-west-1" + else: + region = element.text + + self._region_map.set(bucket_name, region) + return region + def _execute( self, *, @@ -550,7 +696,7 @@ def _execute( region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, - ) -> BaseHTTPResponse: + ) -> HTTPResponse: """Execute HTTP request.""" region = self._get_region( bucket_name=bucket_name, @@ -599,182 +745,662 @@ def _execute( ) raise exc.copy(cast(str, code), cast(str, message)) - def _get_region( + def _abort_multipart_upload( self, *, - bucket_name: Optional[str] = None, + bucket_name: str, + object_name: str, + upload_id: str, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, - ) -> str: - """ - Return region of given bucket either from region cache or set in - constructor. - """ - - if ( - region is not None and self._base_url.region is not None and - region != self._base_url.region - ): - raise ValueError( - f"region must be {self._base_url.region}, but passed {region}", - ) - - if region is not None: - return region - - if self._base_url.region is not None: - return self._base_url.region - - if not bucket_name or not self._provider: - return "us-east-1" - - region = self._region_map.get(bucket_name) - if region: - return region - - # Execute GetBucketLocation REST API to get region of the bucket. - response = self._url_open( - method="GET", - region="us-east-1", + ) -> AbortMultipartUploadResponse: + """Execute AbortMultipartUpload S3 API.""" + response = self._execute( + method="DELETE", bucket_name=bucket_name, - query_params=HTTPQueryDict({"location": ""}), + object_name=object_name, + query_params=HTTPQueryDict({'uploadId': upload_id}), + region=region, extra_headers=extra_headers, extra_query_params=extra_query_params, ) + return AbortMultipartUploadResponse( + headers=response.headers, + bucket_name=bucket_name, + region=region or self._region_map.get(bucket_name) or "", + object_name=object_name, + upload_id=upload_id, + ) - element = ET.fromstring(response.data.decode()) - if not element.text: - region = "us-east-1" - elif element.text == "EU" and self._base_url.is_aws_host: - region = "eu-west-1" - else: - region = element.text - - self._region_map.set(bucket_name, region) - return region - - def set_app_info(self, app_name: str, app_version: str): - """ - Set your application name and version to user agent header. - - Args: - app_name (str): - Application name. - - app_version (str): - Application version. 
- - Example: - >>> client.set_app_info("my_app", "1.0.2") - """ - if not (app_name and app_version): - raise ValueError("Application name/version cannot be empty.") - self._user_agent = f"{_DEFAULT_USER_AGENT} {app_name}/{app_version}" - - def trace_on(self, stream: TextIO): - """ - Enable http trace. - - Args: - stream (TextIO): - Stream for writing HTTP call tracing. - - Example: - >>> client.trace_on(sys.stdout) - """ - if not stream: - raise ValueError('Input stream for trace output is invalid.') - # Save new output stream. - self._trace_stream = stream - - def trace_off(self): - """Disable HTTP trace.""" - self._trace_stream = None - - def enable_accelerate_endpoint(self): - """Enables accelerate endpoint for Amazon S3 endpoint.""" - self._base_url.accelerate_host_flag = True - - def disable_accelerate_endpoint(self): - """Disables accelerate endpoint for Amazon S3 endpoint.""" - self._base_url.accelerate_host_flag = False - - def enable_dualstack_endpoint(self): - """Enables dualstack endpoint for Amazon S3 endpoint.""" - self._base_url.dualstack_host_flag = True - - def disable_dualstack_endpoint(self): - """Disables dualstack endpoint for Amazon S3 endpoint.""" - self._base_url.dualstack_host_flag = False - - def enable_virtual_style_endpoint(self): - """Enables virtual style endpoint.""" - self._base_url.virtual_style_flag = True - - def disable_virtual_style_endpoint(self): - """Disables virtual style endpoint.""" - self._base_url.virtual_style_flag = False - - def select_object_content( + def _complete_multipart_upload( self, *, bucket_name: str, object_name: str, - request: SelectRequest, + upload_id: str, + parts: list[Part], + ssec: Optional[SseCustomerKey] = None, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, - ) -> SelectObjectReader: - """ - Select content of an object by SQL expression. - - Args: - bucket_name (str): - Name of the bucket. - - object_name (str): - Object name in the bucket. - - request (SelectRequest): - Select request. - - region (Optional[str], default=None): - Region of the bucket to skip auto probing. - - extra_headers (Optional[HTTPHeaderDict], default=None): - Extra headers for advanced usage. - - extra_query_params (Optional[HTTPQueryDict], default=None): - Extra query parameters for advanced usage. - - Returns: - SelectObjectReader: - A reader object representing the results of the select - operation. - - Example: - >>> with client.select_object_content( - ... bucket_name="my-bucket", - ... object_name="my-object.csv", - ... request=SelectRequest( - ... expression="select * from S3Object", - ... input_serialization=CSVInputSerialization(), - ... output_serialization=CSVOutputSerialization(), - ... request_progress=True, - ... ), - ... ) as result: - ... for data in result.stream(): - ... print(data.decode()) - ... 
print(result.stats()) - """ + ) -> ObjectWriteResponse: + """Execute CompleteMultipartUpload S3 API.""" check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) check_object_name(object_name) - if not isinstance(request, SelectRequest): - raise ValueError("request must be SelectRequest type") - body = marshal(request) + element = Element("CompleteMultipartUpload") + for part in parts: + part.toxml(SubElement(element, "Part")) + body = getbytes(element) headers = HTTPHeaderDict( - {"Content-MD5": base64_string(MD5.hash(body))}, + { + "Content-Type": "application/xml", + "Content-MD5": base64_string(MD5.hash(body)), + }, + ) + if ssec: + headers.extend(ssec.headers()) + response = self._execute( + method="POST", + bucket_name=bucket_name, + object_name=object_name, + body=body, + headers=headers, + query_params=HTTPQueryDict({'uploadId': upload_id}), + region=region, + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + result = CompleteMultipartUploadResult.new(response) + return ObjectWriteResponse( + headers=response.headers, + bucket_name=bucket_name, + region=region or self._region_map.get(bucket_name) or "", + object_name=object_name, + etag=result.etag, + result=result, + ) + + def _copy_object( + self, + *, + bucket_name: str, + object_name: str, + source: SourceObject, + sse: Optional[Sse] = None, + user_metadata: Optional[HTTPHeaderDict] = None, + tags: Optional[Tags] = None, + retention: Optional[Retention] = None, + legal_hold: bool = False, + metadata_directive: Optional[str] = None, + tagging_directive: Optional[str] = None, + region: Optional[str] = None, + extra_headers: Optional[HTTPHeaderDict] = None, + extra_query_params: Optional[HTTPQueryDict] = None, + ) -> ObjectWriteResponse: + """Execute CopyObject S3 API.""" + check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) + check_object_name(object_name) + if source.offset or source.length: + raise ValueError("copy object with offset/length is unsupported") + if ( + metadata_directive is not None and + metadata_directive not in [Directive.COPY, Directive.REPLACE] + ): + raise ValueError( + f"metadata directive must be {Directive.COPY} or " + f"{Directive.REPLACE}", + ) + if ( + tagging_directive is not None and + tagging_directive not in [Directive.COPY, Directive.REPLACE] + ): + raise ValueError( + f"tagging directive must be {Directive.COPY} or " + f"{Directive.REPLACE}", + ) + headers = self._gen_write_headers( + user_metadata=user_metadata, + sse=sse, + tags=tags, + retention=retention, + legal_hold=legal_hold, + ) + if metadata_directive: + headers["x-amz-metadata-directive"] = metadata_directive + if tagging_directive: + headers["x-amz-tagging-directive"] = tagging_directive + headers.extend(source.headers) + response = self._execute( + method="PUT", + bucket_name=bucket_name, + object_name=object_name, + headers=headers, + region=region, + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + return ObjectWriteResponse( + headers=response.headers, + bucket_name=bucket_name, + region=region or self._region_map.get(bucket_name) or "", + object_name=object_name, + result=unmarshal(CopyObjectResult, response.data.decode()), + ) + + def _create_bucket( + self, + *, + bucket_name: str, + object_lock: bool = False, + location_config: Optional[ + CreateBucketConfiguration.Location] = None, + bucket_config: Optional[CreateBucketConfiguration.Bucket] = None, + tags: Optional[Tags] = None, + region: Optional[str] = None, + extra_headers: 
Optional[HTTPHeaderDict] = None, + extra_query_params: Optional[HTTPQueryDict] = None, + ) -> GenericResponse: + """Execute CreateBucket S3 API.""" + check_bucket_name(bucket_name, True, + s3_check=self._base_url.is_aws_host) + if self._base_url.region: + # Error out if region does not match with region passed via + # constructor. + if region and self._base_url.region != region: + raise ValueError( + f"region must be {self._base_url.region}, " + f"but passed {region}" + ) + location = self._base_url.region or region or "us-east-1" + headers = HTTPHeaderDict() + if object_lock: + headers["x-amz-bucket-object-lock-enabled"] = "true" + body = marshal( + CreateBucketConfiguration( + location_constraint=location, + location=location_config, + bucket=bucket_config, + tags=tags, + ), + ) + response = self._url_open( + method="PUT", + region=location, + bucket_name=bucket_name, + body=body, + headers=headers, + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + self._region_map.set(bucket_name, location) + return GenericResponse( + headers=response.headers, + bucket_name=bucket_name, + region=location, + ) + + def _create_multipart_upload( + self, + *, + bucket_name: str, + object_name: str, + headers: HTTPHeaderDict, + algorithm: Optional[Algorithm] = None, + region: Optional[str] = None, + extra_headers: Optional[HTTPHeaderDict] = None, + extra_query_params: Optional[HTTPQueryDict] = None, + ) -> CreateMultipartUploadResponse: + """Execute CreateMultipartUpload S3 API.""" + if not headers.get("Content-Type"): + headers["Content-Type"] = "application/octet-stream" + if algorithm: + headers["x-amz-checksum-algorithm"] = str(algorithm) + response = self._execute( + method="POST", + bucket_name=bucket_name, + object_name=object_name, + headers=headers, + query_params=HTTPQueryDict({"uploads": ""}), + region=region, + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + return CreateMultipartUploadResponse( + response=response, + bucket_name=bucket_name, + region=region or self._region_map.get(bucket_name) or "", + object_name=object_name, + ) + + def _delete_objects( + self, + *, + bucket_name: str, + objects: list[DeleteRequest.Object], + quiet: bool = False, + bypass_governance_mode: bool = False, + region: Optional[str] = None, + extra_headers: Optional[HTTPHeaderDict] = None, + extra_query_params: Optional[HTTPQueryDict] = None, + ) -> DeleteObjectsResponse: + """Execute DeleteObjects S3 API.""" + body = marshal(DeleteRequest(objects=objects, quiet=quiet)) + headers = HTTPHeaderDict( + {"Content-MD5": base64_string(MD5.hash(body))}, + ) + if bypass_governance_mode: + headers["x-amz-bypass-governance-retention"] = "true" + response = self._execute( + method="POST", + bucket_name=bucket_name, + body=body, + headers=headers, + query_params=HTTPQueryDict({"delete": ""}), + region=region, + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + return DeleteObjectsResponse( + response=response, + bucket_name=bucket_name, + region=region or self._region_map.get(bucket_name) or "", + ) + + _get_bucket_location = _get_region + + def _head_object( + self, + *, + bucket_name: str, + object_name: str, + version_id: Optional[str] = None, + ssec: Optional[SseCustomerKey] = None, + offset: int = 0, + length: Optional[int] = None, + match_etag: Optional[str] = None, + not_match_etag: Optional[str] = None, + modified_since: Optional[datetime] = None, + unmodified_since: Optional[datetime] = None, + fetch_checksum: bool = False, + region: 
Optional[str] = None, + extra_headers: Optional[HTTPHeaderDict] = None, + extra_query_params: Optional[HTTPQueryDict] = None, + ) -> HeadObjectResponse: + """Execute HeadObject S3 API.""" + check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) + check_object_name(object_name) + headers = self._gen_read_headers( + ssec=ssec, + offset=offset, + length=length, + match_etag=match_etag, + not_match_etag=not_match_etag, + modified_since=modified_since, + unmodified_since=unmodified_since, + fetch_checksum=fetch_checksum, + ) + query_params = HTTPQueryDict() + if version_id: + query_params["versionId"] = version_id + response = self._execute( + method="HEAD", + bucket_name=bucket_name, + object_name=object_name, + headers=headers, + query_params=query_params, + region=region, + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + return HeadObjectResponse( + headers=response.headers, + bucket_name=bucket_name, + region=region or self._region_map.get(bucket_name) or "", + object_name=object_name, + ) + + def _list_buckets( + self, + *, + bucket_region: Optional[str] = None, + max_buckets: int = 10000, + prefix: Optional[str] = None, + continuation_token: Optional[str] = None, + extra_headers: Optional[HTTPHeaderDict] = None, + extra_query_params: Optional[HTTPQueryDict] = None, + ) -> ListBucketsResponse: + """Execute ListBuckets S3 API.""" + query_params = HTTPQueryDict() + query_params["max-buckets"] = str( + max_buckets if max_buckets > 0 else 10000, + ) + if bucket_region is not None: + query_params["bucket-region"] = bucket_region + if prefix: + query_params["prefix"] = prefix + if continuation_token: + query_params["continuation-token"] = continuation_token + + response = self._execute( + method="GET", + query_params=query_params, + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + return ListBucketsResponse( + response=response, + region=bucket_region, + ) + + def _list_multipart_uploads( + self, + *, + bucket_name: str, + delimiter: Optional[str] = None, + encoding_type: Optional[str] = None, + key_marker: Optional[str] = None, + max_uploads: Optional[int] = None, + prefix: Optional[str] = None, + upload_id_marker: Optional[str] = None, + region: Optional[str] = None, + extra_headers: Optional[HTTPHeaderDict] = None, + extra_query_params: Optional[HTTPQueryDict] = None, + ) -> ListMultipartUploadsResponse: + """Execute ListMultipartUploads S3 API.""" + query_params = HTTPQueryDict( + { + "uploads": "", + "delimiter": delimiter or "", + "max-uploads": str(max_uploads or 1000), + "prefix": prefix or "", + "encoding-type": "url", + }, + ) + if encoding_type: + query_params["encoding-type"] = encoding_type + if key_marker: + query_params["key-marker"] = key_marker + if upload_id_marker: + query_params["upload-id-marker"] = upload_id_marker + + response = self._execute( + method="GET", + bucket_name=bucket_name, + query_params=query_params, + region=region, + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + return ListMultipartUploadsResponse( + response=response, + bucket_name=bucket_name, + region=region or self._region_map.get(bucket_name) or "", + ) + + def _list_parts( + self, + *, + bucket_name: str, + object_name: str, + upload_id: str, + max_parts: Optional[int] = None, + part_number_marker: Optional[str] = None, + region: Optional[str] = None, + extra_headers: Optional[HTTPHeaderDict] = None, + extra_query_params: Optional[HTTPQueryDict] = None, + ) -> ListPartsResponse: + """Execute ListParts S3 
API.""" + query_params = HTTPQueryDict( + { + "uploadId": upload_id, + "max-parts": str(max_parts or 1000), + }, + ) + if part_number_marker: + query_params["part-number-marker"] = part_number_marker + + response = self._execute( + method="GET", + bucket_name=bucket_name, + object_name=object_name, + query_params=query_params, + region=region, + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + return ListPartsResponse( + response=response, + bucket_name=bucket_name, + region=region or self._region_map.get(bucket_name) or "", + object_name=object_name, + ) + + def _put_object( + self, + *, + bucket_name: str, + object_name: str, + data: bytes, + headers: Optional[HTTPHeaderDict] = None, + query_params: Optional[HTTPQueryDict] = None, + region: Optional[str] = None, + extra_headers: Optional[HTTPHeaderDict] = None, + extra_query_params: Optional[HTTPQueryDict] = None, + ) -> ObjectWriteResponse: + """Execute PutObject S3 API.""" + response = self._execute( + method="PUT", + bucket_name=bucket_name, + object_name=object_name, + body=data, + headers=headers, + query_params=query_params, + no_body_trace=True, + region=region, + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + return ObjectWriteResponse( + headers=response.headers, + bucket_name=bucket_name, + region=region or self._region_map.get(bucket_name) or "", + object_name=object_name, + ) + + def _upload_part( + self, + *, + bucket_name: str, + object_name: str, + data: bytes, + upload_id: str, + part_number: int, + checksum_headers: Optional[HTTPHeaderDict] = None, + headers: Optional[HTTPHeaderDict] = None, + region: Optional[str] = None, + extra_headers: Optional[HTTPHeaderDict] = None, + extra_query_params: Optional[HTTPQueryDict] = None, + ) -> UploadPartResponse: + """Execute UploadPart S3 API.""" + query_params = HTTPQueryDict({ + "partNumber": str(part_number), + "uploadId": upload_id, + }) + headers = headers.copy() if headers else HTTPHeaderDict() + if checksum_headers: + headers.extend(checksum_headers) + response = self._put_object( + bucket_name=bucket_name, + object_name=object_name, + data=data, + headers=headers, + query_params=query_params, + region=region, + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + return UploadPartResponse( + response=response, + upload_id=upload_id, + part_number=part_number, + ) + + def _upload_part_copy( + self, + *, + bucket_name: str, + object_name: str, + upload_id: str, + part_number: int, + headers: HTTPHeaderDict, + region: Optional[str] = None, + extra_headers: Optional[HTTPHeaderDict] = None, + extra_query_params: Optional[HTTPQueryDict] = None, + ) -> UploadPartCopyResponse: + """Execute UploadPartCopy S3 API.""" + query_params = HTTPQueryDict( + { + "partNumber": str(part_number), + "uploadId": upload_id, + }, + ) + response = self._execute( + method="PUT", + bucket_name=bucket_name, + object_name=object_name, + headers=headers, + query_params=query_params, + region=region, + extra_headers=extra_headers, + extra_query_params=extra_query_params, + ) + return UploadPartCopyResponse( + response=response, + bucket_name=bucket_name, + region=region or self._region_map.get(bucket_name) or "", + object_name=object_name, + upload_id=upload_id, + part_number=part_number, + ) + + def set_app_info(self, app_name: str, app_version: str): + """ + Set your application name and version to user agent header. + + Args: + app_name (str): + Application name. + + app_version (str): + Application version. 
+
+        Example:
+            >>> client.set_app_info("my_app", "1.0.2")
+        """
+        self._user_agent = get_user_agent(app_name, app_version)
+
+    def trace_on(self, stream: TextIO):
+        """
+        Enable HTTP trace.
+
+        Args:
+            stream (TextIO):
+                Stream for writing HTTP call tracing.
+
+        Example:
+            >>> client.trace_on(sys.stdout)
+        """
+        if not stream:
+            raise ValueError('Input stream for trace output is invalid.')
+        # Save new output stream.
+        self._trace_stream = stream
+
+    def trace_off(self):
+        """Disable HTTP trace."""
+        self._trace_stream = None
+
+    def enable_accelerate_endpoint(self):
+        """Enables accelerate endpoint for Amazon S3 endpoint."""
+        self._base_url.accelerate_host_flag = True
+
+    def disable_accelerate_endpoint(self):
+        """Disables accelerate endpoint for Amazon S3 endpoint."""
+        self._base_url.accelerate_host_flag = False
+
+    def enable_dualstack_endpoint(self):
+        """Enables dualstack endpoint for Amazon S3 endpoint."""
+        self._base_url.dualstack_host_flag = True
+
+    def disable_dualstack_endpoint(self):
+        """Disables dualstack endpoint for Amazon S3 endpoint."""
+        self._base_url.dualstack_host_flag = False
+
+    def enable_virtual_style_endpoint(self):
+        """Enables virtual style endpoint."""
+        self._base_url.virtual_style_flag = True
+
+    def disable_virtual_style_endpoint(self):
+        """Disables virtual style endpoint."""
+        self._base_url.virtual_style_flag = False
+
+    def select_object_content(
+        self,
+        *,
+        bucket_name: str,
+        object_name: str,
+        request: SelectObjectContentRequest,
+        region: Optional[str] = None,
+        extra_headers: Optional[HTTPHeaderDict] = None,
+        extra_query_params: Optional[HTTPQueryDict] = None,
+    ) -> SelectObjectResponse:
+        """
+        Select content of an object by SQL expression.
+
+        Args:
+            bucket_name (str):
+                Name of the bucket.
+
+            object_name (str):
+                Object name in the bucket.
+
+            request (SelectObjectContentRequest):
+                Select request.
+
+            region (Optional[str], default=None):
+                Region of the bucket to skip auto probing.
+
+            extra_headers (Optional[HTTPHeaderDict], default=None):
+                Extra headers for advanced usage.
+
+            extra_query_params (Optional[HTTPQueryDict], default=None):
+                Extra query parameters for advanced usage.
+
+        Returns:
+            SelectObjectResponse:
+                A reader object representing the results of the select
+                operation.
+
+        Example:
+            >>> with client.select_object_content(
+            ...     bucket_name="my-bucket",
+            ...     object_name="my-object.csv",
+            ...     request=SelectObjectContentRequest(
+            ...         expression="select * from S3Object",
+            ...         input_serialization=CSVInputSerialization(),
+            ...         output_serialization=CSVOutputSerialization(),
+            ...         request_progress=True,
+            ...     ),
+            ... ) as response:
+            ...     for data in response.stream():
+            ...         print(data.decode())
+            ...     print(response.stats())
+        """
+        check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
+        check_object_name(object_name)
+        body = marshal(request)
+        headers = HTTPHeaderDict(
+            {"Content-MD5": base64_string(MD5.hash(body))},
         )
         response = self._execute(
             method="POST",
@@ -788,17 +1414,26 @@ def select_object_content(
             extra_headers=extra_headers,
             extra_query_params=extra_query_params,
         )
-        return SelectObjectReader(response)
+        return SelectObjectResponse(
+            response=response,
+            bucket_name=bucket_name,
+            region=region or self._region_map.get(bucket_name) or "",
+            object_name=object_name,
+        )
 
     def make_bucket(
         self,
         *,
         bucket_name: str,
-        location: Optional[str] = None,
         object_lock: bool = False,
+        location_config: Optional[
+            CreateBucketConfiguration.Location] = None,
+        bucket_config: Optional[CreateBucketConfiguration.Bucket] = None,
+        tags: Optional[Tags] = None,
+        region: Optional[str] = None,
         extra_headers: Optional[HTTPHeaderDict] = None,
         extra_query_params: Optional[HTTPQueryDict] = None,
-    ):
+    ) -> GenericResponse:
         """
         Create a bucket with region and optional object lock.
 
@@ -806,12 +1441,23 @@ def make_bucket(
             bucket_name (str):
                 Name of the bucket.
 
-            location (Optional[str], default=None):
-                Region in which the bucket is to be created.
-
             object_lock (bool, default=False):
                 Flag to enable the object-lock feature.
 
+            location_config
+            (Optional[CreateBucketConfiguration.Location], default=None):
+                Location configuration.
+
+            bucket_config
+            (Optional[CreateBucketConfiguration.Bucket], default=None):
+                Bucket configuration.
+
+            tags (Optional[Tags], default=None):
+                Bucket tags.
+
+            region (Optional[str], default=None):
+                Region in which the bucket is to be created.
+
             extra_headers (Optional[HTTPHeaderDict], default=None):
                 Extra headers for advanced usage.
 
@@ -825,75 +1471,26 @@ def make_bucket(
             >>> # Create bucket in a specific region
             >>> client.make_bucket(
             ...     bucket_name="my-bucket",
-            ...     location="eu-west-1",
+            ...     region="eu-west-1",
             ... )
             >>>
             >>> # Create bucket with object-lock in a region
             >>> client.make_bucket(
             ...     bucket_name="my-bucket",
-            ...     location="eu-west-2",
+            ...     region="eu-west-2",
             ...     object_lock=True,
             ... )
         """
-        check_bucket_name(bucket_name, True,
-                          s3_check=self._base_url.is_aws_host)
-        if self._base_url.region:
-            # Error out if region does not match with region passed via
-            # constructor.
-            if location and self._base_url.region != location:
-                raise ValueError(
-                    f"region must be {self._base_url.region}, "
-                    f"but passed {location}"
-                )
-        location = self._base_url.region or location or "us-east-1"
-        headers = HTTPHeaderDict()
-        if object_lock:
-            headers["x-amz-bucket-object-lock-enabled"] = "true"
-        body = None
-        if location != "us-east-1":
-            element = Element("CreateBucketConfiguration")
-            SubElement(element, "LocationConstraint", location)
-            body = getbytes(element)
-        self._url_open(
-            method="PUT",
-            region=location,
+        return self._create_bucket(
             bucket_name=bucket_name,
-            body=body,
-            headers=headers,
-            extra_headers=extra_headers,
-            extra_query_params=extra_query_params,
-        )
-        self._region_map.set(bucket_name, location)
-
-    def _list_buckets(
-        self,
-        *,
-        bucket_region: Optional[str] = None,
-        max_buckets: int = 10000,
-        prefix: Optional[str] = None,
-        continuation_token: Optional[str] = None,
-        extra_headers: Optional[HTTPHeaderDict] = None,
-        extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> ListAllMyBucketsResult:
-        """Do ListBuckets S3 API."""
-        query_params = HTTPQueryDict()
-        query_params["max-buckets"] = str(
-            max_buckets if max_buckets > 0 else 10000,
-        )
-        if bucket_region is not None:
-            query_params["bucket-region"] = bucket_region
-        if prefix:
-            query_params["prefix"] = prefix
-        if continuation_token:
-            query_params["continuation-token"] = continuation_token
-
-        response = self._execute(
-            method="GET",
-            query_params=query_params,
+            object_lock=object_lock,
+            location_config=location_config,
+            bucket_config=bucket_config,
+            tags=tags,
+            region=region,
            extra_headers=extra_headers,
            extra_query_params=extra_query_params,
         )
-        return unmarshal(ListAllMyBucketsResult, response.data.decode())
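+    # A minimal sketch of the reworked call (illustrative only; ``my_tags``
+    # stands for a ``Tags`` instance built by the caller):
+    #
+    #     client.make_bucket(
+    #         bucket_name="my-bucket",
+    #         region="eu-west-1",
+    #         tags=my_tags,
+    #     )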
 
     def list_buckets(
         self,
@@ -903,7 +1500,7 @@ def list_buckets(
         prefix: Optional[str] = None,
         extra_headers: Optional[HTTPHeaderDict] = None,
         extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> Iterator[Bucket]:
+    ) -> Iterator[ListAllMyBucketsResult.Bucket]:
         """
         List information of all accessible buckets.
 
@@ -924,8 +1521,9 @@ def list_buckets(
                 Extra query parameters for advanced usage.
 
         Returns:
-            Iterator[Bucket]:
-                An iterator of :class:`minio.datatypes.Bucket` objects.
+            Iterator[ListAllMyBucketsResult.Bucket]:
+                An iterator of
+                :class:`minio.models.ListAllMyBucketsResult.Bucket` objects.
 
         Example:
             >>> buckets = client.list_buckets()
@@ -934,7 +1532,7 @@ def list_buckets(
         """
         continuation_token: Optional[str] = ""
         while continuation_token is not None:
-            result = self._list_buckets(
+            response = self._list_buckets(
                 bucket_region=bucket_region,
                 max_buckets=max_buckets,
                 prefix=prefix,
@@ -942,19 +1540,209 @@ def list_buckets(
                 extra_headers=extra_headers,
                 extra_query_params=extra_query_params,
             )
-            continuation_token = result.continuation_token
-            yield from result.buckets
+            if not response.result:
+                raise ValueError(
+                    "ListBucketsResponse has empty result; "
+                    "this should not happen",
+                )
+            continuation_token = response.result.continuation_token
+            yield from response.result.buckets
+
+    def head_bucket(
+        self,
+        *,
+        bucket_name: str,
+        region: Optional[str] = None,
+        extra_headers: Optional[HTTPHeaderDict] = None,
+        extra_query_params: Optional[HTTPQueryDict] = None,
+    ) -> Optional[HeadBucketResponse]:
+        """Execute HeadBucket API."""
+        check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
+        try:
+            response = self._execute(
+                method="HEAD",
+                bucket_name=bucket_name,
+                region=region,
+                extra_headers=extra_headers,
+                extra_query_params=extra_query_params,
+            )
+            bucket_region = response.headers.get("x-amz-bucket-region", "")
+            if (
+                bucket_region and
+                self._region_map.get(bucket_name) != bucket_region
+            ):
+                self._region_map.set(bucket_name, bucket_region)
+            return HeadBucketResponse(
+                headers=response.headers,
+                bucket_name=bucket_name,
+                region=(
+                    region or self._region_map.get(
+                        bucket_name) or bucket_region
+                ),
+            )
+        except S3Error as exc:
+            if exc.code != "NoSuchBucket":
+                raise
+        return None
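+
+    # A minimal usage sketch for ``head_bucket`` (illustrative only;
+    # assumes a configured ``client``):
+    #
+    #     response = client.head_bucket(bucket_name="my-bucket")
+    #     if response:
+    #         print(f"bucket region: {response.region}")
+    #     else:
+    #         print("bucket does not exist")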
+
+    def bucket_exists(
+        self,
+        *,
+        bucket_name: str,
+        region: Optional[str] = None,
+        extra_headers: Optional[HTTPHeaderDict] = None,
+        extra_query_params: Optional[HTTPQueryDict] = None,
+    ) -> bool:
+        """
+        Check if a bucket exists.
+
+        Args:
+            bucket_name (str):
+                Name of the bucket.
+
+            region (Optional[str], default=None):
+                Region of the bucket to skip auto probing.
+
+            extra_headers (Optional[HTTPHeaderDict], default=None):
+                Extra headers for advanced usage.
+
+            extra_query_params (Optional[HTTPQueryDict], default=None):
+                Extra query parameters for advanced usage.
+
+        Returns:
+            bool:
+                True if the bucket exists, False otherwise.
+
+        Example:
+            >>> if client.bucket_exists(bucket_name="my-bucket"):
+            ...     print("my-bucket exists")
+            ... else:
+            ...     print("my-bucket does not exist")
+        """
+        check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
+        try:
+            self._execute(
+                method="HEAD",
+                bucket_name=bucket_name,
+                region=region,
+                extra_headers=extra_headers,
+                extra_query_params=extra_query_params,
+            )
+            return True
+        except S3Error as exc:
+            if exc.code != "NoSuchBucket":
+                raise
+        return False
+
+    def remove_bucket(
+        self,
+        *,
+        bucket_name: str,
+        region: Optional[str] = None,
+        extra_headers: Optional[HTTPHeaderDict] = None,
+        extra_query_params: Optional[HTTPQueryDict] = None,
+    ):
+        """
+        Remove an empty bucket.
+
+        Args:
+            bucket_name (str):
+                Name of the bucket.
+
+            region (Optional[str], default=None):
+                Region of the bucket to skip auto probing.
+
+            extra_headers (Optional[HTTPHeaderDict], default=None):
+                Extra headers for advanced usage.
+
+            extra_query_params (Optional[HTTPQueryDict], default=None):
+                Extra query parameters for advanced usage.
+
+        Example:
+            >>> client.remove_bucket(bucket_name="my-bucket")
+        """
+        check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
+        self._execute(
+            method="DELETE",
+            bucket_name=bucket_name,
+            region=region,
+            extra_headers=extra_headers,
+            extra_query_params=extra_query_params,
+        )
+        self._region_map.remove(bucket_name)
+
+    def get_bucket_policy(
+        self,
+        *,
+        bucket_name: str,
+        region: Optional[str] = None,
+        extra_headers: Optional[HTTPHeaderDict] = None,
+        extra_query_params: Optional[HTTPQueryDict] = None,
+    ) -> str:
+        """
+        Get the bucket policy configuration of a bucket.
+
+        Args:
+            bucket_name (str):
+                Name of the bucket.
+
+            region (Optional[str], default=None):
+                Region of the bucket to skip auto probing.
+
+            extra_headers (Optional[HTTPHeaderDict], default=None):
+                Extra headers for advanced usage.
+
+            extra_query_params (Optional[HTTPQueryDict], default=None):
+                Extra query parameters for advanced usage.
+
+        Returns:
+            str:
+                Bucket policy configuration as a JSON string.
+
+        Example:
+            >>> policy = client.get_bucket_policy(bucket_name="my-bucket")
+        """
+        check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
+        response = self._execute(
+            method="GET",
+            bucket_name=bucket_name,
+            query_params=HTTPQueryDict({"policy": ""}),
+            region=region,
+            extra_headers=extra_headers,
+            extra_query_params=extra_query_params,
+        )
+        return response.data.decode()
+
+    def _execute_delete_bucket(
+        self,
+        *,
+        bucket_name: str,
+        query_params: HTTPQueryDict,
+        region: Optional[str] = None,
+        extra_headers: Optional[HTTPHeaderDict] = None,
+        extra_query_params: Optional[HTTPQueryDict] = None,
+    ):
+        """Delete any bucket API."""
+        check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
+        self._execute(
+            method="DELETE",
+            bucket_name=bucket_name,
+            query_params=query_params,
+            region=region,
+            extra_headers=extra_headers,
+            extra_query_params=extra_query_params,
+        )
 
-    def bucket_exists(
+    def get_bucket_cors(
         self,
         *,
         bucket_name: str,
         region: Optional[str] = None,
         extra_headers: Optional[HTTPHeaderDict] = None,
         extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> bool:
+    ) -> CORSConfig:
         """
-        Check if a bucket exists.
+        Get CORS configuration of a bucket.
 
         Args:
             bucket_name (str):
@@ -970,45 +1758,42 @@ def bucket_exists(
                 Extra query parameters for advanced usage.
 
         Returns:
-            bool:
-                True if the bucket exists, False otherwise.
+            CORSConfig:
+                CORS configuration of the bucket.
 
         Example:
-            >>> if client.bucket_exists(bucket_name="my-bucket"):
-            ...     print("my-bucket exists")
-            ... else:
-            ...     print("my-bucket does not exist")
-        """
+            >>> config = client.get_bucket_cors(bucket_name="my-bucket")
+        """
         check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
-        try:
-            self._execute(
-                method="HEAD",
-                bucket_name=bucket_name,
-                region=region,
-                extra_headers=extra_headers,
-                extra_query_params=extra_query_params,
-            )
-            return True
-        except S3Error as exc:
-            if exc.code != "NoSuchBucket":
-                raise
-        return False
+        response = self._execute(
+            method="GET",
+            bucket_name=bucket_name,
+            query_params=HTTPQueryDict({"cors": ""}),
+            region=region,
+            extra_headers=extra_headers,
+            extra_query_params=extra_query_params,
+        )
+        return unmarshal(CORSConfig, response.data.decode())
 
-    def remove_bucket(
+    def set_bucket_cors(
         self,
         *,
         bucket_name: str,
+        config: CORSConfig,
         region: Optional[str] = None,
         extra_headers: Optional[HTTPHeaderDict] = None,
         extra_query_params: Optional[HTTPQueryDict] = None,
     ):
         """
-        Remove an empty bucket.
+        Set CORS configuration of a bucket.
 
         Args:
             bucket_name (str):
                 Name of the bucket.
 
+            config (CORSConfig):
+                CORS configuration.
+
             region (Optional[str], default=None):
                 Region of the bucket to skip auto probing.
 
@@ -1019,28 +1804,52 @@ def remove_bucket(
                 Extra query parameters for advanced usage.
 
         Example:
-            >>> client.remove_bucket(bucket_name="my-bucket")
+            >>> config = CORSConfig(
+            ...     rules=[
+            ...         CORSConfig.CORSRule(
+            ...             allowed_headers=["*"],
+            ...             allowed_methods=["PUT", "POST", "DELETE"],
+            ...             allowed_origins=["http://www.example.com"],
+            ...             expose_headers=["x-amz-server-side-encryption"],
+            ...             max_age_seconds=3000,
+            ...         ),
+            ...         CORSConfig.CORSRule(
+            ...             allowed_methods=["GET"],
+            ...             allowed_origins=["*"],
+            ...         ),
+            ...     ],
+            ... )
+            >>> client.set_bucket_cors(
+            ...     bucket_name="my-bucket",
+            ...     config=config,
+            ... )
         """
         check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
+        body = marshal(config)
+        headers = HTTPHeaderDict(
+            {"Content-MD5": base64_string(MD5.hash(body))},
+        )
         self._execute(
-            method="DELETE",
+            method="PUT",
             bucket_name=bucket_name,
+            body=body,
+            headers=headers,
+            query_params=HTTPQueryDict({"cors": ""}),
             region=region,
             extra_headers=extra_headers,
            extra_query_params=extra_query_params,
        )
-        self._region_map.remove(bucket_name)
 
-    def get_bucket_policy(
+    def delete_bucket_cors(
         self,
         *,
         bucket_name: str,
         region: Optional[str] = None,
         extra_headers: Optional[HTTPHeaderDict] = None,
         extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> str:
+    ):
         """
-        Get the bucket policy configuration of a bucket.
+        Delete CORS configuration of a bucket.
 
         Args:
             bucket_name (str):
@@ -1055,39 +1864,12 @@ def get_bucket_policy(
             extra_query_params (Optional[HTTPQueryDict], default=None):
                 Extra query parameters for advanced usage.
 
-        Returns:
-            str:
-                Bucket policy configuration as a JSON string.
-
         Example:
-            >>> policy = client.get_bucket_policy(bucket_name="my-bucket")
+            >>> client.delete_bucket_cors(bucket_name="my-bucket")
         """
-        check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
-        response = self._execute(
-            method="GET",
-            bucket_name=bucket_name,
-            query_params=HTTPQueryDict({"policy": ""}),
-            region=region,
-            extra_headers=extra_headers,
-            extra_query_params=extra_query_params,
-        )
-        return response.data.decode()
-
-    def _execute_delete_bucket(
-        self,
-        *,
-        bucket_name: str,
-        query_params: HTTPQueryDict,
-        region: Optional[str] = None,
-        extra_headers: Optional[HTTPHeaderDict] = None,
-        extra_query_params: Optional[HTTPQueryDict] = None,
-    ):
-        """ Delete any bucket API. """
-        check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
-        self._execute(
-            method="DELETE",
+        self.set_bucket_cors(
             bucket_name=bucket_name,
-            query_params=query_params,
+            config=CORSConfig(),
             region=region,
             extra_headers=extra_headers,
             extra_query_params=extra_query_params,
         )
@@ -1213,7 +1995,7 @@ def set_bucket_policy(
             ... )
         """
         check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
-        is_valid_policy_type(policy)
+        check_policy(policy)
         body = policy if isinstance(policy, bytes) else policy.encode()
         headers = HTTPHeaderDict(
             {"Content-MD5": base64_string(MD5.hash(body))},
@@ -1316,8 +2098,6 @@ def set_bucket_notification(
) """ check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) - if not isinstance(config, NotificationConfig): - raise ValueError("config must be NotificationConfig type") body = marshal(config) headers = HTTPHeaderDict( {"Content-MD5": base64_string(MD5.hash(body))}, @@ -1406,8 +2186,6 @@ def set_bucket_encryption( ... ) """ check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) - if not isinstance(config, SSEConfig): - raise ValueError("config must be SSEConfig type") body = marshal(config) headers = HTTPHeaderDict( {"Content-MD5": base64_string(MD5.hash(body))}, @@ -1556,7 +2334,7 @@ def listen_bucket_notification( Returns: EventIterable: - An iterator of :class:`minio.datatypes.EventIterable` containing + An iterator of :class:`minio.models.EventIterable` containing event records. Example: @@ -1626,8 +2404,6 @@ def set_bucket_versioning( ... ) """ check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) - if not isinstance(config, VersioningConfig): - raise ValueError("config must be VersioningConfig type") body = marshal(config) headers = HTTPHeaderDict( {"Content-MD5": base64_string(MD5.hash(body))}, @@ -1706,7 +2482,7 @@ def fput_object( region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, - ) -> ObjectWriteResult: + ) -> ObjectWriteResponse: """ Upload data from a file to an object in a bucket. @@ -1763,8 +2539,8 @@ def fput_object( Extra query parameters for advanced usage. Returns: - ObjectWriteResult: - The result of the object upload operation. + ObjectWriteResponse: + The response of the object upload operation. Example: >>> # Upload data @@ -1893,7 +2669,7 @@ def fget_object( region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, - ): + ) -> GetObjectResponse: """ Download an object to a file. @@ -1943,6 +2719,11 @@ def fget_object( extra_query_params (Optional[HTTPQueryDict], default=None): Extra query parameters for advanced usage. + Returns: + GetObjectResponse: + An :class:`minio.models.GetObjectResponse` object containing + the object data. + Example: >>> # Download object >>> client.fget_object( @@ -1976,14 +2757,19 @@ def fget_object( # Create top level directory if needed. makedirs(os.path.dirname(file_path)) - stat = self.stat_object( + head_response = self._head_object( bucket_name=bucket_name, object_name=object_name, ssec=ssec, version_id=version_id, + region=region, + match_etag=match_etag, + not_match_etag=not_match_etag, + modified_since=modified_since, + unmodified_since=unmodified_since, ) - etag = queryencode(cast(str, stat.etag)) + etag = queryencode(cast(str, head_response.etag)) # Write to a temporary file "file_path.ETAG.part.minio" before saving. tmp_file_path = ( tmp_file_path or f"{file_path}.{etag}.part.minio" @@ -2012,18 +2798,17 @@ def fget_object( progress.set_meta(object_name=object_name, total_length=length) with open(tmp_file_path, "wb") as tmp_file: - for data in response.stream(amt=1024 * 1024): + for data in response.stream(1024*1024): size = tmp_file.write(data) if progress: progress.update(size) if os.path.exists(file_path): os.remove(file_path) # For windows compatibility. 
             os.rename(tmp_file_path, file_path)
-            return stat
+            return response
         finally:
             if response:
                 response.close()
-                response.release_conn()
 
     def get_object(
         self,
@@ -2042,14 +2827,13 @@ def get_object(
         region: Optional[str] = None,
         extra_headers: Optional[HTTPHeaderDict] = None,
         extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> BaseHTTPResponse:
+    ) -> GetObjectResponse:
         """
         Get object data from a bucket.
 
         Data is read starting at the specified offset up to the given length.
         The returned response must be closed after use to release network
-        resources. To reuse the connection, explicitly call
-        ``response.release_conn()``.
+        resources.
 
         Args:
             bucket_name (str):
@@ -2095,9 +2879,8 @@ def get_object(
                 Extra query parameters for advanced usage.
 
         Returns:
-            BaseHTTPResponse:
-                An :class:`urllib3.response.BaseHTTPResponse` or
-                :class:`urllib3.response.HTTPResponse` object containing
+            GetObjectResponse:
+                An :class:`minio.models.GetObjectResponse` object containing
                 the object data.
 
         Example:
@@ -2110,7 +2893,6 @@ def get_object(
             ...     # Read data from response
             ... finally:
             ...     response.close()
-            ...     response.release_conn()
             >>>
             >>> # Get specific version of an object
             >>> try:
@@ -2121,7 +2903,6 @@ def get_object(
             ...     )
             ... finally:
             ...     response.close()
-            ...     response.release_conn()
             >>>
             >>> # Get object data from offset and length
             >>> try:
@@ -2133,7 +2914,6 @@ def get_object(
             ...     )
             ... finally:
             ...     response.close()
-            ...     response.release_conn()
             >>>
             >>> # Get SSE-C encrypted object
             >>> try:
@@ -2146,12 +2926,9 @@ def get_object(
             ...     )
             ... finally:
             ...     response.close()
-            ...     response.release_conn()
         """
         check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
         check_object_name(object_name)
-        check_ssec(ssec)
-
         headers = self._gen_read_headers(
             ssec=ssec,
             offset=offset,
@@ -2166,7 +2943,7 @@ def get_object(
         if version_id:
             query_params["versionId"] = version_id
 
-        return self._execute(
+        response = self._execute(
             method="GET",
             bucket_name=bucket_name,
             object_name=object_name,
@@ -2177,6 +2954,173 @@ def get_object(
             extra_headers=extra_headers,
             extra_query_params=extra_query_params,
         )
+        return GetObjectResponse(
+            response=response,
+            bucket_name=bucket_name,
+            region=region or self._region_map.get(bucket_name) or "",
+            object_name=object_name,
+            version_id=version_id,
+        )
+
+    def get_object_attributes(
+        self,
+        *,
+        bucket_name: str,
+        object_name: str,
+        version_id: Optional[str] = None,
+        ssec: Optional[SseCustomerKey] = None,
+        object_attributes: Optional[list[str]] = None,
+        max_parts: Optional[int] = None,
+        part_number_marker: Optional[int] = None,
+        region: Optional[str] = None,
+        extra_headers: Optional[HTTPHeaderDict] = None,
+        extra_query_params: Optional[HTTPQueryDict] = None,
+    ) -> GetObjectAttributesResponse:
+        """Get object attributes.
+
+        Args:
+            bucket_name (str):
+                Name of the bucket.
+
+            object_name (str):
+                Object name in the bucket.
+
+            version_id (Optional[str], default=None):
+                Version ID of the object.
+
+            ssec (Optional[SseCustomerKey], default=None):
+                Server-side encryption customer key.
+
+            object_attributes (Optional[list[str]], default=None):
+                Object attributes.
+
+            max_parts (Optional[int], default=None):
+                Maximum parts to fetch.
+
+            part_number_marker (Optional[int], default=None):
+                Part number marker to fetch remaining.
+
+            region (Optional[str], default=None):
+                Region of the bucket to skip auto probing.
+
+            extra_headers (Optional[HTTPHeaderDict], default=None):
+                Extra headers for advanced usage.
+
+            extra_query_params (Optional[HTTPQueryDict], default=None):
+                Extra query parameters for advanced usage.
+
+        Returns:
+            GetObjectAttributesResponse:
+                An :class:`minio.models.GetObjectAttributesResponse` object.
+
+        Example:
+            >>> # Get attributes of an object
+            >>> response = client.get_object_attributes(
+            ...     bucket_name="my-bucket",
+            ...     object_name="my-object",
+            ... )
+        """
+        check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
+        check_object_name(object_name)
+        headers = HTTPHeaderDict()
+        if max_parts:
+            headers["x-amz-max-parts"] = str(max_parts)
+        if part_number_marker:
+            headers["x-amz-part-number-marker"] = str(part_number_marker)
+        for attribute in object_attributes or []:
+            if attribute:
+                headers["x-amz-object-attributes"] = attribute
+        if ssec:
+            headers.extend(ssec.headers())
+
+        query_params = HTTPQueryDict()
+        query_params["attributes"] = ""
+        if version_id:
+            query_params["versionId"] = version_id
+
+        response = self._execute(
+            method="GET",
+            bucket_name=bucket_name,
+            object_name=object_name,
+            headers=headers,
+            query_params=query_params,
+            region=region,
+            extra_headers=extra_headers,
+            extra_query_params=extra_query_params,
+        )
+        return GetObjectAttributesResponse(
+            response=response,
+            bucket_name=bucket_name,
+            region=region or self._region_map.get(bucket_name) or "",
+            object_name=object_name,
+        )
+
+    def get_object_acl(
+        self,
+        *,
+        bucket_name: str,
+        object_name: str,
+        version_id: Optional[str] = None,
+        region: Optional[str] = None,
+        extra_headers: Optional[HTTPHeaderDict] = None,
+        extra_query_params: Optional[HTTPQueryDict] = None,
+    ) -> GetObjectAclResponse:
+        """Get object ACLs.
+
+        Args:
+            bucket_name (str):
+                Name of the bucket.
+
+            object_name (str):
+                Object name in the bucket.
+
+            version_id (Optional[str], default=None):
+                Version ID of the object.
+
+            region (Optional[str], default=None):
+                Region of the bucket to skip auto probing.
+
+            extra_headers (Optional[HTTPHeaderDict], default=None):
+                Extra headers for advanced usage.
+
+            extra_query_params (Optional[HTTPQueryDict], default=None):
+                Extra query parameters for advanced usage.
+
+        Returns:
+            GetObjectAclResponse:
+                An :class:`minio.models.GetObjectAclResponse` object.
+
+        Example:
+            >>> # Get ACL of an object
+            >>> response = client.get_object_acl(
+            ...     bucket_name="my-bucket",
+            ...     object_name="my-object",
+            ... )
+        """
+        check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
+        check_object_name(object_name)
+
+        query_params = HTTPQueryDict()
+        query_params["acl"] = ""
+        if version_id:
+            query_params["versionId"] = version_id
+
+        response = self._execute(
+            method="GET",
+            bucket_name=bucket_name,
+            object_name=object_name,
+            query_params=query_params,
+            region=region,
+            extra_headers=extra_headers,
+            extra_query_params=extra_query_params,
+        )
+        return GetObjectAclResponse(
+            response=response,
+            bucket_name=bucket_name,
+            region=region or self._region_map.get(bucket_name) or "",
+            object_name=object_name,
+            version_id=version_id,
+        )
 
     def prompt_object(
         self,
@@ -2191,7 +3135,7 @@ def prompt_object(
         extra_headers: Optional[HTTPHeaderDict] = None,
         extra_query_params: Optional[HTTPQueryDict] = None,
         **kwargs: Optional[Any],
-    ) -> BaseHTTPResponse:
+    ) -> PromptObjectResponse:
         """
         Prompt an object using natural language.
 
@@ -2228,8 +3172,8 @@ def prompt_object(
                 Additional parameters for advanced usage.
 
         Returns:
-            BaseHTTPResponse:
-                An :class:`urllib3.response.BaseHTTPResponse` object.
+            PromptObjectResponse:
+                An :class:`minio.models.PromptObjectResponse` object.
 
         Example:
             >>> response = None
@@ -2243,12 +3187,9 @@ def prompt_object(
             ... finally:
             ...     if response:
             ...         response.close()
-            ...         response.release_conn()
         """
         check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
         check_object_name(object_name)
-        check_ssec(ssec)
-
         query_params = HTTPQueryDict()
         if version_id:
             query_params["versionId"] = version_id
@@ -2258,7 +3199,7 @@ def prompt_object(
         prompt_body["prompt"] = prompt
         body = json.dumps(prompt_body)
 
-        return self._execute(
+        response = self._execute(
             method="POST",
             bucket_name=bucket_name,
             object_name=object_name,
@@ -2270,13 +3211,19 @@ def prompt_object(
             extra_headers=extra_headers,
             extra_query_params=extra_query_params,
         )
+        return PromptObjectResponse(
+            response=response,
+            bucket_name=bucket_name,
+            region=region or self._region_map.get(bucket_name) or "",
+            object_name=object_name,
+        )
 
     def copy_object(
         self,
         *,
         bucket_name: str,
         object_name: str,
-        source: CopySource,
+        source: SourceObject,
         sse: Optional[Sse] = None,
         user_metadata: Optional[HTTPHeaderDict] = None,
         tags: Optional[Tags] = None,
@@ -2287,7 +3234,7 @@ def copy_object(
         region: Optional[str] = None,
         extra_headers: Optional[HTTPHeaderDict] = None,
         extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> ObjectWriteResult:
+    ) -> ObjectWriteResponse:
         """
         Create an object by server-side copying data from another object.
 
@@ -2300,7 +3247,7 @@ def copy_object(
             object_name (str):
                 Object name in the bucket.
 
-            source (CopySource):
+            source (SourceObject):
                 Source object information.
 
             sse (Optional[Sse], default=None):
@@ -2337,18 +3284,18 @@ def copy_object(
                 Extra query parameters for advanced usage.
 
         Returns:
-            ObjectWriteResult:
-                The result of the copy operation.
+            ObjectWriteResponse:
+                The response of the copy operation.
 
         Example:
             >>> from datetime import datetime, timezone
-            >>> from minio.commonconfig import REPLACE, CopySource
+            >>> from minio.args import Directive, SourceObject
             >>>
             >>> # Copy an object from a bucket to another
             >>> result = client.copy_object(
             ...     bucket_name="my-bucket",
             ...     object_name="my-object",
-            ...     source=CopySource(
+            ...     source=SourceObject(
             ...         bucket_name="my-sourcebucket",
             ...         object_name="my-sourceobject",
             ...     ),
             ... )
@@ -2359,7 +3306,7 @@ def copy_object(
             >>> result = client.copy_object(
             ...     bucket_name="my-bucket",
             ...     object_name="my-object",
-            ...     source=CopySource(
+            ...     source=SourceObject(
             ...         bucket_name="my-sourcebucket",
             ...         object_name="my-sourceobject",
             ...         modified_since=datetime(
@@ -2374,56 +3321,40 @@ def copy_object(
             >>> result = client.copy_object(
             ...     bucket_name="my-bucket",
             ...     object_name="my-object",
-            ...     source=CopySource(
+            ...     source=SourceObject(
             ...         bucket_name="my-sourcebucket",
             ...         object_name="my-sourceobject",
             ...     ),
             ...     user_metadata=user_metadata,
-            ...     metadata_directive=REPLACE,
+            ...     metadata_directive=Directive.REPLACE,
             ... )
-            >>> print(result.object_name, result.version_id)
-        """
-        check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
-        check_object_name(object_name)
-        if not isinstance(source, CopySource):
-            raise ValueError("source must be CopySource type")
-        check_sse(sse)
-        if tags is not None and not isinstance(tags, Tags):
-            raise ValueError("tags must be Tags type")
-        if retention is not None and not isinstance(retention, Retention):
-            raise ValueError("retention must be Retention type")
-        if (
-            metadata_directive is not None and
-            metadata_directive not in [COPY, REPLACE]
-        ):
-            raise ValueError(f"metadata directive must be {COPY} or {REPLACE}")
-        if (
-            tagging_directive is not None and
-            tagging_directive not in [COPY, REPLACE]
-        ):
-            raise ValueError(f"tagging directive must be {COPY} or {REPLACE}")
-
+            >>> print(result.object_name, result.version_id)
+        """
         size = -1
         if source.offset is None and source.length is None:
-            stat = self.stat_object(
+            response = self._head_object(
                 bucket_name=source.bucket_name,
                 object_name=source.object_name,
                 version_id=source.version_id,
                 ssec=source.ssec,
+                region=source.region,
+                match_etag=source.match_etag,
+                not_match_etag=source.not_match_etag,
+                modified_since=source.modified_since,
+                unmodified_since=source.unmodified_since,
             )
-            size = cast(int, stat.size)
-
+            size = cast(int, response.size)
         if (
             source.offset is not None or
             source.length is not None or
             size > MAX_PART_SIZE
         ):
-            if metadata_directive == COPY:
+            if metadata_directive == Directive.COPY:
                 raise ValueError(
                     "COPY metadata directive is not applicable to source "
                     "object size greater than 5 GiB",
                 )
-            if tagging_directive == COPY:
+            if tagging_directive == Directive.COPY:
                 raise ValueError(
                     "COPY tagging directive is not applicable to source "
                     "object size greater than 5 GiB"
@@ -2431,79 +3362,96 @@ def copy_object(
             return self.compose_object(
                 bucket_name=bucket_name,
                 object_name=object_name,
-                sources=[ComposeSource.of(source)],
+                sources=[source],
                 sse=sse,
                 user_metadata=user_metadata,
                 tags=tags,
                 retention=retention,
                 legal_hold=legal_hold,
+                region=region,
+                extra_headers=extra_headers,
+                extra_query_params=extra_query_params,
            )
-
-        headers = self._gen_write_headers(
-            user_metadata=user_metadata,
+        return self._copy_object(
+            bucket_name=bucket_name,
+            object_name=object_name,
+            source=source,
             sse=sse,
+            user_metadata=user_metadata,
             tags=tags,
             retention=retention,
             legal_hold=legal_hold,
-        )
-        if metadata_directive:
-            headers["x-amz-metadata-directive"] = metadata_directive
-        if tagging_directive:
-            headers["x-amz-tagging-directive"] = tagging_directive
-        headers.extend(source.gen_copy_headers())
-        response = self._execute(
-            method="PUT",
-            bucket_name=bucket_name,
-            object_name=object_name,
-            headers=headers,
+            metadata_directive=metadata_directive,
+            tagging_directive=tagging_directive,
             region=region,
             extra_headers=extra_headers,
             extra_query_params=extra_query_params,
         )
-        etag, last_modified = parse_copy_object(response)
-        return ObjectWriteResult.new(
-            headers=response.headers,
-            bucket_name=bucket_name,
-            object_name=object_name,
-            etag=etag,
-            last_modified=last_modified,
-        )
 
-    def _calc_part_count(self, sources: list[ComposeSource]) -> int:
+    def _calc_part_count(self, sources: list[SourceObject]) -> int:
         """Calculate part count."""
         object_size = 0
         part_count = 0
-        i = 0
-        for src in sources:
-            i += 1
-            stat = self.stat_object(
-                bucket_name=src.bucket_name,
-                object_name=src.object_name,
-                version_id=src.version_id,
-                ssec=src.ssec,
+        source_len = len(sources)
+        for i in range(source_len):
+            interim_part = source_len not in (1, i + 1)
+            if sources[i].object_size is None:
+                response = self._head_object(
+                    bucket_name=sources[i].bucket_name,
+                    object_name=sources[i].object_name,
+                    version_id=sources[i].version_id,
+                    ssec=sources[i].ssec,
+                    region=sources[i].region,
+                    match_etag=sources[i].match_etag,
+                    not_match_etag=sources[i].not_match_etag,
+                    modified_since=sources[i].modified_since,
+                    unmodified_since=sources[i].unmodified_since,
+                )
+                sources[i] = sources[i].of(
+                    cast(int, response.size), cast(str, response.etag),
+                )
+
+            keys = []
+            if sources[i].version_id:
+                keys.append(f"version_id={sources[i].version_id}")
+            if sources[i].offset is not None:
+                keys.append(f"offset={sources[i].offset}")
+            if sources[i].length is not None:
+                keys.append(f"length={sources[i].length}")
+            message = " ".join(keys)
+            source = (
+                f"source {sources[i].bucket_name}/{sources[i].object_name}"
+                + (" " if message else "") + message
+            )
-            src.build_headers(cast(int, stat.size), cast(str, stat.etag))
-            size = cast(int, stat.size)
-            if src.length is not None:
-                size = src.length
-            elif src.offset is not None:
-                size -= src.offset
-            if (
-                size < MIN_PART_SIZE and
-                len(sources) != 1 and
-                i != len(sources)
-            ):
+            if (sources[i].offset or 0) >= cast(int, sources[i].object_size):
+                raise ValueError(
+                    f"source {source}: offset is beyond object size "
+                    f"{sources[i].object_size}",
+                )
+            size = (
+                cast(int, sources[i].object_size) - (sources[i].offset or 0)
+            )
+            if size < (sources[i].length or 0):
+                raise ValueError(
+                    f"source {source}: insufficient object size "
+                    f"{sources[i].object_size}",
+                )
+            size = (
+                size if sources[i].length is None
+                else cast(int, sources[i].length)
+            )
+            if interim_part and size < MIN_PART_SIZE:
                 raise ValueError(
-                    f"source {src.bucket_name}/{src.object_name}: size {size} "
-                    f"must be greater than {MIN_PART_SIZE}"
+                    f"source {source}: size {size} must be greater than "
+                    f"{MIN_PART_SIZE}",
                 )
             object_size += size
 
             if object_size > MAX_MULTIPART_OBJECT_SIZE:
                 raise ValueError(
-                    f"destination object size must be less than "
-                    f"{MAX_MULTIPART_OBJECT_SIZE}"
+                    "source objects yield destination object size greater "
+                    f"than {MAX_MULTIPART_OBJECT_SIZE}",
                 )
 
             if size > MAX_PART_SIZE:
@@ -2513,15 +3461,10 @@ def _calc_part_count(
                     count += 1
                 else:
                     last_part_size = MAX_PART_SIZE
-                if (
-                    last_part_size < MIN_PART_SIZE and
-                    len(sources) != 1 and
-                    i != len(sources)
-                ):
+                if last_part_size < MIN_PART_SIZE and interim_part:
                     raise ValueError(
-                        f"source {src.bucket_name}/{src.object_name}: "
-                        f"for multipart split upload of {size}, "
-                        f"last part size is less than {MIN_PART_SIZE}"
+                        f"source {source}: multipart split upload for {size} "
+                        f"yields last part size less than {MIN_PART_SIZE}",
                    )
                part_count += count
            else:
@@ -2529,48 +3472,17 @@ def _calc_part_count(
         if part_count > MAX_MULTIPART_COUNT:
             raise ValueError(
-                f"Compose sources create more than allowed multipart "
-                f"count {MAX_MULTIPART_COUNT}"
+                f"source objects yield multipart count {part_count} more than "
+                f"allowed multipart count {MAX_MULTIPART_COUNT}",
             )
 
         return part_count
 
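+    # A worked instance of the split arithmetic above (illustrative
+    # numbers): a 12 GiB source with a 5 GiB MAX_PART_SIZE gives
+    # count = int(12 / 5) = 2 full parts plus a 2 GiB remainder, so the
+    # remainder becomes a third part; an undersized last part is only
+    # rejected for interim sources, never for the final one.
+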
-    def _upload_part_copy(
-        self,
-        *,
-        bucket_name: str,
-        object_name: str,
-        upload_id: str,
-        part_number: int,
-        headers: HTTPHeaderDict,
-        region: Optional[str] = None,
-        extra_headers: Optional[HTTPHeaderDict] = None,
-        extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> tuple[str, Optional[datetime]]:
-        """Execute UploadPartCopy S3 API."""
-        query_params = HTTPQueryDict(
-            {
-                "partNumber": str(part_number),
-                "uploadId": upload_id,
-            },
-        )
-        response = self._execute(
-            method="PUT",
-            bucket_name=bucket_name,
-            object_name=object_name,
-            headers=headers,
-            query_params=query_params,
-            region=region,
-            extra_headers=extra_headers,
-            extra_query_params=extra_query_params,
-        )
-        return parse_copy_object(response)
-
     def compose_object(
         self,
         *,
         bucket_name: str,
         object_name: str,
-        sources: list[ComposeSource],
+        sources: list[SourceObject],
         sse: Optional[Sse] = None,
         user_metadata: Optional[HTTPHeaderDict] = None,
         tags: Optional[Tags] = None,
@@ -2579,7 +3491,7 @@ def compose_object(
         region: Optional[str] = None,
         extra_headers: Optional[HTTPHeaderDict] = None,
         extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> ObjectWriteResult:
+    ) -> ObjectWriteResponse:
         """
         Create an object by combining data from multiple source objects
         using server-side copy.
 
@@ -2591,7 +3503,7 @@ def compose_object(
             object_name (str):
                 Object name in the bucket.
 
-            sources (list[ComposeSource]):
+            sources (list[SourceObject]):
                 List of source objects to be combined.
 
             sse (Optional[Sse], default=None):
@@ -2621,23 +3533,23 @@ def compose_object(
                 Extra query parameters for advanced usage.
 
         Returns:
-            ObjectWriteResult:
-                The result of the compose operation.
+            ObjectWriteResponse:
+                The response of the compose operation.
 
         Example:
-            >>> from minio.commonconfig import ComposeSource
+            >>> from minio.args import SourceObject
             >>> from minio.sse import SseS3
             >>>
             >>> sources = [
-            ...     ComposeSource(
+            ...     SourceObject(
             ...         bucket_name="my-job-bucket",
             ...         object_name="my-object-part-one",
             ...     ),
-            ...     ComposeSource(
+            ...     SourceObject(
             ...         bucket_name="my-job-bucket",
             ...         object_name="my-object-part-two",
             ...     ),
-            ...     ComposeSource(
+            ...     SourceObject(
             ...         bucket_name="my-job-bucket",
             ...         object_name="my-object-part-three",
             ...     ),
@@ -2671,19 +3583,6 @@ def compose_object(
         """
         check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
         check_object_name(object_name)
-        if not isinstance(sources, (list, tuple)) or not sources:
-            raise ValueError("sources must be non-empty list or tuple type")
-        i = 0
-        for src in sources:
-            if not isinstance(src, ComposeSource):
-                raise ValueError(f"sources[{i}] must be ComposeSource type")
-            i += 1
-        check_sse(sse)
-        if tags is not None and not isinstance(tags, Tags):
-            raise ValueError("tags must be Tags type")
-        if retention is not None and not isinstance(retention, Retention):
-            raise ValueError("retention must be Retention type")
-
         part_count = self._calc_part_count(sources)
         if (
             part_count == 1 and
@@ -2693,14 +3592,14 @@ def compose_object(
             return self.copy_object(
                 bucket_name=bucket_name,
                 object_name=object_name,
-                source=CopySource.of(sources[0]),
+                source=sources[0],
                 sse=sse,
                 user_metadata=user_metadata,
                 tags=tags,
                 retention=retention,
                 legal_hold=legal_hold,
-                metadata_directive=REPLACE if user_metadata else None,
-                tagging_directive=REPLACE if tags else None,
+                metadata_directive=Directive.REPLACE if user_metadata else None,
+                tagging_directive=Directive.REPLACE if tags else None,
                 region=region,
                 extra_headers=extra_headers,
                 extra_query_params=extra_query_params,
@@ -2713,239 +3612,79 @@ def compose_object(
             retention=retention,
             legal_hold=legal_hold,
         )
-        upload_id = self._create_multipart_upload(
+        cmu_response = self._create_multipart_upload(
             bucket_name=bucket_name,
             object_name=object_name,
             headers=headers,
         )
+        upload_id = cmu_response.result.upload_id
         ssec_headers = (
             sse.headers() if isinstance(sse, SseCustomerKey)
             else HTTPHeaderDict()
         )
         try:
             part_number = 0
-            total_parts = []
-            for src in sources:
-                size = cast(int, src.object_size)
-                if src.length is not None:
-                    size = src.length
-                elif src.offset is not None:
-                    size -= src.offset
-                offset = src.offset or 0
-                headers = cast(HTTPHeaderDict, src.headers)
-                headers.extend(ssec_headers)
-                if size <= MAX_PART_SIZE:
-                    part_number += 1
-                    if src.length is not None:
-                        headers["x-amz-copy-source-range"] = (
-                            f"bytes={offset}-{offset + src.length - 1}"
-                        )
-                    elif src.offset is not None:
-                        headers["x-amz-copy-source-range"] = (
-                            f"bytes={offset}-{offset + size - 1}"
-                        )
-                    etag, _ = self._upload_part_copy(
-                        bucket_name=bucket_name,
-                        object_name=object_name,
-                        upload_id=upload_id,
-                        part_number=part_number,
-                        headers=headers,
-                    )
-                    total_parts.append(Part(part_number, etag))
-                    continue
-                while size > 0:
-                    part_number += 1
-                    length = size if size < MAX_PART_SIZE else MAX_PART_SIZE
-                    end_bytes = offset + length - 1
-                    headers_copy = headers.copy()
-                    headers_copy["x-amz-copy-source-range"] = (
-                        f"bytes={offset}-{end_bytes}"
-                    )
-                    etag, _ = self._upload_part_copy(
-                        bucket_name=bucket_name,
-                        object_name=object_name,
-                        upload_id=upload_id,
-                        part_number=part_number,
-                        headers=headers_copy,
-                    )
-                    total_parts.append(Part(part_number, etag))
-                    offset += length
-                    size -= length
-            result = self._complete_multipart_upload(
-                bucket_name=bucket_name,
-                object_name=object_name,
-                upload_id=upload_id,
-                parts=total_parts,
-            )
-            return ObjectWriteResult.new(
-                headers=result.headers,
-                bucket_name=cast(str, result.bucket_name),
-                object_name=cast(str, result.object_name),
-                version_id=result.version_id,
-                etag=result.etag,
-                location=result.location,
-            )
-        except Exception as exc:
-            if upload_id:
-                self._abort_multipart_upload(
-                    bucket_name=bucket_name,
-                    object_name=object_name,
-                    upload_id=upload_id,
-                )
-            raise exc
-
-    def _abort_multipart_upload(
-        self,
-        *,
-        bucket_name: str,
-        object_name: str,
-        upload_id: str,
-        region: Optional[str] = None,
-        extra_headers: Optional[HTTPHeaderDict] = None,
-        extra_query_params: Optional[HTTPQueryDict] = None,
-    ):
-        """Execute AbortMultipartUpload S3 API."""
-        self._execute(
-            method="DELETE",
-            bucket_name=bucket_name,
-            object_name=object_name,
-            query_params=HTTPQueryDict({'uploadId': upload_id}),
-            region=region,
-            extra_headers=extra_headers,
-            extra_query_params=extra_query_params,
-        )
-
-    def _complete_multipart_upload(
-        self,
-        *,
-        bucket_name: str,
-        object_name: str,
-        upload_id: str,
-        parts: list[Part],
-        region: Optional[str] = None,
-        extra_headers: Optional[HTTPHeaderDict] = None,
-        extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> CompleteMultipartUploadResult:
-        """Execute CompleteMultipartUpload S3 API."""
-        element = Element("CompleteMultipartUpload")
-        for part in parts:
-            tag = SubElement(element, "Part")
-            SubElement(tag, "PartNumber", str(part.part_number))
-            SubElement(tag, "ETag", '"' + part.etag + '"')
-            if part.checksum_crc32:
-                SubElement(tag, "ChecksumCRC32", part.checksum_crc32)
-            elif part.checksum_crc32c:
-                SubElement(tag, "ChecksumCRC32C", part.checksum_crc32c)
-            elif part.checksum_sha1:
-                SubElement(tag, "ChecksumSHA1", part.checksum_sha1)
-            elif part.checksum_sha256:
-                SubElement(tag, "ChecksumSHA256", part.checksum_sha256)
-        body = getbytes(element)
-        headers = HTTPHeaderDict(
-            {
-                "Content-Type": 'application/xml',
-                "Content-MD5": base64_string(MD5.hash(body)),
-            },
-        )
-        response = self._execute(
-            method="POST",
-            bucket_name=bucket_name,
-            object_name=object_name,
-            body=body,
-            headers=headers,
-            query_params=HTTPQueryDict({'uploadId': upload_id}),
-            region=region,
-            extra_headers=extra_headers,
-            extra_query_params=extra_query_params,
-        )
-        return CompleteMultipartUploadResult(response)
-
-    def _create_multipart_upload(
-        self,
-        *,
-        bucket_name: str,
-        object_name: str,
-        headers: HTTPHeaderDict,
-        region: Optional[str] = None,
-        extra_headers: Optional[HTTPHeaderDict] = None,
-        extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> str:
-        """Execute CreateMultipartUpload S3 API."""
-        if not headers.get("Content-Type"):
-            headers["Content-Type"] = "application/octet-stream"
-        response = self._execute(
-            method="POST",
-            bucket_name=bucket_name,
-            object_name=object_name,
-            headers=headers,
-            query_params=HTTPQueryDict({"uploads": ""}),
-            region=region,
-            extra_headers=extra_headers,
-            extra_query_params=extra_query_params,
-        )
-        element = ET.fromstring(response.data.decode())
-        return cast(str, findtext(element, "UploadId", True))
-
-    def _put_object(
-        self,
-        *,
-        bucket_name: str,
-        object_name: str,
-        data: bytes,
-        headers: Optional[HTTPHeaderDict] = None,
-        query_params: Optional[HTTPQueryDict] = None,
-        region: Optional[str] = None,
-        extra_headers: Optional[HTTPHeaderDict] = None,
-        extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> ObjectWriteResult:
-        """Execute PutObject S3 API."""
-        response = self._execute(
-            method="PUT",
-            bucket_name=bucket_name,
-            object_name=object_name,
-            body=data,
-            headers=headers,
-            query_params=query_params,
-            no_body_trace=True,
-            region=region,
-            extra_headers=extra_headers,
-            extra_query_params=extra_query_params,
-        )
-        return ObjectWriteResult.new(
-            headers=response.headers,
-            bucket_name=bucket_name,
-            object_name=object_name,
-        )
-
-    def _upload_part(
-        self,
-        *,
-        bucket_name: str,
-        object_name: str,
-        data: bytes,
-        headers: Optional[HTTPHeaderDict],
-        upload_id: str,
-        part_number: int,
-        region: Optional[str] = None,
-        extra_headers: Optional[HTTPHeaderDict] = None,
-        extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> ObjectWriteResult:
-        """Execute UploadPart S3 API."""
-        query_params = HTTPQueryDict({
-            "partNumber": str(part_number),
-            "uploadId": upload_id,
-        })
-        result = self._put_object(
-            bucket_name=bucket_name,
-            object_name=object_name,
-            data=data,
-            headers=headers,
-            query_params=query_params,
-            region=region,
-            extra_headers=extra_headers,
-            extra_query_params=extra_query_params,
-        )
-        return result
+            total_parts = []
+            for src in sources:
+                size = cast(int, src.object_size)
+                if src.length is not None:
+                    size = src.length
+                elif src.offset is not None:
+                    size -= src.offset
+                offset = src.offset or 0
+                headers = cast(HTTPHeaderDict, src.headers)
+                headers.extend(ssec_headers)
+                if size <= MAX_PART_SIZE:
+                    part_number += 1
+                    if src.length is not None:
+                        headers["x-amz-copy-source-range"] = (
+                            f"bytes={offset}-{offset + src.length - 1}"
+                        )
+                    elif src.offset is not None:
+                        headers["x-amz-copy-source-range"] = (
+                            f"bytes={offset}-{offset + size - 1}"
+                        )
+                    response = self._upload_part_copy(
+                        bucket_name=bucket_name,
+                        object_name=object_name,
+                        upload_id=upload_id,
+                        part_number=part_number,
+                        headers=headers,
+                    )
+                    total_parts.append(response.part)
+                    continue
+                while size > 0:
+                    part_number += 1
+                    length = size if size < MAX_PART_SIZE else MAX_PART_SIZE
+                    end_bytes = offset + length - 1
+                    headers_copy = headers.copy()
+                    headers_copy["x-amz-copy-source-range"] = (
+                        f"bytes={offset}-{end_bytes}"
+                    )
+                    response = self._upload_part_copy(
+                        bucket_name=bucket_name,
+                        object_name=object_name,
+                        upload_id=upload_id,
+                        part_number=part_number,
+                        headers=headers_copy,
+                    )
+                    total_parts.append(response.part)
+                    offset += length
+                    size -= length
+            return self._complete_multipart_upload(
+                bucket_name=bucket_name,
+                object_name=object_name,
+                upload_id=upload_id,
+                parts=total_parts,
+            )
+        except Exception as exc:
+            if upload_id:
+                self._abort_multipart_upload(
+                    bucket_name=bucket_name,
+                    object_name=object_name,
+                    upload_id=upload_id,
+                )
+            raise exc
 
     def _upload_part_task(self, kwargs):
         """Upload_part task for ThreadPool."""
@@ -2972,7 +3711,7 @@ def put_object(
         region: Optional[str] = None,
         extra_headers: Optional[HTTPHeaderDict] = None,
         extra_query_params: Optional[HTTPQueryDict] = None,
-    ) -> ObjectWriteResult:
+    ) -> ObjectWriteResponse:
         """
         Upload data from a stream to an object in a bucket.
 
@@ -3034,8 +3773,8 @@ def put_object(
                 Extra query parameters for advanced usage.
 
         Returns:
-            ObjectWriteResult:
-                The result of the object upload operation.
+            ObjectWriteResponse:
+                The response of the object upload operation.
 
         Example:
             >>> # Upload simple data
@@ -3136,20 +3875,14 @@ def put_object(
         """
         check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host)
         check_object_name(object_name)
-        check_sse(sse)
-        if tags is not None and not isinstance(tags, Tags):
-            raise ValueError("tags must be Tags type")
-        if retention is not None and not isinstance(retention, Retention):
-            raise ValueError("retention must be Retention type")
-        if not callable(getattr(data, "read")):
-            raise ValueError("input data must have callable read()")
-        part_size, part_count = get_part_info(length, part_size)
+        part_size, part_count = self._get_part_info(length, part_size)
         if progress:
             # Set progress bar length and object name before upload
             progress.set_meta(object_name=object_name, total_length=length)
 
         add_content_sha256 = self._base_url.is_https
-        algorithms = [checksum or Algorithm.CRC32C]
+        algorithm = checksum or Algorithm.CRC32C
+        algorithms = [algorithm]
         add_sha256_checksum = algorithms[0] == Algorithm.SHA256
         if add_content_sha256 and not add_sha256_checksum:
             algorithms.append(Algorithm.SHA256)
@@ -3181,7 +3914,7 @@ def put_object(
                 if part_number == part_count:
                     part_size = object_size - uploaded_size
                     stop = True
-                    part_data = read_part_data(
+                    part_data = self._read_part_data(
                         stream=data, size=part_size, progress=progress,
                     )
@@ -3194,7 +3927,7 @@ def put_object(
                            f"got: {len(part_data)} bytes"
                        )
                else:
-                    part_data = read_part_data(
+                    part_data = self._read_part_data(
                        stream=data,
                        size=part_size + 1,
                        part_data=one_byte,
@@ -3229,18 +3962,16 @@ def put_object(
                )
 
                if not upload_id:
-                    headers.extend(make_headers(
-                        hashers, add_content_sha256, add_sha256_checksum,
-                        algorithm_only=True,
-                    ))
-                    upload_id = self._create_multipart_upload(
+                    cmu_response = self._create_multipart_upload(
                        bucket_name=bucket_name,
                        object_name=object_name,
                        headers=headers,
+                        algorithm=algorithm,
                        region=region,
                        extra_headers=extra_headers,
                        extra_query_params=extra_query_params,
                    )
+                    upload_id = cmu_response.result.upload_id
                    if num_parallel_uploads and num_parallel_uploads > 1:
                        pool = ThreadPool(num_parallel_uploads)
                        pool.start_parallel()
@@ -3262,7 +3993,7 @@ def put_object(
                        self._upload_part_task, kwargs,
                    )
                else:
-                    result = self._upload_part(
+                    response = self._upload_part(
                        bucket_name=bucket_name,
                        object_name=object_name,
                        data=part_data,
@@ -3270,45 +4001,21 @@ def put_object(
                        upload_id=upload_id,
                        part_number=part_number,
                    )
-                    parts.append(Part(
-                        part_number=part_number,
-                        etag=result.etag,
-                        checksum_crc32=result.checksum_crc32,
-                        checksum_crc32c=result.checksum_crc32c,
-                        checksum_sha1=result.checksum_sha1,
-                        checksum_sha256=result.checksum_sha256,
-                    ))
+                    parts.append(response.part)
 
            if pool:
-                result_queue = pool.result()
-                parts = [Part(0, "")] * part_count
-                while not result_queue.empty():
-                    part_number, upload_result = result_queue.get()
-                    parts[part_number - 1] = Part(
-                        part_number=part_number,
-                        etag=upload_result.etag,
-                        checksum_crc32=upload_result.checksum_crc32,
-                        checksum_crc32c=upload_result.checksum_crc32c,
-                        checksum_sha1=upload_result.checksum_sha1,
-                        checksum_sha256=upload_result.checksum_sha256,
-                    )
+                result = pool.result()
+                parts = [Part()] * part_count
+                while not result.empty():
+                    part_number, response = result.get()
+                    parts[part_number - 1] = response.part
 
-            upload_result = self._complete_multipart_upload(
+            return self._complete_multipart_upload(
                bucket_name=bucket_name,
                object_name=object_name,
                upload_id=cast(str, upload_id),
                parts=parts,
-                extra_headers=HTTPHeaderDict(
-                    sse.headers() if isinstance(sse, SseCustomerKey) else None
-                ),
-            )
-            return ObjectWriteResult.new(
-                headers=upload_result.headers,
-                bucket_name=cast(str, upload_result.bucket_name),
-                object_name=cast(str, upload_result.object_name),
-                version_id=upload_result.version_id,
-                etag=upload_result.etag,
-                location=upload_result.location,
+                ssec=sse if isinstance(sse, SseCustomerKey) else None,
             )
         except Exception as exc:
             if upload_id:
@@ -3319,6 +4026,131 @@ def put_object(
                 )
             raise exc
 
+    def put_object_fan_out(
+        self,
+        *,
+        bucket_name: str,
+        data: BinaryIO,
+        length: int,
+        entries: list[PutObjectFanOutEntry],
+        sse: Optional[Sse] = None,
+        checksum: Optional[Checksum] = None,
+        region: Optional[str] = None,
+        extra_headers: Optional[HTTPHeaderDict] = None,
+        extra_query_params: Optional[HTTPQueryDict] = None,
+    ) -> PutObjectFanOutResponse:
+        """
+        Upload multiple objects with the same content from a single
+        stream, with optional metadata and tags.
+
+        Args:
+            bucket_name (str):
+                Name of the bucket.
+
+            data (BinaryIO):
+                An object with a callable ``read()`` method that returns a
+                bytes object.
+
+            length (int):
+                Size of the data in bytes.
+
+            entries (list[PutObjectFanOutEntry]):
+                Objects to be created.
+
+            sse (Optional[Sse], default=None):
+                Server-side encryption configuration.
+
+            checksum (Optional[Checksum], default=None):
+                Checksum information.
+
+            region (Optional[str], default=None):
+                Region of the bucket to skip auto probing.
+
+            extra_headers (Optional[HTTPHeaderDict], default=None):
+                Extra headers for advanced usage.
+
+            extra_query_params (Optional[HTTPQueryDict], default=None):
+                Extra query parameters for advanced usage.
+
+        Returns:
+            PutObjectFanOutResponse:
+                The response of the object upload operation.
+
+        Example:
+            >>> # Fan out objects
+            >>> response = client.put_object_fan_out(
+            ...     bucket_name="my-bucket",
+            ...     data=io.BytesIO(b"hello"),
+            ...     length=5,
+            ...     entries=[
+            ...         PutObjectFanOutEntry(key="fan-out.0"),
+            ...         PutObjectFanOutEntry(
+            ...             key="fan-out.1",
+            ...             tags={"Project": "Project One", "User": "jsmith"},
+            ...         ),
+            ...     ],
+            ... )
+            >>> for result in response.results:
+            ...     print(
+            ...         f"created {result.key} object; etag: {result.etag}, "
+            ...         f"version-id: {result.version_id}, ",
+            ...         f"error: {result.error}",
) + """ + check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) + if length < 0: + raise ValueError(f"invalid stream length {length}") + if length > MAX_PART_SIZE: + raise ValueError( + f"length {length} is not supported; maximum allowed 5GiB", + ) + + fan_out_list = "".join( + [entry.to_json() for entry in entries], + ) + + part_data = self._read_part_data(stream=data, size=length) + timestamp = int(time.utcnow().timestamp() * 1000) + object_name = f"fan-out-{random()}-{timestamp}" + + policy = PostPolicy(bucket_name, time.utcnow() + timedelta(minutes=15)) + policy.add_equals_condition("key", object_name) + if sse: + for key, value in sse.headers().items(): + policy.add_equals_condition(key, value) + if checksum: + for key, value in checksum.headers().items(): + policy.add_equals_condition(key, value) + form_data = self.presigned_post_policy(policy) + + fields: list[tuple[str, Any]] = [] + for key, value in form_data.items(): + fields.append((key, value)) + fields.append(("key", object_name)) + fields.append(("x-minio-fanout-list", fan_out_list)) + fields.append( + ( + "file", + ("fanout-content", part_data, "application/octet-stream"), + ), + ) + body, content_type = encode_multipart_formdata(fields) + response = self._url_open( + method="POST", + region=self._get_region(bucket_name=bucket_name, region=region), + bucket_name=bucket_name, + body=body, + headers=HTTPHeaderDict({"Content-Type": content_type}), + extra_headers=extra_headers, + extra_query_params=extra_query_params, + skip_signing=True, + ) + return PutObjectFanOutResponse( + response=response, + bucket_name=bucket_name, + region=region or self._region_map.get(bucket_name) or "", + ) + def _append_object( self, *, @@ -3331,7 +4163,7 @@ def _append_object( region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, - ) -> ObjectWriteResult: + ) -> ObjectWriteResponse: """Do append object.""" chunk_count = -1 if length is not None: @@ -3343,11 +4175,11 @@ def _append_object( one_byte = b"" stop = False - stat = self.stat_object( + head_response = self._head_object( bucket_name=bucket_name, object_name=object_name, ) - write_offset = cast(int, stat.size) + write_offset = cast(int, head_response.size) while not stop: chunk_number += 1 @@ -3355,7 +4187,7 @@ def _append_object( if chunk_number == chunk_count and object_size is not None: chunk_size = object_size - uploaded_size stop = True - chunk_data = read_part_data( + chunk_data = self._read_part_data( stream=stream, size=chunk_size, progress=progress, ) if len(chunk_data) != chunk_size: @@ -3365,7 +4197,7 @@ def _append_object( f"got: {len(chunk_data)} bytes" ) else: - chunk_data = read_part_data( + chunk_data = self._read_part_data( stream=stream, size=chunk_size + 1, part_data=one_byte, @@ -3385,7 +4217,7 @@ def _append_object( headers = HTTPHeaderDict( {"x-amz-write-offset-bytes": str(write_offset)}, ) - upload_result = self._put_object( + response = self._put_object( bucket_name=bucket_name, object_name=object_name, data=chunk_data, @@ -3395,7 +4227,7 @@ def _append_object( extra_query_params=extra_query_params, ) write_offset += len(chunk_data) - return upload_result + return response def append_object( self, @@ -3411,7 +4243,7 @@ def append_object( region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, - ) -> ObjectWriteResult: + ) -> ObjectWriteResponse: """ Append data to an existing object in a bucket. 
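A minimal usage sketch for the renamed append API, since the diff context elides the body of the "Append simple data" example: it assumes `client` is an already-configured `Minio` instance, that the target object already exists, and that the keyword-only `data`/`length` argument names mirror the `put_object` style shown above; bucket and object names are illustrative.

    import io

    # Sketch: append bytes to an existing object (argument names assumed).
    payload = b"appended tail"
    response = client.append_object(
        bucket_name="my-bucket",
        object_name="my-object",   # must already exist on the server
        data=io.BytesIO(payload),
        length=len(payload),
    )
    # ObjectWriteResponse is assumed to carry the usual write metadata.
    print(f"etag after append: {response.etag}")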
@@ -3454,8 +4286,8 @@ def append_object( Extra query parameters for advanced usage. Returns: - ObjectWriteResult: - The result of the append operation. + ObjectWriteResponse: + The response of the append operation. Example: >>> # Append simple data @@ -3593,7 +4425,7 @@ def list_objects( Returns: Iterator[Object]: - An iterator of :class:`minio.datatypes.Object`. + An iterator of :class:`minio.models.Object`. Example: >>> # List all objects in a bucket @@ -3665,7 +4497,7 @@ def stat_object( region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, - ) -> Object: + ) -> StatObjectResponse: """ Get object information and metadata of an object. @@ -3713,9 +4545,8 @@ def stat_object( Extra query parameters for advanced usage. Returns: - Object: - A :class:`minio.datatypes.Object` object containing metadata - and information about the object. + StatObjectResponse: + A :class:`minio.response.StatObjectResponse` object Example: >>> # Get object information @@ -3746,11 +4577,10 @@ def stat_object( >>> print(f"last-modified: {result.last_modified}, " ... f"size: {result.size}") """ - check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) - check_object_name(object_name) - check_ssec(ssec) - - headers = self._gen_read_headers( + return self._head_object( + bucket_name=bucket_name, + object_name=object_name, + version_id=version_id, ssec=ssec, offset=offset, length=length, @@ -3759,38 +4589,11 @@ def stat_object( modified_since=modified_since, unmodified_since=unmodified_since, fetch_checksum=fetch_checksum, - ) - query_params = HTTPQueryDict() - if version_id: - query_params["versionId"] = version_id - response = self._execute( - method="HEAD", - bucket_name=bucket_name, - object_name=object_name, - headers=headers, - query_params=query_params, region=region, extra_headers=extra_headers, extra_query_params=extra_query_params, ) - value = response.headers.get("last-modified") - if value is not None: - last_modified = time.from_http_header(value) - else: - last_modified = None - - return Object( - bucket_name, - object_name, - last_modified=last_modified, - etag=response.headers.get("etag", "").replace('"', ""), - size=int(response.headers.get("content-length", "0")), - content_type=response.headers.get("content-type"), - metadata=response.headers, - version_id=response.headers.get("x-amz-version-id"), - ) - def remove_object( self, *, @@ -3852,61 +4655,16 @@ def remove_object( extra_query_params=extra_query_params, ) - def _delete_objects( - self, - *, - bucket_name: str, - delete_object_list: list[DeleteObject], - quiet: bool = False, - bypass_governance_mode: bool = False, - region: Optional[str] = None, - extra_headers: Optional[HTTPHeaderDict] = None, - extra_query_params: Optional[HTTPQueryDict] = None, - ) -> DeleteResult: - """ - Delete multiple objects. - - :param bucket_name: Name of the bucket. - :param delete_object_list: List of maximum 1000 - :class:`DeleteObject ` object. - :param quiet: quiet flag. - :param bypass_governance_mode: Bypass Governance retention mode. - :return: :class:`DeleteResult ` object. 
- """ - body = marshal(DeleteRequest(delete_object_list, quiet=quiet)) - headers = HTTPHeaderDict( - {"Content-MD5": base64_string(MD5.hash(body))}, - ) - if bypass_governance_mode: - headers["x-amz-bypass-governance-retention"] = "true" - response = self._execute( - method="POST", - bucket_name=bucket_name, - body=body, - headers=headers, - query_params=HTTPQueryDict({"delete": ""}), - region=region, - extra_headers=extra_headers, - extra_query_params=extra_query_params, - ) - - element = ET.fromstring(response.data.decode()) - return ( - DeleteResult([], [DeleteError.fromxml(element)]) - if element.tag.endswith("Error") - else unmarshal(DeleteResult, response.data.decode()) - ) - def remove_objects( self, *, bucket_name: str, - delete_object_list: Iterable[DeleteObject], + objects: Iterable[DeleteRequest.Object], bypass_governance_mode: bool = False, region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, - ) -> Iterator[DeleteError]: + ) -> Iterator[DeleteResult.Error]: """ Remove multiple objects from a bucket. @@ -3914,8 +4672,8 @@ def remove_objects( bucket_name (str): Name of the bucket. - delete_object_list (Iterable[DeleteObject]): - Iterable of :class:`minio.deleteobjects.DeleteObject` + objects (Iterable[DeleteRequest.Object]): + Iterable of :class:`minio.request.DeleteRequest.Object` instances to be deleted. bypass_governance_mode (bool, default=False): @@ -3931,18 +4689,18 @@ def remove_objects( Extra query parameters for advanced usage. Returns: - Iterator[DeleteError]: - An iterator of :class:`minio.deleteobjects.DeleteError` + Iterator[DeleteResult.Error]: + An iterator of :class:`minio.models.DeleteResult.Error` objects for any failures. Example: >>> # Remove a list of objects >>> errors = client.remove_objects( ... bucket_name="my-bucket", - ... delete_object_list=[ - ... DeleteObject(name="my-object1"), - ... DeleteObject(name="my-object2"), - ... DeleteObject( + ... objects=[ + ... DeleteRequest.Object(name="my-object1"), + ... DeleteRequest.Object(name="my-object2"), + ... DeleteRequest.Object( ... name="my-object3", ... version_id="13f88b18-8dcd-4c83-88f2-8631fdb6250c", ... ), @@ -3952,8 +4710,8 @@ def remove_objects( ... print("error occurred when deleting object", error) >>> >>> # Remove objects under a prefix recursively - >>> delete_object_list = map( - ... lambda x: DeleteObject(x.object_name), + >>> objects = map( + ... lambda x: DeleteRequest.Object(x.object_name), ... client.list_objects( ... bucket_name="my-bucket", ... prefix="my/prefix/", @@ -3962,7 +4720,7 @@ def remove_objects( ... ) >>> errors = client.remove_objects( ... bucket_name="my-bucket", - ... delete_object_list=delete_object_list, + ... objects=objects, ... ) >>> for error in errors: ... print("error occurred when deleting object", error) @@ -3970,22 +4728,22 @@ def remove_objects( check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) # turn list like objects into an iterator. - delete_object_list = itertools.chain(delete_object_list) + objects = itertools.chain(objects) while True: # get 1000 entries or whatever available. 
- objects = [ + object_list = [ delete_object for _, delete_object in zip( - range(1000), delete_object_list, + range(1000), objects, ) ] - if not objects: + if not object_list: break - result = self._delete_objects( + response = self._delete_objects( bucket_name=bucket_name, - delete_object_list=objects, + objects=object_list, quiet=True, bypass_governance_mode=bypass_governance_mode, region=region, @@ -3993,7 +4751,7 @@ def remove_objects( extra_query_params=extra_query_params, ) - for error in result.error_list: + for error in response.result.errors: # AWS S3 returns "NoSuchVersion" error when # version doesn't exist ignore this error # yield all errors otherwise @@ -4266,8 +5024,6 @@ def presigned_post_policy(self, policy: PostPolicy) -> dict[str, str]: ... ) >>> form_data = client.presigned_post_policy(policy) """ - if not isinstance(policy, PostPolicy): - raise ValueError("policy must be PostPolicy type") if not self._provider: raise ValueError( "anonymous access does not require presigned post form-data", @@ -4340,7 +5096,7 @@ def get_bucket_replication( Returns: Optional[ReplicationConfig]: - A :class:`minio.replicationconfig.ReplicationConfig` object + A :class:`minio.models.ReplicationConfig` object if replication is configured, otherwise ``None``. Example: @@ -4419,8 +5175,6 @@ def set_bucket_replication( ... ) """ check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) - if not isinstance(config, ReplicationConfig): - raise ValueError("config must be ReplicationConfig type") body = marshal(config) headers = HTTPHeaderDict( {"Content-MD5": base64_string(MD5.hash(body))}, @@ -4497,7 +5251,7 @@ def get_bucket_lifecycle( Returns: Optional[LifecycleConfig]: - A :class:`minio.lifecycleconfig.LifecycleConfig` object if + A :class:`minio.models.LifecycleConfig` object if configured, otherwise ``None``. Example: @@ -4573,8 +5327,6 @@ def set_bucket_lifecycle( ... ) """ check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) - if not isinstance(config, LifecycleConfig): - raise ValueError("config must be LifecycleConfig type") body = marshal(config) headers = HTTPHeaderDict( {"Content-MD5": base64_string(MD5.hash(body))}, @@ -4651,7 +5403,7 @@ def get_bucket_tags( Returns: Optional[Tags]: - A :class:`minio.commonconfig.Tags` object if tags are + A :class:`minio.models.Tags` object if tags are configured, otherwise ``None``. Example: @@ -4692,7 +5444,7 @@ def set_bucket_tags( tags (Tags): Tags configuration as a - :class:`minio.commonconfig.Tags` object. + :class:`minio.models.Tags` object. region (Optional[str], default=None): Region of the bucket to skip auto probing. @@ -4710,8 +5462,6 @@ def set_bucket_tags( >>> client.set_bucket_tags(bucket_name="my-bucket", tags=tags) """ check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) - if not isinstance(tags, Tags): - raise ValueError("tags must be Tags type") body = marshal(Tagging(tags)) headers = HTTPHeaderDict( {"Content-MD5": base64_string(MD5.hash(body))}, @@ -4815,7 +5565,7 @@ def get_object_tags( Returns: Optional[Tags]: - A :class:`minio.commonconfig.Tags` object if tags are + A :class:`minio.models.Tags` object if tags are configured, otherwise ``None``. Example: @@ -4870,7 +5620,7 @@ def set_object_tags( tags (Tags): Tags configuration as a - :class:`minio.commonconfig.Tags` object. + :class:`minio.models.Tags` object. version_id (Optional[str], default=None): Version ID of the object. 
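A short sketch of the relocated `Tags` type named in the docstrings above, assuming a configured `client`; the `minio.models` import path follows this patch, and the bucket/object names are illustrative. The entry limits noted in the comments come from the `Tags` class added later in this patch.

    from minio.models import Tags

    # Bucket tags: Tags.new_bucket_tags() allows up to 50 entries.
    tags = Tags.new_bucket_tags()
    tags["Project"] = "Project One"
    tags["User"] = "jsmith"
    client.set_bucket_tags(bucket_name="my-bucket", tags=tags)

    # Object tags: Tags.new_object_tags() allows up to 10 entries.
    obj_tags = Tags.new_object_tags()
    obj_tags["Stage"] = "draft"
    client.set_object_tags(
        bucket_name="my-bucket",
        object_name="my-object",
        tags=obj_tags,
    )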
@@ -4896,8 +5646,6 @@ def set_object_tags( """ check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) check_object_name(object_name) - if not isinstance(tags, Tags): - raise ValueError("tags must be Tags type") body = marshal(Tagging(tags)) headers = HTTPHeaderDict( {"Content-MD5": base64_string(MD5.hash(body))}, @@ -5167,7 +5915,7 @@ def get_object_lock_config( Returns: ObjectLockConfig: - A :class:`minio.objectlockconfig.ObjectLockConfig` + A :class:`minio.models.ObjectLockConfig` object representing the bucket's object-lock configuration. @@ -5223,8 +5971,6 @@ def set_object_lock_config( ... ) """ check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) - if not isinstance(config, ObjectLockConfig): - raise ValueError("config must be ObjectLockConfig type") body = marshal(config) headers = HTTPHeaderDict( {"Content-MD5": base64_string(MD5.hash(body))}, @@ -5274,7 +6020,7 @@ def get_object_retention( Returns: Optional[Retention]: - A :class:`minio.retention.Retention` object if retention + A :class:`minio.models.Retention` object if retention is set, otherwise ``None``. Example: @@ -5354,8 +6100,6 @@ def set_object_retention( """ check_bucket_name(bucket_name, s3_check=self._base_url.is_aws_host) check_object_name(object_name) - if not isinstance(config, Retention): - raise ValueError("config must be Retention type") body = marshal(config) headers = HTTPHeaderDict( {"Content-MD5": base64_string(MD5.hash(body))}, @@ -5392,7 +6136,7 @@ def upload_snowball_objects( region: Optional[str] = None, extra_headers: Optional[HTTPHeaderDict] = None, extra_query_params: Optional[HTTPQueryDict] = None, - ) -> ObjectWriteResult: + ) -> ObjectWriteResponse: """ Upload multiple objects in a single PUT call. @@ -5440,8 +6184,8 @@ def upload_snowball_objects( Extra query parameters for advanced usage. Returns: - ObjectWriteResult: - A :class:`minio.helpers.ObjectWriteResult` object. + ObjectWriteResponse: + The response of the snowball upload operation. Example: >>> client.upload_snowball_objects( @@ -5613,107 +6357,3 @@ def _list_objects( continuation_token = start_after yield from objects - - def _list_multipart_uploads( - self, - *, - bucket_name: str, - delimiter: Optional[str] = None, - encoding_type: Optional[str] = None, - key_marker: Optional[str] = None, - max_uploads: Optional[int] = None, - prefix: Optional[str] = None, - upload_id_marker: Optional[str] = None, - region: Optional[str] = None, - extra_headers: Optional[HTTPHeaderDict] = None, - extra_query_params: Optional[HTTPQueryDict] = None, - ) -> ListMultipartUploadsResult: - """ - Execute ListMultipartUploads S3 API. - - :param bucket_name: Name of the bucket. - :param delimiter: (Optional) Delimiter on listing. - :param encoding_type: (Optional) Encoding type. - :param key_marker: (Optional) Key marker. - :param max_uploads: (Optional) Maximum upload information to fetch. - :param prefix: (Optional) Prefix on listing. - :param upload_id_marker: (Optional) Upload ID marker. - :param extra_headers: (Optional) Extra headers for advanced usage. - :param extra_query_params: (Optional) Extra query parameters for - advanced usage. 
- :return: - :class:`ListMultipartUploadsResult ` - object - """ - - query_params = HTTPQueryDict( - { - "uploads": "", - "delimiter": delimiter or "", - "max-uploads": str(max_uploads or 1000), - "prefix": prefix or "", - "encoding-type": "url", - }, - ) - if encoding_type: - query_params["encoding-type"] = encoding_type - if key_marker: - query_params["key-marker"] = key_marker - if upload_id_marker: - query_params["upload-id-marker"] = upload_id_marker - - response = self._execute( - method="GET", - bucket_name=bucket_name, - query_params=query_params, - region=region, - extra_headers=extra_headers, - extra_query_params=extra_query_params, - ) - return ListMultipartUploadsResult(response) - - def _list_parts( - self, - *, - bucket_name: str, - object_name: str, - upload_id: str, - max_parts: Optional[int] = None, - part_number_marker: Optional[str] = None, - region: Optional[str] = None, - extra_headers: Optional[HTTPHeaderDict] = None, - extra_query_params: Optional[HTTPQueryDict] = None, - ) -> ListPartsResult: - """ - Execute ListParts S3 API. - - :param bucket_name: Name of the bucket. - :param object_name: Object name in the bucket. - :param upload_id: Upload ID. - :param max_parts: (Optional) Maximum parts information to fetch. - :param part_number_marker: (Optional) Part number marker. - :param extra_headers: (Optional) Extra headers for advanced usage. - :param extra_query_params: (Optional) Extra query parameters for - advanced usage. - :return: :class:`ListPartsResult ` object - """ - - query_params = HTTPQueryDict( - { - "uploadId": upload_id, - "max-parts": str(max_parts or 1000), - }, - ) - if part_number_marker: - query_params["part-number-marker"] = part_number_marker - - response = self._execute( - method="GET", - bucket_name=bucket_name, - object_name=object_name, - query_params=query_params, - region=region, - extra_headers=extra_headers, - extra_query_params=extra_query_params, - ) - return ListPartsResult(response) diff --git a/minio/minioadmin.py b/minio/minioadmin.py index 42d0b851..5b7f15f4 100644 --- a/minio/minioadmin.py +++ b/minio/minioadmin.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2021 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,40 +14,35 @@ # See the License for the specific language governing permissions and # limitations under the License. -# pylint: disable=too-many-public-methods -# pylint: disable=too-many-lines +# pylint: disable=too-many-public-methods,too-many-lines """MinIO Admin Client to perform MinIO administration operations.""" -from __future__ import absolute_import, annotations +from __future__ import annotations import json import os -from datetime import timedelta +from dataclasses import dataclass +from datetime import datetime, timedelta from enum import Enum, unique from typing import Any, Optional, TextIO, Tuple, cast from urllib.parse import urlunsplit import certifi from urllib3 import Retry -from urllib3._collections import HTTPHeaderDict from urllib3.poolmanager import PoolManager - -try: - from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined] -except ImportError: - from urllib3.response import HTTPResponse as BaseHTTPResponse - from urllib3.util import Timeout from . 
import time +from .checksum import sha256_hash +from .compat import HTTPHeaderDict, HTTPQueryDict, HTTPResponse from .credentials import Provider from .crypto import decrypt, encrypt -from .datatypes import PeerInfo, PeerSite, SiteReplicationStatusOptions from .error import MinioAdminException -from .helpers import (_DEFAULT_USER_AGENT, _REGION_REGEX, HTTPQueryDict, - _parse_url, headers_to_strings, sha256_hash, url_replace) +from .helpers import (REGION_REGEX, get_user_agent, headers_to_strings, + parse_url, url_replace) from .signer import sign_v4_s3 +from .time import to_iso8601utc @unique @@ -124,18 +119,10 @@ def __init__( cert_check: bool = True, http_client: Optional[PoolManager] = None, ): - url = _parse_url(("https://" if secure else "http://") + endpoint) - if not isinstance(credentials, Provider): - raise ValueError("valid credentials must be provided") - if region and not _REGION_REGEX.match(region): + url = parse_url(("https://" if secure else "http://") + endpoint) + if region and not REGION_REGEX.match(region): raise ValueError(f"invalid region {region}") - if http_client: - if not isinstance(http_client, PoolManager): - raise ValueError( - "HTTP client should be instance of " - "`urllib3.poolmanager.PoolManager`" - ) - else: + if not http_client: timeout = timedelta(minutes=5).seconds http_client = PoolManager( timeout=Timeout(connect=timeout, read=timeout), @@ -155,7 +142,9 @@ def __init__( self._secure = secure self._cert_check = cert_check self._http = http_client - self._user_agent = _DEFAULT_USER_AGENT + self._user_agent = get_user_agent( + app_name="", app_version="", default=True, + ) self._trace_stream: Optional[TextIO] = None def __del__(self): @@ -169,7 +158,7 @@ def _url_open( query_params: Optional[HTTPQueryDict] = None, body: Optional[bytes] = None, preload_content: bool = True, - ) -> BaseHTTPResponse: + ) -> HTTPResponse: """Execute HTTP request.""" creds = self._provider.retrieve() @@ -263,9 +252,7 @@ def set_app_info(self, app_name: str, app_version: str): Example:: client.set_app_info('my_app', '1.0.2') """ - if not (app_name and app_version): - raise ValueError("Application name/version cannot be empty.") - self._user_agent = f"{_DEFAULT_USER_AGENT} {app_name}/{app_version}" + self._user_agent = get_user_agent(app_name, app_version) def trace_on(self, stream: TextIO): """ @@ -1036,3 +1023,88 @@ def get_policy_entities( response, self._provider.retrieve().secret_key, ) return plain_data.decode() + + @dataclass(frozen=True) + class PeerSite: + """ + Represents a cluster/site to be added to the set of replicated sites. 
+ """ + name: str + endpoint: str + access_key: str + secret_key: str + + def to_dict(self) -> dict[str, str]: + """Convert to dictionary.""" + return { + "name": self.name, + "endpoints": self.endpoint, + "accessKey": self.access_key, + "secretKey": self.secret_key, + } + + @dataclass(frozen=True) + class SiteReplicationStatusOptions: + """Represents site replication status options.""" + ENTITY_TYPE = Enum( + "ENTITY_TYPE", + { + "BUCKET": "bucket", + "POLICY": "policy", + "USER": "user", + "GROUP": "group", + }, + ) + buckets: bool = False + policies: bool = False + users: bool = False + groups: bool = False + metrics: bool = False + show_deleted: bool = False + entity: Optional[str] = None + entity_value: Optional[str] = None + + def to_query_params(self) -> HTTPQueryDict: + """Convert this options to query parameters.""" + params = HTTPQueryDict() + params["buckets"] = str(self.buckets).lower() + params["policies"] = str(self.policies).lower() + params["users"] = str(self.users).lower() + params["groups"] = str(self.groups).lower() + params["metrics"] = str(self.metrics).lower() + params["showDeleted"] = str(self.show_deleted).lower() + if self.entity and self.entity_value: + params["entity"] = self.entity + params["entityvalue"] = self.entity_value + return params + + @dataclass(frozen=True) + class PeerInfo: + """Site replication peer information.""" + deployment_id: str + endpoint: str + bucket_bandwidth_limit: str + bucket_bandwidth_set: str + name: Optional[str] = None + sync_status: Optional[str] = None + bucket_bandwidth_updated_at: Optional[datetime] = None + + def to_dict(self): + """Converts peer information to dictionary.""" + data = { + "endpoint": self.endpoint, + "deploymentID": self.deployment_id, + "defaultbandwidth": { + "bandwidthLimitPerBucket": self.bucket_bandwidth_limit, + "set": self.bucket_bandwidth_set, + }, + } + if self.name: + data["name"] = self.name + if self.sync_status is not None: + data["sync"] = "enable" if self.sync_status else "disable" + if self.bucket_bandwidth_updated_at: + data["defaultbandwidth"]["updatedAt"] = to_iso8601utc( + self.bucket_bandwidth_updated_at, + ) + return data diff --git a/minio/models.py b/minio/models.py new file mode 100644 index 00000000..ce32e445 --- /dev/null +++ b/minio/models.py @@ -0,0 +1,4111 @@ +# -*- coding: utf-8 -*- +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# pylint: disable=too-many-lines,invalid-name + +"""API request, response, result and configuration.""" + +from __future__ import annotations + +import base64 +import json +from abc import ABC +from binascii import crc32 +from collections import OrderedDict +from dataclasses import dataclass, field +from datetime import datetime +from enum import Enum +from io import BufferedIOBase, BytesIO +from typing import Any, List, Optional, Tuple, Type, TypeVar, Union, cast +from urllib.parse import unquote_plus + +from .checksum import Algorithm +from .checksum import Type as ChecksumType +from .compat import HTTPHeaderDict, HTTPResponse, JSONDecodeError +from .credentials import Credentials +from .error import MinioException +from .helpers import check_bucket_name +from .signer import get_credential_string, post_presign_v4 +from .time import (from_http_header, from_iso8601utc, to_amz_date, + to_http_header, to_iso8601utc) +from .xml import ET, Element, SubElement, find, findall, findtext, unmarshal + +################################################################################ +########### Common data structures ########### +################################################################################ + + +@dataclass(frozen=True) +class Checksum: + """Object checksum information.""" + checksum_crc32: Optional[str] = None + checksum_crc32c: Optional[str] = None + checksum_crc64nvme: Optional[str] = None + checksum_sha1: Optional[str] = None + checksum_sha256: Optional[str] = None + checksum_type: Optional[str] = None + + def headers(self) -> HTTPHeaderDict: + """Generate headers for checksum values.""" + headers = HTTPHeaderDict() + for algorithm, value in ( + ("crc32", self.checksum_crc32), + ("crc32c", self.checksum_crc32c), + ("crc64nvme", self.checksum_crc64nvme), + ("sha1", self.checksum_sha1), + ("sha256", self.checksum_sha256), + ): + if value: + headers[f"x-amz-checksum-algorithm-{algorithm}"] = value + headers["x-amz-checksum-algorithm"] = algorithm + return headers + + @classmethod + def fromxml(cls: Type[Checksum], element: ET.Element) -> Checksum: + """Create new object with values from XML element.""" + return cls( + checksum_crc32=findtext(element, "ChecksumCRC32"), + checksum_crc32c=findtext(element, "ChecksumCRC32C"), + checksum_crc64nvme=findtext(element, "ChecksumCRC64NVME"), + checksum_sha1=findtext(element, "ChecksumSHA1"), + checksum_sha256=findtext(element, "ChecksumSHA256"), + checksum_type=findtext(element, "ChecksumType"), + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + for tag, text in ( + ("ChecksumCRC32", self.checksum_crc32), + ("ChecksumCRC32C", self.checksum_crc32c), + ("ChecksumCRC64NVME", self.checksum_crc64nvme), + ("ChecksumSHA1", self.checksum_sha1), + ("ChecksumSHA256", self.checksum_sha256), + ): + if text: + SubElement(element, tag, text) + return element + + +@dataclass(frozen=True) +class Filter: + """Filter rule.""" + and_operator: Optional[And] = None + prefix: Optional[str] = None + tag: Optional[Tag] = None + + def __post_init__(self): + provided = [ + self.and_operator is not None, + self.prefix is not None, + self.tag is not None, + ] + if provided.count(True) != 1: + raise ValueError( + "exactly one of and operator, prefix or tag must be provided", + ) + + @classmethod + def fromxml(cls: Type[Filter], element: ET.Element) -> Filter: + """Create new object with values from XML element.""" + return cls( + and_operator=( + None if find(element, "And") 
is None + else Filter.And.fromxml(cast(ET.Element, find(element, "And"))) + ), + prefix=findtext(element, "Prefix"), + tag=( + None if find(element, "Tag") is None + else Tag.fromxml(cast(ET.Element, find(element, "Tag"))) + ), + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + if self.and_operator: + self.and_operator.toxml(SubElement(element, "And")) + if self.prefix is not None: + SubElement(element, "Prefix", self.prefix) + if self.tag is not None: + self.tag.toxml(SubElement(element, "Tag")) + return element + + @dataclass(frozen=True) + class And: + """AND operator.""" + prefix: Optional[str] = None + tags: Optional[Tags] = None + + def __post_init__(self): + if self.prefix is None and not self.tags: + raise ValueError("at least prefix or tags must be provided") + + @classmethod + def fromxml(cls: Type[Filter.And], element: ET.Element) -> Filter.And: + """Create new object with values from XML element.""" + return cls( + prefix=findtext(element, "Prefix"), + tags=( + None if find(element, "Tag") is None + else Tags.fromxml(element) + ), + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + if self.prefix is not None: + SubElement(element, "Prefix", self.prefix) + if self.tags is not None: + self.tags.toxml(element) + return element + + +@dataclass(frozen=True) +class Object: + """Object information.""" + bucket_name: str + object_name: Optional[str] + last_modified: Optional[datetime] = None + etag: Optional[str] = None + size: Optional[int] = None + metadata: Optional[Union[dict[str, str], HTTPHeaderDict]] = None + version_id: Optional[str] = None + is_latest: Optional[str] = None + storage_class: Optional[str] = None + owner_id: Optional[str] = None + owner_name: Optional[str] = None + content_type: Optional[str] = None + is_delete_marker: bool = False + tags: Optional[Tags] = None + is_dir: bool = field(default=False, init=False) + checksum_algorithms: Optional[List[str]] = None + checksum_type: Optional[str] = None + is_restore_in_progress: bool = False + restore_expiry_date: Optional[datetime] = None + + def __post_init__(self): + object.__setattr__( + self, + "is_dir", + bool(self.object_name and self.object_name.endswith("/")), + ) + + @classmethod + def fromxml( + cls: Type[Object], + element: ET.Element, + bucket_name: str, + is_delete_marker: bool = False, + encoding_type: Optional[str] = None, + ) -> Object: + """Create new object with values from XML element.""" + tag = findtext(element, "LastModified") + last_modified = None if tag is None else from_iso8601utc(tag) + + tag = findtext(element, "ETag") + etag = None if tag is None else tag.replace('"', "") + + tag = findtext(element, "Size") + size = None if tag is None else int(tag) + + elem = find(element, "Owner") + owner_id, owner_name = ( + (None, None) if elem is None + else (findtext(elem, "ID"), findtext(elem, "DisplayName")) + ) + + elems: ET.Element | list = find(element, "UserMetadata") or [] + metadata: dict[str, str] = {} + for child in elems: + key = child.tag.split("}")[1] if "}" in child.tag else child.tag + metadata[key] = child.text or "" + + object_name = cast(str, findtext(element, "Key", True)) + if encoding_type == "url": + object_name = unquote_plus(object_name) + + tags_text = findtext(element, "UserTags") + tags: Optional[Tags] = None + if tags_text: + tags = 
Tags.new_object_tags() + tags.update( + cast( + List[Tuple[Any, Any]], + [tokens.split("=") for tokens in tags_text.split("&")], + ), + ) + + checksum_algorithms = [ + elem.text for elem in findall(element, "ChecksumAlgorithm") + if elem.text + ] + checksum_type = findtext(element, "ChecksumType") + + is_restore_in_progress = cast( + str, + findtext(element, "RestoreStatus/IsRestoreInProgress", default=""), + ) + + restore_expiry_date = findtext( + element, "RestoreStatus/RestoreExpiryDate", + ) + + return cls( + bucket_name=bucket_name, + object_name=object_name, + last_modified=last_modified, + etag=etag, + size=size, + version_id=findtext(element, "VersionId"), + is_latest=findtext(element, "IsLatest"), + storage_class=findtext(element, "StorageClass"), + owner_id=owner_id, + owner_name=owner_name, + metadata=metadata, + is_delete_marker=is_delete_marker, + tags=tags, + checksum_algorithms=checksum_algorithms or None, + checksum_type=checksum_type, + is_restore_in_progress=is_restore_in_progress.lower() == "true", + restore_expiry_date=( + from_iso8601utc(restore_expiry_date) if restore_expiry_date + else None + ), + ) + + +def parse_list_objects( + response: HTTPResponse, + bucket_name: Optional[str] = None, +) -> tuple[list[Object], bool, Optional[str], Optional[str]]: + """Parse ListObjects/ListObjectsV2/ListObjectVersions response.""" + element = ET.fromstring(response.data.decode()) + bucket_name = cast(str, findtext(element, "Name", True)) + encoding_type = findtext(element, "EncodingType") + elements = findall(element, "Contents") + objects = [ + Object.fromxml(tag, bucket_name, encoding_type=encoding_type) + for tag in elements + ] + marker = objects[-1].object_name if objects else None + + elements = findall(element, "Version") + objects += [ + Object.fromxml(tag, bucket_name, encoding_type=encoding_type) + for tag in elements + ] + + elements = findall(element, "CommonPrefixes") + objects += [ + Object( + bucket_name, + unquote_plus(cast(str, findtext(tag, "Prefix", True))) + if encoding_type == "url" else findtext(tag, "Prefix", True), + ) for tag in elements + ] + + elements = findall(element, "DeleteMarker") + objects += [ + Object.fromxml(tag, bucket_name, is_delete_marker=True, + encoding_type=encoding_type) + for tag in elements + ] + + is_truncated = cast( + str, findtext(element, "IsTruncated", default="")).lower() == "true" + key_marker = findtext(element, "NextKeyMarker") + if key_marker and encoding_type == "url": + key_marker = unquote_plus(key_marker) + version_id_marker = findtext(element, "NextVersionIdMarker") + continuation_token = findtext(element, "NextContinuationToken") + if key_marker is not None: + continuation_token = key_marker + if continuation_token is None: + continuation_token = findtext(element, "NextMarker") + if continuation_token and encoding_type == "url": + continuation_token = unquote_plus(continuation_token) + if continuation_token is None and is_truncated: + continuation_token = marker + return objects, is_truncated, continuation_token, version_id_marker + + +@dataclass(frozen=True) +class Part(Checksum): + """Part information of a multipart upload.""" + part_number: int = 0 + etag: str = "" + last_modified: Optional[datetime] = None + size: Optional[int] = None + + @classmethod + def fromxml(cls: Type[Part], element: ET.Element) -> Part: + """Create new object with values from XML element.""" + part_number = int(cast(str, findtext(element, "PartNumber", True))) + etag = cast(str, findtext(element, "ETag", True)).replace('"', "") + tag = 
findtext(element, "LastModified") + last_modified = from_iso8601utc(tag) if tag else None + tag = findtext(element, "Size") + size = int(tag) if tag else None + checksum = Checksum.fromxml(element) + return cls( + part_number=part_number, + etag=etag, + last_modified=last_modified, + size=size, + **vars(checksum), + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + SubElement(element, "PartNumber", str(self.part_number)) + SubElement(element, "ETag", f'"{self.etag}"') + super().toxml(element) + return element + + @classmethod + def new( + cls: Type[Part], + result: CopyPartResult, + part_number: int, + ) -> Part: + """Create new object by CopyPartResult.""" + return cls( + part_number=part_number, + **vars(result), + ) + + +StatusT = TypeVar("StatusT", bound="Status") + + +@dataclass(frozen=True) +class Status(ABC): + """Status.""" + DISABLED = "Disabled" + ENABLED = "Enabled" + status: str + + @staticmethod + def check(status: str): + """Validate status.""" + if status not in [Status.ENABLED, Status.DISABLED]: + raise ValueError( + f"status must be {Status.ENABLED} or {Status.DISABLED}", + ) + + @classmethod + def fromxml(cls: Type[StatusT], element: ET.Element) -> StatusT: + """Create new object with values from XML element.""" + return cls( + status=cast(str, findtext(element, "Status", True)), + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + SubElement(element, "Status", self.status) + return element + + +@dataclass(frozen=True) +class Tag: + """Tag.""" + + key: str + value: str + + def __post_init__(self): + if not self.key: + raise ValueError("key must be provided") + if self.value is None: + raise ValueError("value must be provided") + + @classmethod + def fromxml(cls: Type[Tag], element: ET.Element) -> Tag: + """Create new object with values from XML element.""" + return cls( + key=cast(str, findtext(element, "Key", True)), + value=cast(str, findtext(element, "Value", True)), + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + SubElement(element, "Key", self.key) + SubElement(element, "Value", self.value) + return element + + +class Tags(dict): + """dict extended to bucket/object tags.""" + _MAX_KEY_LENGTH = 128 + _MAX_VALUE_LENGTH = 256 + _MAX_OBJECT_TAG_COUNT = 10 + _MAX_TAG_COUNT = 50 + + def __init__(self, for_object: bool = False): + self._for_object = for_object + super().__init__() + + def __setitem__(self, key: str, value: str): + limit = ( + self._MAX_OBJECT_TAG_COUNT + if self._for_object else self._MAX_TAG_COUNT + ) + if len(self) == limit: + tag_type = "object" if self._for_object else "bucket" + raise ValueError(f"only {limit} {tag_type} tags are allowed") + if not key or len(key) > self._MAX_KEY_LENGTH or "&" in key: + raise ValueError(f"invalid tag key '{key}'") + if value is None or len(value) > self._MAX_VALUE_LENGTH or "&" in value: + raise ValueError(f"invalid tag value '{value}'") + super().__setitem__(key, value) + + @classmethod + def new_bucket_tags(cls: Type[Tags]) -> Tags: + """Create new bucket tags.""" + return cls() + + @classmethod + def new_object_tags(cls: Type[Tags]) -> Tags: + """Create new object tags.""" + return cls(True) + + @classmethod + def fromxml(cls: Type[Tags], element: ET.Element) -> Tags: + """Create 
new object with values from XML element.""" + obj = cls() + for tag in findall(element, "Tag"): + key = cast(str, findtext(tag, "Key", True)) + value = cast(str, findtext(tag, "Value", True)) + obj[key] = value + return obj + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + for key, value in self.items(): + tag = SubElement(element, "Tag") + SubElement(tag, "Key", key) + SubElement(tag, "Value", value) + return element + + +################################################################################ +########### API configuration XML models ########### +################################################################################ + + +@dataclass(frozen=True) +class CORSConfig: + """CORS configuration.""" + rules: Optional[List[CORSRule]] = None + + @classmethod + def fromxml(cls: Type[CORSConfig], element: ET.Element) -> CORSConfig: + """Create new object with values from XML element.""" + return cls( + rules=[ + CORSConfig.CORSRule.fromxml(elem) + for elem in findall(element, "CORSRule") + ], + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("CORSConfiguration") + for rule in self.rules or []: + rule.toxml(SubElement(element, "CORSRule")) + return element + + @dataclass(frozen=True) + class CORSRule: + """CORS rule.""" + allowed_headers: Optional[List[str]] = None + allowed_methods: Optional[List[str]] = None + allowed_origins: Optional[List[str]] = None + expose_headers: Optional[List[str]] = None + id: Optional[str] = None + max_age_seconds: Optional[int] = None + + @classmethod + def fromxml( + cls: Type[CORSConfig.CORSRule], + element: ET.Element, + ) -> CORSConfig.CORSRule: + """Create new object with values from XML element.""" + max_age_seconds = findtext(element, "MaxAgeSeconds") + return cls( + allowed_headers=[ + elem.text for elem in findall(element, "AllowedHeader") + if elem.text + ], + allowed_methods=[ + elem.text for elem in findall(element, "AllowedMethod") + if elem.text + ], + allowed_origins=[ + elem.text for elem in findall(element, "AllowedOrigin") + if elem.text + ], + expose_headers=[ + elem.text for elem in findall(element, "ExposeHeader") + if elem.text + ], + id=findtext(element, "ID"), + max_age_seconds=( + int(max_age_seconds) if max_age_seconds else None + ), + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + for value in self.allowed_headers or []: + SubElement(element, "AllowedHeader", value) + for value in self.allowed_methods or []: + SubElement(element, "AllowedMethod", value) + for value in self.allowed_origins or []: + SubElement(element, "AllowedOrigin", value) + for value in self.expose_headers or []: + SubElement(element, "ExposeHeader", value) + if self.id: + SubElement(element, "ID", self.id) + if self.max_age_seconds is not None: + SubElement(element, "MaxAgeSeconds", str(self.max_age_seconds)) + return element + + +@dataclass(frozen=True) +class LegalHold: + """Legal hold configuration.""" + status: bool = False + + @classmethod + def fromxml(cls: Type[LegalHold], element: ET.Element) -> LegalHold: + """Create new object with values from XML element.""" + status = findtext(element, "Status") + return cls(status=status == "ON") + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("LegalHold") + 
SubElement(element, "Status", "ON" if self.status is True else "OFF") + return element + + +@dataclass(frozen=True) +class LifecycleConfig: + """Lifecycle configuration.""" + rules: list[Rule] + + @classmethod + def fromxml( + cls: Type[LifecycleConfig], + element: ET.Element, + ) -> LifecycleConfig: + """Create new object with values from XML element.""" + return cls( + rules=[ + LifecycleConfig.Rule.fromxml(tag) + for tag in findall(element, "Rule") + ], + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("LifecycleConfiguration") + for rule in self.rules: + rule.toxml(SubElement(element, "Rule")) + return element + + @dataclass(frozen=True) + class DateDays(ABC): + """Base class holds date and days of Transition and Expiration.""" + date: Optional[datetime] = None + days: Optional[int] = None + + @staticmethod + def parsexml( + element: ET.Element, + ) -> tuple[Optional[datetime], Optional[int]]: + """Parse XML to date and days.""" + date = from_iso8601utc(findtext(element, "Date")) + days = findtext(element, "Days") + return date, int(days) if days else None + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + if self.date is not None: + SubElement( + element, "Date", to_iso8601utc(self.date), + ) + if self.days: + SubElement(element, "Days", str(self.days)) + return element + + @dataclass(frozen=True) + class Transition(DateDays): + """Transition.""" + storage_class: Optional[str] = None + + @classmethod + def fromxml( + cls: Type[LifecycleConfig.Transition], + element: ET.Element, + ) -> LifecycleConfig.Transition: + """Create new object with values from XML element.""" + date, days = cls.parsexml(element) + return cls(date, days, findtext(element, "StorageClass")) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + super().toxml(element) + if self.storage_class: + SubElement(element, "StorageClass", self.storage_class) + return element + + @dataclass(frozen=True) + class NoncurrentVersionTransition: + """Noncurrent version transition.""" + noncurrent_days: Optional[int] = None + storage_class: Optional[str] = None + newer_noncurrent_versions: Optional[int] = None + + @classmethod + def fromxml( + cls: Type[LifecycleConfig.NoncurrentVersionTransition], + element: ET.Element, + ) -> LifecycleConfig.NoncurrentVersionTransition: + """Create new object with values from XML element.""" + noncurrent_days = findtext(element, "NoncurrentDays") + versions = findtext(element, "NewerNoncurrentVersions") + return cls( + noncurrent_days=( + int(noncurrent_days) if noncurrent_days else None + ), + storage_class=findtext(element, "StorageClass"), + newer_noncurrent_versions=int(versions) if versions else None, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + if self.noncurrent_days: + SubElement(element, "NoncurrentDays", + str(self.noncurrent_days)) + if self.storage_class: + SubElement(element, "StorageClass", self.storage_class) + if self.newer_noncurrent_versions: + SubElement(element, "NewerNoncurrentVersions", + str(self.newer_noncurrent_versions)) + return element + + @dataclass(frozen=True) + class NoncurrentVersionExpiration: + """Noncurrent version expiration.""" + noncurrent_days: Optional[int] = 
None + newer_noncurrent_versions: Optional[int] = None + + @classmethod + def fromxml( + cls: Type[LifecycleConfig.NoncurrentVersionExpiration], + element: ET.Element, + ) -> LifecycleConfig.NoncurrentVersionExpiration: + """Create new object with values from XML element.""" + noncurrent_days = findtext(element, "NoncurrentDays") + versions = findtext(element, "NewerNoncurrentVersions") + return cls( + noncurrent_days=( + int(noncurrent_days) if noncurrent_days else None + ), + newer_noncurrent_versions=int(versions) if versions else None, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + if self.noncurrent_days: + SubElement(element, "NoncurrentDays", + str(self.noncurrent_days)) + if self.newer_noncurrent_versions: + SubElement(element, "NewerNoncurrentVersions", + str(self.newer_noncurrent_versions)) + return element + + @dataclass(frozen=True) + class Expiration(DateDays): + """Expiration.""" + expired_object_delete_marker: Optional[bool] = None + + @classmethod + def fromxml( + cls: Type[LifecycleConfig.Expiration], + element: ET.Element, + ) -> LifecycleConfig.Expiration: + """Create new object with values from XML element.""" + date, days = cls.parsexml(element) + expired_object_delete_marker = cast( + str, + findtext(element, "ExpiredObjectDeleteMarker", default=""), + ) + if not expired_object_delete_marker: + return cls(date, days, None) + + if expired_object_delete_marker.title() not in ["False", "True"]: + raise ValueError( + "value of ExpiredObjectDeleteMarker must be " + "'True' or 'False'", + ) + return cls( + date, days, expired_object_delete_marker.lower() == "true", + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + super().toxml(element) + if self.expired_object_delete_marker is not None: + SubElement( + element, + "ExpiredObjectDeleteMarker", + str(self.expired_object_delete_marker).lower(), + ) + return element + + @dataclass(frozen=True) + class AbortIncompleteMultipartUpload: + """Abort incomplete multipart upload.""" + days_after_initiation: Optional[int] = None + + @classmethod + def fromxml( + cls: Type[LifecycleConfig.AbortIncompleteMultipartUpload], + element: ET.Element, + ) -> LifecycleConfig.AbortIncompleteMultipartUpload: + """Create new object with values from XML element.""" + days_after_initiation = findtext(element, "DaysAfterInitiation") + return cls( + days_after_initiation=( + int(days_after_initiation) if days_after_initiation + else None + ), + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + if self.days_after_initiation: + SubElement( + element, + "DaysAfterInitiation", + str(self.days_after_initiation), + ) + return element + + @dataclass(frozen=True) + class Rule: + """Lifecycle rule. 
""" + status: str + rule_filter: Optional[Filter] = None + rule_id: Optional[str] = None + abort_incomplete_multipart_upload: Optional[ + LifecycleConfig.AbortIncompleteMultipartUpload] = None + expiration: Optional[LifecycleConfig.Expiration] = None + noncurrent_version_expiration: Optional[ + LifecycleConfig.NoncurrentVersionExpiration] = None + noncurrent_version_transition: Optional[ + LifecycleConfig.NoncurrentVersionTransition] = None + transition: Optional[LifecycleConfig.Transition] = None + + def __post_init__(self): + Status.check(self.status) + if self.rule_id is not None: + object.__setattr__(self, "rule_id", self.rule_id.strip()) + if not self.rule_id: + raise ValueError("rule ID must be non-empty string") + if len(self.rule_id) > 255: + raise ValueError("rule ID must not exceed 255 characters") + if (not self.abort_incomplete_multipart_upload + and not self.expiration + and not self.noncurrent_version_expiration + and not self.noncurrent_version_transition + and not self.transition): + raise ValueError( + "at least one of action (AbortIncompleteMultipartUpload, " + "Expiration, NoncurrentVersionExpiration, " + "NoncurrentVersionTransition or Transition) must be " + "specified in a rule") + + @classmethod + def fromxml( + cls: Type[LifecycleConfig.Rule], + element: ET.Element, + ) -> LifecycleConfig.Rule: + """Create new object with values from XML element.""" + status = cast(str, findtext(element, "Status", True)) + rule_filter = ( + None if find(element, "Filter") is None + else Filter.fromxml(cast(ET.Element, find(element, "Filter"))) + ) + rule_id = findtext(element, "ID") + abort_incomplete_multipart_upload = ( + None if find(element, "AbortIncompleteMultipartUpload") is None + else LifecycleConfig.AbortIncompleteMultipartUpload.fromxml( + cast( + ET.Element, + find(element, "AbortIncompleteMultipartUpload"), + ), + ) + ) + expiration = ( + None if find(element, "Expiration") is None + else LifecycleConfig.Expiration.fromxml( + cast(ET.Element, find(element, "Expiration")), + ) + ) + noncurrent_version_expiration = ( + None if find(element, "NoncurrentVersionExpiration") is None + else + LifecycleConfig.NoncurrentVersionExpiration.fromxml( + cast( + ET.Element, + find(element, "NoncurrentVersionExpiration"), + ), + ) + ) + noncurrent_version_transition = ( + None if find(element, "NoncurrentVersionTransition") is None + else + LifecycleConfig.NoncurrentVersionTransition.fromxml( + cast( + ET.Element, + find(element, "NoncurrentVersionTransition"), + ), + ) + ) + transition = ( + None if find(element, "Transition") is None + else LifecycleConfig.Transition.fromxml( + cast(ET.Element, find(element, "Transition")), + ) + ) + + return cls( + status=status, + rule_filter=rule_filter, + rule_id=rule_id, + abort_incomplete_multipart_upload=( + abort_incomplete_multipart_upload + ), + expiration=expiration, + noncurrent_version_expiration=noncurrent_version_expiration, + noncurrent_version_transition=noncurrent_version_transition, + transition=transition, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + SubElement(element, "Status", self.status) + if self.rule_filter: + self.rule_filter.toxml(SubElement(element, "Filter")) + if self.rule_id is not None: + SubElement(element, "ID", self.rule_id) + if self.abort_incomplete_multipart_upload: + self.abort_incomplete_multipart_upload.toxml( + SubElement(element, "AbortIncompleteMultipartUpload"), + ) + if 
self.expiration: + self.expiration.toxml(SubElement(element, "Expiration")) + if self.noncurrent_version_expiration: + self.noncurrent_version_expiration.toxml( + SubElement(element, "NoncurrentVersionExpiration"), + ) + if self.noncurrent_version_transition: + self.noncurrent_version_transition.toxml( + SubElement(element, "NoncurrentVersionTransition"), + ) + if self.transition: + self.transition.toxml(SubElement(element, "Transition")) + return element + + +@dataclass(frozen=True) +class NotificationConfig: + """Notification configuration.""" + cloud_func_config_list: list[CloudFuncConfig] = field(default_factory=list) + queue_config_list: list[QueueConfig] = field(default_factory=list) + topic_config_list: list[TopicConfig] = field(default_factory=list) + + @classmethod + def fromxml( + cls: Type[NotificationConfig], + element: ET.Element, + ) -> NotificationConfig: + """Create new object with values from XML element.""" + elements = findall(element, "CloudFunctionConfiguration") + cloud_func_config_list = [ + NotificationConfig.CloudFuncConfig.fromxml(tag) + for tag in elements + ] + elements = findall(element, "QueueConfiguration") + queue_config_list = [ + NotificationConfig.QueueConfig.fromxml(tag) + for tag in elements + ] + elements = findall(element, "TopicConfiguration") + topic_config_list = [ + NotificationConfig.TopicConfig.fromxml(tag) + for tag in elements + ] + return cls( + cloud_func_config_list, queue_config_list, topic_config_list, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("NotificationConfiguration") + for cloud_func_config in self.cloud_func_config_list: + cloud_func_config.toxml( + SubElement(element, "CloudFunctionConfiguration"), + ) + for queue_config in self.queue_config_list: + queue_config.toxml(SubElement(element, "QueueConfiguration")) + for config in self.topic_config_list: + config.toxml(SubElement(element, "TopicConfiguration")) + return element + + @dataclass(frozen=True) + class FilterRule(ABC): + """Filter rule.""" + name: str + value: str + + @classmethod + def fromxml( + cls: Type[NotificationConfig.FilterRule], + element: ET.Element, + ) -> NotificationConfig.FilterRule: + """Create new object with values from XML element.""" + name = cast(str, findtext(element, "Name", True)) + value = cast(str, findtext(element, "Value", True)) + return cls(name, value) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + # element = SubElement(element, "FilterRule") + SubElement(element, "Name", self.name) + SubElement(element, "Value", self.value) + return element + + @dataclass(frozen=True) + class PrefixFilterRule(FilterRule): + """Prefix filter rule.""" + + def __init__(self, value: str): + super().__init__(name="prefix", value=value) + + @dataclass(frozen=True) + class SuffixFilterRule(FilterRule): + """Suffix filter rule.""" + + def __init__(self, value: str): + super().__init__(name="suffix", value=value) + + @dataclass(frozen=True) + class CommonConfig(ABC): + """Common for cloud-function/queue/topic configuration.""" + events: list[str] + config_id: Optional[str] = None + prefix_filter_rule: Optional[NotificationConfig.PrefixFilterRule] = None + suffix_filter_rule: Optional[NotificationConfig.SuffixFilterRule] = None + + def __post_init__(self): + if not self.events: + raise ValueError("events must be provided") + + @staticmethod + def parsexml( + element: ET.Element, + ) -> 
tuple[ + list[str], + Optional[str], + Optional[NotificationConfig.PrefixFilterRule], + Optional[NotificationConfig.SuffixFilterRule], + ]: + """Parse XML.""" + elements = findall(element, "Event") + events = [] + for tag in elements: + if tag.text is None: + raise ValueError("missing value in XML tag 'Event'") + events.append(tag.text) + config_id = findtext(element, "Id") + elem = find(element, "Filter") + if elem is None: + return events, config_id, None, None + prefix_filter_rule = None + suffix_filter_rule = None + elem = cast(ET.Element, find(elem, "S3Key", True)) + elements = findall(elem, "FilterRule") + for tag in elements: + filter_rule = NotificationConfig.FilterRule.fromxml(tag) + if filter_rule.name == "prefix": + prefix_filter_rule = NotificationConfig.PrefixFilterRule( + filter_rule.value, + ) + else: + suffix_filter_rule = NotificationConfig.SuffixFilterRule( + filter_rule.value, + ) + return ( + events, config_id, prefix_filter_rule, suffix_filter_rule, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + for event in self.events: + SubElement(element, "Event", event) + if self.config_id is not None: + SubElement(element, "Id", self.config_id) + if self.prefix_filter_rule or self.suffix_filter_rule: + rule = SubElement(element, "Filter") + rule = SubElement(rule, "S3Key") + if self.prefix_filter_rule: + self.prefix_filter_rule.toxml(SubElement(rule, "FilterRule")) + if self.suffix_filter_rule: + self.suffix_filter_rule.toxml(SubElement(rule, "FilterRule")) + return element + + @dataclass(frozen=True) + class CloudFuncConfig(CommonConfig): + """Cloud function configuration.""" + cloud_func: Optional[str] = None + + def __post_init__(self): + if not self.cloud_func: + raise ValueError("cloud function must be provided") + + @classmethod + def fromxml( + cls: Type[NotificationConfig.CloudFuncConfig], + element: ET.Element, + ) -> NotificationConfig.CloudFuncConfig: + """Create new object with values from XML element.""" + cloud_func = cast(str, findtext(element, "CloudFunction", True)) + (events, config_id, prefix_filter_rule, + suffix_filter_rule) = cls.parsexml(element) + return cls( + cloud_func=cloud_func, + events=events, + config_id=config_id, + prefix_filter_rule=prefix_filter_rule, + suffix_filter_rule=suffix_filter_rule, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + SubElement(element, "CloudFunction", self.cloud_func) + super().toxml(element) + return element + + @dataclass(frozen=True) + class QueueConfig(CommonConfig): + """Queue configuration.""" + queue: Optional[str] = None + + def __post_init__(self): + if not self.queue: + raise ValueError("queue must be provided") + + @classmethod + def fromxml( + cls: Type[NotificationConfig.QueueConfig], + element: ET.Element, + ) -> NotificationConfig.QueueConfig: + """Create new object with values from XML element.""" + queue = cast(str, findtext(element, "Queue", True)) + (events, config_id, prefix_filter_rule, + suffix_filter_rule) = cls.parsexml(element) + return cls( + queue=queue, + events=events, + config_id=config_id, + prefix_filter_rule=prefix_filter_rule, + suffix_filter_rule=suffix_filter_rule, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + SubElement(element, "Queue", 
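The `CommonConfig` base above validates that `events` is non-empty, and each concrete configuration additionally requires its target. A sketch of a queue notification with a suffix filter; the queue ARN is a placeholder:

```python
config = NotificationConfig(
    queue_config_list=[
        NotificationConfig.QueueConfig(
            queue="arn:minio:sqs::primary:webhook",  # placeholder ARN
            events=["s3:ObjectCreated:*"],
            suffix_filter_rule=NotificationConfig.SuffixFilterRule(".jpg"),
        ),
    ],
)
```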
self.queue) + super().toxml(element) + return element + + @dataclass(frozen=True) + class TopicConfig(CommonConfig): + """Topic configuration.""" + topic: Optional[str] = None + + def __post_init__(self): + if not self.topic: + raise ValueError("topic must be provided") + + @classmethod + def fromxml( + cls: Type[NotificationConfig.TopicConfig], + element: ET.Element, + ) -> NotificationConfig.TopicConfig: + """Create new object with values from XML element.""" + topic = cast(str, findtext(element, "Topic", True)) + (events, config_id, prefix_filter_rule, + suffix_filter_rule) = cls.parsexml(element) + return cls( + topic=topic, + events=events, + config_id=config_id, + prefix_filter_rule=prefix_filter_rule, + suffix_filter_rule=suffix_filter_rule, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + SubElement(element, "Topic", self.topic) + super().toxml(element) + return element + + +@dataclass(frozen=True) +class ObjectLockConfig: + """Object lock configuration.""" + COMPLIANCE = "COMPLIANCE" + GOVERNANCE = "GOVERNANCE" + DAYS = "Days" + YEARS = "Years" + + mode: Optional[str] + duration: Optional[int] + duration_unit: Optional[str] + + def __post_init__(self): + if (self.mode is not None) ^ (self.duration is not None): + if self.mode is None: + raise ValueError("mode must be provided") + raise ValueError("duration must be provided") + if ( + self.mode is not None and self.mode not in [ + ObjectLockConfig.GOVERNANCE, ObjectLockConfig.COMPLIANCE, + ] + ): + raise ValueError( + f"mode must be {ObjectLockConfig.GOVERNANCE} or " + f"{ObjectLockConfig.COMPLIANCE}", + ) + if ( + self.duration is not None and + self.duration_unit not in [ + ObjectLockConfig.DAYS, ObjectLockConfig.YEARS, + ] + ): + raise ValueError( + f"duration unit must be {ObjectLockConfig.DAYS} or " + f"{ObjectLockConfig.YEARS}", + ) + if self.duration_unit: + object.__setattr__( + self, "duration_unit", self.duration_unit.title(), + ) + + @classmethod + def fromxml( + cls: Type[ObjectLockConfig], + element: ET.Element, + ) -> ObjectLockConfig: + """Create new object with values from XML element.""" + elem = find(element, "Rule") + if elem is None: + return cls(None, None, None) + elem = cast(ET.Element, find(elem, "DefaultRetention", True)) + mode = findtext(elem, "Mode") + duration_unit = ObjectLockConfig.DAYS + duration = findtext(elem, duration_unit) + if not duration: + duration_unit = ObjectLockConfig.YEARS + duration = findtext(elem, duration_unit) + if not duration: + raise ValueError( + f"XML element <{ObjectLockConfig.DAYS}> or " + f"<{ObjectLockConfig.YEARS}> not found", + ) + return cls( + mode=mode, + duration=int(duration), + duration_unit=duration_unit, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("ObjectLockConfiguration") + SubElement(element, "ObjectLockEnabled", "Enabled") + if self.mode: + rule = SubElement(element, "Rule") + retention = SubElement(rule, "DefaultRetention") + SubElement(retention, "Mode", self.mode) + if not self.duration_unit: + raise ValueError("duration unit must be provided") + SubElement(retention, self.duration_unit, str(self.duration)) + return element + + +@dataclass(frozen=True) +class ReplicationConfig: + """Replication configuration.""" + role: str + rules: list[Rule] + + def __post_init__(self): + if not self.rules: + raise ValueError("rules must be provided") + if len(self.rules) > 1000: +
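A construction sketch for the object lock configuration above; note the membership check runs before the `title()` normalization, so the unit must already match `Days` or `Years` exactly:

```python
config = ObjectLockConfig(
    mode=ObjectLockConfig.COMPLIANCE,
    duration=30,
    duration_unit=ObjectLockConfig.DAYS,
)
```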
raise ValueError("more than 1000 rules are not supported") + + @classmethod + def fromxml( + cls: Type[ReplicationConfig], + element: ET.Element, + ) -> ReplicationConfig: + """Create new object with values from XML element.""" + role = cast(str, findtext(element, "Role", True)) + rules = [ + ReplicationConfig.Rule.fromxml(tag) + for tag in findall(element, "Rule") + ] + return cls(role, rules) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("ReplicationConfiguration") + SubElement(element, "Role", self.role) + for rule in self.rules: + rule.toxml(SubElement(element, "Rule")) + return element + + @dataclass(frozen=True) + class SseKmsEncryptedObjects(Status): + """SSE KMS encrypted objects.""" + + @dataclass(frozen=True) + class SourceSelectionCriteria: + """Source selection criteria.""" + sse_kms_encrypted_objects: Optional[ + ReplicationConfig.SseKmsEncryptedObjects] = None + + @classmethod + def fromxml( + cls: Type[ReplicationConfig.SourceSelectionCriteria], + element: ET.Element, + ) -> ReplicationConfig.SourceSelectionCriteria: + """Create new object with values from XML element.""" + return cls( + sse_kms_encrypted_objects=( + None if find(element, "SseKmsEncryptedObjects") is None + else ReplicationConfig.SseKmsEncryptedObjects.fromxml( + cast( + ET.Element, + find(element, "SseKmsEncryptedObjects"), + ), + ) + ), + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + if self.sse_kms_encrypted_objects: + self.sse_kms_encrypted_objects.toxml( + SubElement(element, "SseKmsEncryptedObjects"), + ) + return element + + @dataclass(frozen=True) + class ExistingObjectReplication(Status): + """Existing object replication.""" + + @dataclass(frozen=True) + class DeleteMarkerReplication(Status): + """Delete marker replication.""" + + def __init__(self, status="Disabled"): + super().__init__(status) + + ReplicationTimeValueT = TypeVar( + "ReplicationTimeValueT", bound="ReplicationTimeValue", + ) + + @dataclass(frozen=True) + class ReplicationTimeValue(ABC): + """Replication time value.""" + minutes: Optional[int] = 15 + + @classmethod + def fromxml( + cls: Type[ReplicationConfig.ReplicationTimeValueT], + element: ET.Element, + ) -> ReplicationConfig.ReplicationTimeValueT: + """Create new object with values from XML element.""" + minutes = findtext(element, "Minutes") + return cls( + minutes=int(minutes) if minutes else None, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + if self.minutes is not None: + SubElement(element, "Minutes", str(self.minutes)) + return element + + @dataclass(frozen=True) + class Time(ReplicationTimeValue): + """Time.""" + + @dataclass(frozen=True) + class ReplicationTime: + """Replication time.""" + time: ReplicationConfig.Time + status: str + + def __post_init__(self,): + if not self.time: + raise ValueError("time must be provided") + Status.check(self.status) + + @classmethod + def fromxml( + cls: Type[ReplicationConfig.ReplicationTime], + element: ET.Element, + ) -> ReplicationConfig.ReplicationTime: + """Create new object with values from XML element.""" + time = ReplicationConfig.Time.fromxml( + cast(ET.Element, find(element, "Time", strict=True)), + ) + status = cast(str, findtext(element, "Status", True)) + return cls(time, status) + + def toxml(self, element: 
Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + self.time.toxml(SubElement(element, "Time")) + SubElement(element, "Status", self.status) + return element + + @dataclass(frozen=True) + class EventThreshold(ReplicationTimeValue): + """Event threshold.""" + + @dataclass(frozen=True) + class Metrics: + """Metrics.""" + event_threshold: ReplicationConfig.EventThreshold + status: str + + def __post_init__(self): + if not self.event_threshold: + raise ValueError("event threshold must be provided") + Status.check(self.status) + + @classmethod + def fromxml( + cls: Type[ReplicationConfig.Metrics], + element: ET.Element, + ) -> ReplicationConfig.Metrics: + """Create new object with values from XML element.""" + event_threshold = ReplicationConfig.EventThreshold.fromxml( + cast(ET.Element, find(element, "EventThreshold", True)), + ) + status = cast(str, findtext(element, "Status", True)) + return cls(event_threshold, status) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + self.event_threshold.toxml(SubElement(element, "EventThreshold")) + SubElement(element, "Status", self.status) + return element + + @dataclass(frozen=True) + class EncryptionConfig: + """Encryption configuration.""" + replica_kms_key_id: Optional[str] = None + + @classmethod + def fromxml( + cls: Type[ReplicationConfig.EncryptionConfig], + element: ET.Element, + ) -> ReplicationConfig.EncryptionConfig: + """Create new object with values from XML element.""" + return cls(findtext(element, "ReplicaKmsKeyID")) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + SubElement(element, "ReplicaKmsKeyID", self.replica_kms_key_id) + return element + + @dataclass(frozen=True) + class AccessControlTranslation: + """Access control translation.""" + owner: str = "Destination" + + def __post_init__(self): + if not self.owner: + raise ValueError("owner must be provided") + + @classmethod + def fromxml( + cls: Type[ReplicationConfig.AccessControlTranslation], + element: ET.Element, + ) -> ReplicationConfig.AccessControlTranslation: + """Create new object with values from XML element.""" + owner = cast(str, findtext(element, "Owner", True)) + return cls(owner) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + SubElement(element, "Owner", self.owner) + return element + + @dataclass(frozen=True) + class Destination: + """Replication destination.""" + bucket_arn: str + access_control_translation: Optional[ + ReplicationConfig.AccessControlTranslation] = None + account: Optional[str] = None + encryption_config: Optional[ReplicationConfig.EncryptionConfig] = None + metrics: Optional[ReplicationConfig.Metrics] = None + replication_time: Optional[ReplicationConfig.ReplicationTime] = None + storage_class: Optional[str] = None + + def __post_init__(self): + if not self.bucket_arn: + raise ValueError("bucket ARN must be provided") + + @classmethod + def fromxml( + cls: Type[ReplicationConfig.Destination], + element: ET.Element, + ) -> ReplicationConfig.Destination: + """Create new object with values from XML element.""" + access_control_translation = ( + None if find(element, "AccessControlTranslation") is None + else 
ReplicationConfig.AccessControlTranslation.fromxml( + cast( + ET.Element, + find(element, "AccessControlTranslation"), + ), + ) + ) + account = findtext(element, "Account") + bucket_arn = cast(str, findtext(element, "Bucket", True)) + encryption_config = ( + None if find(element, "EncryptionConfiguration") is None + else ReplicationConfig.EncryptionConfig.fromxml( + cast( + ET.Element, + find(element, "EncryptionConfiguration"), + ), + ) + ) + metrics = ( + None if find(element, "Metrics") is None + else ReplicationConfig.Metrics.fromxml( + cast(ET.Element, find(element, "Metrics")), + ) + ) + replication_time = ( + None if find(element, "ReplicationTime") is None + else ReplicationConfig.ReplicationTime.fromxml( + cast(ET.Element, find(element, "ReplicationTime")), + ) + ) + storage_class = findtext(element, "StorageClass") + return cls( + bucket_arn, access_control_translation, account, + encryption_config, metrics, replication_time, storage_class, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + if self.access_control_translation: + self.access_control_translation.toxml( + SubElement(element, "AccessControlTranslation"), + ) + if self.account is not None: + SubElement(element, "Account", self.account) + SubElement(element, "Bucket", self.bucket_arn) + if self.encryption_config: + self.encryption_config.toxml( + SubElement(element, "EncryptionConfiguration"), + ) + if self.metrics: + self.metrics.toxml(SubElement(element, "Metrics")) + if self.replication_time: + self.replication_time.toxml( + SubElement(element, "ReplicationTime"), + ) + if self.storage_class: + SubElement(element, "StorageClass", self.storage_class) + return element + + @dataclass(frozen=True) + class Rule: + """Replication rule. 
""" + status: str + rule_id: Optional[str] = None + rule_filter: Optional[Filter] = None + destination: Optional[ReplicationConfig.Destination] = None + delete_marker_replication: Optional[ + ReplicationConfig.DeleteMarkerReplication] = None + existing_object_replication: Optional[ + ReplicationConfig.ExistingObjectReplication] = None + prefix: Optional[str] = None + priority: Optional[int] = None + source_selection_criteria: Optional[ + ReplicationConfig.SourceSelectionCriteria] = None + + def __post_init__(self): + Status.check(self.status) + if self.rule_id is not None: + object.__setattr__(self, "rule_id", self.rule_id.strip()) + if not self.rule_id: + raise ValueError("rule ID must be non-empty string") + if len(self.rule_id) > 255: + raise ValueError("rule ID must not exceed 255 characters") + if not self.destination: + raise ValueError("destination must be provided") + + @classmethod + def fromxml( + cls: Type[ReplicationConfig.Rule], + element: ET.Element, + ) -> ReplicationConfig.Rule: + """Create new object with values from XML element.""" + status = cast(str, findtext(element, "Status", True)) + rule_id = findtext(element, "ID") + rule_filter = ( + None if find(element, "Filter") is None + else Filter.fromxml(cast(ET.Element, find(element, "Filter"))) + ) + destination = ( + None if find(element, "Destination") is None + else ReplicationConfig.Destination.fromxml( + cast(ET.Element, find(element, "Destination")), + ) + ) + delete_marker_replication = ( + None if find(element, "DeleteMarkerReplication") is None + else ReplicationConfig.DeleteMarkerReplication.fromxml( + cast( + ET.Element, + find(element, "DeleteMarkerReplication"), + ), + ) + ) + existing_object_replication = ( + None + if find(element, "ExistingObjectReplication") is None + else ReplicationConfig.ExistingObjectReplication.fromxml( + cast( + ET.Element, + find(element, "ExistingObjectReplication"), + ), + ) + ) + prefix = findtext(element, "Prefix") + priority = findtext(element, "Priority") + source_selection_criteria = ( + None if find(element, "SourceSelectionCriteria") is None + else ReplicationConfig.SourceSelectionCriteria.fromxml( + cast( + ET.Element, + find(element, "SourceSelectionCriteria"), + ), + ) + ) + + return cls( + status=status, + rule_id=rule_id, + rule_filter=rule_filter, + destination=destination, + delete_marker_replication=delete_marker_replication, + existing_object_replication=existing_object_replication, + prefix=prefix, + priority=int(priority) if priority else None, + source_selection_criteria=source_selection_criteria, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + SubElement(element, "Status", self.status) + if self.rule_id is not None: + SubElement(element, "ID", self.rule_id) + if self.rule_filter: + self.rule_filter.toxml(SubElement(element, "Filter")) + if self.delete_marker_replication: + self.delete_marker_replication.toxml( + SubElement(element, "DeleteMarkerReplication"), + ) + if self.destination: + self.destination.toxml(SubElement(element, "Destination")) + if self.existing_object_replication: + self.existing_object_replication.toxml( + SubElement(element, "ExistingObjectReplication"), + ) + if self.prefix is not None: + SubElement(element, "Prefix", self.prefix) + if self.priority is not None: + SubElement(element, "Priority", str(self.priority)) + if self.source_selection_criteria: + self.source_selection_criteria.toxml( + SubElement(element, 
"SourceSelectionCriteria"), + ) + return element + + +@dataclass(frozen=True) +class Retention: + """Retention configuration.""" + COMPLIANCE = "COMPLIANCE" + GOVERNANCE = "GOVERNANCE" + mode: str + retain_until_date: datetime + + def __post_init__(self): + if self.mode not in [ + Retention.GOVERNANCE, Retention.COMPLIANCE, + ]: + raise ValueError( + f"mode must be {Retention.GOVERNANCE} or " + f"{Retention.COMPLIANCE}", + ) + + @classmethod + def fromxml(cls: Type[Retention], element: ET.Element) -> Retention: + """Create new object with values from XML element.""" + mode = cast(str, findtext(element, "Mode", True)) + retain_until_date = cast( + datetime, + from_iso8601utc( + cast(str, findtext(element, "RetainUntilDate", True)), + ), + ) + return cls(mode=mode, retain_until_date=retain_until_date) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("Retention") + SubElement(element, "Mode", self.mode) + SubElement( + element, + "RetainUntilDate", + to_iso8601utc(self.retain_until_date), + ) + return element + + +@dataclass(frozen=True) +class SSEConfig: + """server-side encryption configuration.""" + rule: Rule + + def __post_init__(self): + if not self.rule: + raise ValueError("rule must be provided") + + @classmethod + def fromxml(cls: Type[SSEConfig], element: ET.Element) -> SSEConfig: + """Create new object with values from XML element.""" + element = cast(ET.Element, find(element, "Rule", True)) + return cls(SSEConfig.Rule.fromxml(element)) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("ServerSideEncryptionConfiguration") + self.rule.toxml(SubElement(element, "Rule")) + return element + + @dataclass(frozen=True) + class Rule(ABC): + """Server-side encryption rule. 
""" + AES256 = "AES256" + AWS_KMS = "aws:kms" + sse_algorithm: str + kms_master_key_id: Optional[str] = None + + @classmethod + def new_sse_s3_rule(cls: Type[SSEConfig.Rule]) -> SSEConfig.Rule: + """Create SSE-S3 rule.""" + return cls(sse_algorithm=SSEConfig.Rule.AES256) + + @classmethod + def new_sse_kms_rule( + cls: Type[SSEConfig.Rule], + kms_master_key_id: Optional[str] = None, + ) -> SSEConfig.Rule: + """Create new SSE-KMS rule.""" + return cls( + sse_algorithm=SSEConfig.Rule.AWS_KMS, + kms_master_key_id=kms_master_key_id, + ) + + @classmethod + def fromxml( + cls: Type[SSEConfig.Rule], + element: ET.Element, + ) -> SSEConfig.Rule: + """Create new object with values from XML element.""" + element = cast( + ET.Element, + find(element, "ApplyServerSideEncryptionByDefault", True), + ) + return cls( + sse_algorithm=cast( + str, findtext(element, "SSEAlgorithm", True), + ), + kms_master_key_id=findtext(element, "KMSMasterKeyID"), + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + tag = SubElement(element, "ApplyServerSideEncryptionByDefault") + SubElement(tag, "SSEAlgorithm", self.sse_algorithm) + if self.kms_master_key_id is not None: + SubElement(tag, "KMSMasterKeyID", self.kms_master_key_id) + return element + + +@dataclass(frozen=True) +class Tagging: + """Tagging for buckets and objects.""" + tags: Optional[Tags] + + @classmethod + def fromxml(cls: Type[Tagging], element: ET.Element) -> Tagging: + """Create new object with values from XML element.""" + element = cast(ET.Element, find(element, "TagSet", True)) + return cls( + tags=( + None if find(element, "Tag") is None + else Tags.fromxml(element) + ), + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("Tagging") + if self.tags: + self.tags.toxml(SubElement(element, "TagSet")) + return element + + +@dataclass(frozen=True) +class VersioningConfig: + """Versioning configuration.""" + DISABLED = "Disabled" + ENABLED = "Enabled" + OFF = "Off" + SUSPENDED = "Suspended" + + status: Optional[str] = None + mfa_delete: Optional[str] = None + excluded_prefixes: Optional[list[str]] = None + exclude_folders: bool = False + + def __post_init__(self): + if ( + self.status is not None and + self.status not in [ + VersioningConfig.ENABLED, VersioningConfig.SUSPENDED, + ] + ): + raise ValueError( + f"status must be {VersioningConfig.ENABLED} or " + f"{VersioningConfig.SUSPENDED}", + ) + if ( + self.mfa_delete is not None and + self.mfa_delete not in [ + VersioningConfig.ENABLED, VersioningConfig.DISABLED, + ] + ): + raise ValueError( + f"MFA delete must be {VersioningConfig.ENABLED} or " + f"{VersioningConfig.DISABLED}", + ) + + @property + def status_string(self) -> str: + """Convert status to status string. 
""" + return self.status or VersioningConfig.OFF + + @classmethod + def fromxml( + cls: Type[VersioningConfig], + element: ET.Element, + ) -> VersioningConfig: + """Create new object with values from XML element.""" + status = findtext(element, "Status") + mfa_delete = findtext(element, "MFADelete") + excluded_prefixes = [ + prefix.text + for prefix in findall( + element, + "ExcludedPrefixes/Prefix", + ) + ] + exclude_folders = cast( + str, + findtext(element, "ExcludeFolders", default=""), + ).lower() == "true" + return cls( + status=status, + mfa_delete=mfa_delete, + excluded_prefixes=cast(Union[List[str], None], excluded_prefixes), + exclude_folders=exclude_folders, + ) + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("VersioningConfiguration") + if self.status: + SubElement(element, "Status", self.status) + if self.mfa_delete: + SubElement(element, "MFADelete", self.mfa_delete) + for prefix in self.excluded_prefixes or []: + SubElement( + SubElement(element, "ExcludedPrefixes"), + "Prefix", + prefix, + ) + if self.exclude_folders: + SubElement(element, "ExcludeFolders", "true") + return element + + +################################################################################ +########### API request only XML models ########### +################################################################################ + +@dataclass(frozen=True) +class CreateBucketConfiguration: + """CreateBucket configuration.""" + location_constraint: str + location: Optional[CreateBucketConfiguration.Location] = None + bucket: Optional[CreateBucketConfiguration.Bucket] = None + tags: Optional[Tags] = None + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("CreateBucketConfiguration") + SubElement(element, "LocationConstraint", self.location_constraint) + if self.location: + self.location.toxml(SubElement(element, "Location")) + if self.bucket: + self.bucket.toxml(SubElement(element, "Bucket")) + if self.tags: + self.tags.toxml(SubElement(element, "Tags")) + return element + + @dataclass(frozen=True) + class Location: + """Bucket location information of CreateBucketConfiguration.""" + name: Optional[str] = None + type: Optional[str] = None + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + if self.name or self.type: + element = SubElement(element, "Location") + if self.name: + SubElement(element, "Name", self.name) + if self.type: + SubElement(element, "Type", self.type) + return element + + @dataclass(frozen=True) + class Bucket: + """Bucket properties of CreateBucketConfiguration.""" + data_redundancy: Optional[str] = None + type: Optional[str] = None + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + if self.data_redundancy or self.type: + element = SubElement(element, "Bucket") + if self.data_redundancy: + SubElement(element, "DataRedundancy", self.data_redundancy) + if self.type: + SubElement(element, "Type", self.type) + return element + + +@dataclass(frozen=True) +class DeleteRequest: + """Delete object request.""" + + objects: list[DeleteRequest.Object] + quiet: bool = False + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("Delete") + if self.quiet: + SubElement(element, "Quiet", "true") + for obj in 
self.objects: + obj.toxml(SubElement(element, "Object")) + return element + + @dataclass(frozen=True) + class Object: + """Delete object request information.""" + + name: str + version_id: Optional[str] = None + etag: Optional[str] = None + last_modified_time: Optional[datetime] = None + size: Optional[int] = None + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + SubElement(element, "Key", self.name) + if self.version_id is not None: + SubElement(element, "VersionId", self.version_id) + if self.etag: + SubElement(element, "ETag", self.etag) + if self.last_modified_time: + SubElement( + element, + "LastModifiedTime", to_http_header( + self.last_modified_time), + ) + if self.size is not None: + SubElement(element, "Size", str(self.size)) + return element + + +class PostPolicy: + """ + Post policy information used to generate presigned post policy + form-data. Condition elements and their respective conditions for the + post policy are documented at + https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-HTTPPOSTConstructPolicy.html#sigv4-PolicyConditions + """ + _RESERVED_ELEMENTS = ( + "bucket", + "x-amz-algorithm", + "x-amz-credential", + "x-amz-date", + "policy", + "x-amz-signature", + ) + _EQ = "eq" + _STARTS_WITH = "starts-with" + _ALGORITHM = "AWS4-HMAC-SHA256" + + @staticmethod + def _trim_dollar(value: str) -> str: + """Trim dollar character if present.""" + return value[1:] if value.startswith("$") else value + + def __init__(self, bucket_name: str, expiration: datetime): + check_bucket_name(bucket_name) + self._bucket_name = bucket_name + self._expiration = expiration + self._conditions: OrderedDict = OrderedDict() + self._conditions[self._EQ] = OrderedDict() + self._conditions[self._STARTS_WITH] = OrderedDict() + self._lower_limit: Optional[int] = None + self._upper_limit: Optional[int] = None + + def add_equals_condition(self, element: str, value: str): + """Add equals condition of an element and value.""" + if not element: + raise ValueError("condition element cannot be empty") + element = self._trim_dollar(element) + if ( + element in [ + "success_action_redirect", + "redirect", + "content-length-range", + ] + ): + raise ValueError(element + " is unsupported for equals condition") + if element in self._RESERVED_ELEMENTS: + raise ValueError(element + " cannot be set") + self._conditions[self._EQ][element] = value + + def remove_equals_condition(self, element: str): + """Remove previously set equals condition of an element.""" + if not element: + raise ValueError("condition element cannot be empty") + self._conditions[self._EQ].pop(element) + + def add_starts_with_condition(self, element: str, value: str): + """ + Add starts-with condition of an element and value. Setting the value + to an empty string matches any content.
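`DeleteRequest` above is the request payload for the DeleteObjects API; with `quiet=True` the server echoes only failures. A sketch:

```python
request = DeleteRequest(
    objects=[
        DeleteRequest.Object("photos/2024/a.jpg"),
        DeleteRequest.Object(
            "photos/2024/b.jpg",
            version_id="0f1e2d3c",  # placeholder version ID
        ),
    ],
    quiet=True,
)
```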
+ """ + if not element: + raise ValueError("condition element cannot be empty") + element = self._trim_dollar(element) + if ( + element in ["success_action_status", "content-length-range"] or + ( + element.startswith("x-amz-") and + not element.startswith("x-amz-meta-") + ) + ): + raise ValueError( + f"{element} is unsupported for starts-with condition", + ) + if element in self._RESERVED_ELEMENTS: + raise ValueError(element + " cannot be set") + self._conditions[self._STARTS_WITH][element] = value + + def remove_starts_with_condition(self, element: str): + """Remove previously set starts-with condition of an element.""" + if not element: + raise ValueError("condition element cannot be empty") + self._conditions[self._STARTS_WITH].pop(element) + + def add_content_length_range_condition( # pylint: disable=invalid-name + self, lower_limit: int, upper_limit: int): + """Add content-length-range condition with lower and upper limits.""" + if lower_limit < 0: + raise ValueError("lower limit cannot be negative number") + if upper_limit < 0: + raise ValueError("upper limit cannot be negative number") + if lower_limit > upper_limit: + raise ValueError("lower limit cannot be greater than upper limit") + self._lower_limit = lower_limit + self._upper_limit = upper_limit + + def remove_content_length_range_condition( # pylint: disable=invalid-name + self): + """Remove previously set content-length-range condition.""" + self._lower_limit = None + self._upper_limit = None + + def form_data(self, creds: Credentials, region: str): + """ + Return form-data of this post policy. The returned dict contains + x-amz-algorithm, x-amz-credential, x-amz-security-token, x-amz-date, + policy and x-amz-signature. + """ + if not region: + raise ValueError("region must be provided") + if ( + "key" not in self._conditions[self._EQ] and + "key" not in self._conditions[self._STARTS_WITH] + ): + raise ValueError("key condition must be set") + + policy: OrderedDict = OrderedDict() + policy["expiration"] = to_iso8601utc(self._expiration) + policy["conditions"] = [[self._EQ, "$bucket", self._bucket_name]] + for cond_key, conditions in self._conditions.items(): + for key, value in conditions.items(): + policy["conditions"].append([cond_key, "$"+key, value]) + if self._lower_limit is not None and self._upper_limit is not None: + policy["conditions"].append( + ["content-length-range", self._lower_limit, self._upper_limit], + ) + utcnow = datetime.utcnow() + credential = get_credential_string(creds.access_key, utcnow, region) + amz_date = to_amz_date(utcnow) + policy["conditions"].append( + [self._EQ, "$x-amz-algorithm", self._ALGORITHM], + ) + policy["conditions"].append( + [self._EQ, "$x-amz-credential", credential]) + if creds.session_token: + policy["conditions"].append( + [self._EQ, "$x-amz-security-token", creds.session_token], + ) + policy["conditions"].append([self._EQ, "$x-amz-date", amz_date]) + + policy_encoded = base64.b64encode( + json.dumps(policy).encode(), + ).decode("utf-8") + signature = post_presign_v4( + policy_encoded, creds.secret_key, utcnow, region, + ) + form_data = { + "x-amz-algorithm": self._ALGORITHM, + "x-amz-credential": credential, + "x-amz-date": amz_date, + "policy": policy_encoded, + "x-amz-signature": signature, + } + if creds.session_token: + form_data["x-amz-security-token"] = creds.session_token + return form_data + + @property + def bucket_name(self) -> str: + """Get bucket name.""" + return self._bucket_name + + +@dataclass(frozen=True) +class SelectObjectContentRequest: + """Select object 
content request.""" + + expression: str + input_serialization: InputSerialization + output_serialization: OutputSerialization + request_progress: bool = False + scan_start_range: Optional[int] = None + scan_end_range: Optional[int] = None + + def __post_init__(self): + if not self.expression: + raise ValueError("SQL expression must be provided") + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + element = Element("SelectObjectContentRequest") + SubElement(element, "Expression", self.expression) + SubElement(element, "ExpressionType", "SQL") + self.input_serialization.toxml( + SubElement(element, "InputSerialization"), + ) + self.output_serialization.toxml( + SubElement(element, "OutputSerialization"), + ) + if self.request_progress: + SubElement( + SubElement(element, "RequestProgress"), "Enabled", "true", + ) + if self.scan_start_range or self.scan_end_range: + tag = SubElement(element, "ScanRange") + if self.scan_start_range: + SubElement(tag, "Start", str(self.scan_start_range)) + if self.scan_end_range: + SubElement(tag, "End", str(self.scan_end_range)) + return element + + @dataclass(frozen=True) + class InputSerialization(ABC): + """Input serialization.""" + compression_type: Optional[ + SelectObjectContentRequest.CompressionType] = None + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + if self.compression_type is not None: + SubElement( + element, + "CompressionType", + str(self.compression_type), + ) + return element + + @dataclass(frozen=True) + class CSVInputSerialization(InputSerialization): + """CSV input serialization.""" + allow_quoted_record_delimiter: Optional[str] = None + comments: Optional[str] = None + field_delimiter: Optional[str] = None + file_header_info: Optional[ + SelectObjectContentRequest.FileHeaderInfo + ] = None + quote_character: Optional[str] = None + quote_escape_character: Optional[str] = None + record_delimiter: Optional[str] = None + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + super().toxml(element) + element = SubElement(element, "CSV") + if self.allow_quoted_record_delimiter is not None: + SubElement( + element, + "AllowQuotedRecordDelimiter", + self.allow_quoted_record_delimiter, + ) + if self.comments is not None: + SubElement(element, "Comments", self.comments) + if self.field_delimiter is not None: + SubElement(element, "FieldDelimiter", self.field_delimiter) + if self.file_header_info is not None: + SubElement( + element, + "FileHeaderInfo", + str(self.file_header_info), + ) + if self.quote_character is not None: + SubElement(element, "QuoteCharacter", self.quote_character) + if self.quote_escape_character is not None: + SubElement( + element, + "QuoteEscapeCharacter", + self.quote_escape_character, + ) + if self.record_delimiter is not None: + SubElement( + element, + "RecordDelimiter", + self.record_delimiter, + ) + return element + + @dataclass(frozen=True) + class JSONInputSerialization(InputSerialization): + """JSON input serialization.""" + json_type: Optional[ + SelectObjectContentRequest.JsonType] = None + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + super().toxml(element) + element = SubElement(element, "JSON") + if self.json_type is not None: + 
SubElement(element, "Type", str(self.json_type)) + return element + + @dataclass(frozen=True) + class ParquetInputSerialization(InputSerialization): + """Parquet input serialization.""" + + def toxml( # pylint: disable=no-self-use + self, + element: Optional[ET.Element], + ) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + super().toxml(element) + SubElement(element, "Parquet") + return element + + class CompressionType(str, Enum): + """Compression format of CSV and JSON input serialization.""" + NONE = "NONE" + GZIP = "GZIP" + BZIP2 = "BZIP2" + + class FileHeaderInfo(str, Enum): + """First line description of CSV object.""" + USE = "USE" + IGNORE = "IGNORE" + NONE = "NONE" + + class JsonType(str, Enum): + """JSON object type.""" + DOCUMENT = "DOCUMENT" + LINES = "LINES" + + @dataclass(frozen=True) + class OutputSerialization(ABC): + """Output serialization.""" + + def toxml( # pylint: disable=no-self-use + self, + element: Optional[ET.Element], + ) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + return element + + @dataclass(frozen=True) + class CSVOutputSerialization(OutputSerialization): + """CSV output serialization.""" + field_delimiter: Optional[str] = None + quote_character: Optional[str] = None + quote_escape_character: Optional[str] = None + quote_fields: Optional[ + SelectObjectContentRequest.QuoteFields + ] = None + record_delimiter: Optional[str] = None + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + element = SubElement(element, "CSV") + if self.field_delimiter is not None: + SubElement(element, "FieldDelimiter", self.field_delimiter) + if self.quote_character is not None: + SubElement(element, "QuoteCharacter", self.quote_character) + if self.quote_escape_character is not None: + SubElement( + element, + "QuoteEscapeCharacter", + self.quote_escape_character, + ) + if self.quote_fields is not None: + SubElement(element, "QuoteFields", str(self.quote_fields)) + if self.record_delimiter is not None: + SubElement( + element, + "RecordDelimiter", + self.record_delimiter, + ) + return element + + @dataclass(frozen=True) + class JSONOutputSerialization(OutputSerialization): + """JSON output serialization.""" + record_delimiter: Optional[str] = None + + def toxml(self, element: Optional[ET.Element]) -> ET.Element: + """Convert to XML.""" + if element is None: + raise ValueError("element must be provided") + element = SubElement(element, "JSON") + if self.record_delimiter is not None: + SubElement( + element, + "RecordDelimiter", + self.record_delimiter, + ) + return element + + class QuoteFields(str, Enum): + """Quotation field type.""" + ALWAYS = "ALWAYS" + ASNEEDED = "ASNEEDED" + + +################################################################################ +########### API response only XML models ########### +################################################################################ + +@dataclass(frozen=True) +class AccessControlPolicy: + """Access control policy of ACL.""" + _AUTHENTICATED_USERS_URL = ( + "http://acs.amazonaws.com/groups/global/AuthenticatedUsers" + ) + _ALL_USERS_URL = "http://acs.amazonaws.com/groups/global/AllUsers" + owner_id: Optional[str] = None + owner_name: Optional[str] = None + access_control_list: Optional[AccessControlList] = None + + @classmethod + def fromxml( + cls: Type[AccessControlPolicy], + element: 
ET.Element, + ) -> AccessControlPolicy: + """Create new object with values from XML element.""" + elem = find(element, "Owner") + owner_id, owner_name = ( + (None, None) if elem is None + else (findtext(elem, "ID"), findtext(elem, "DisplayName")) + ) + access_control_list = ( + None if find(element, "AccessControlList") is None + else AccessControlPolicy.AccessControlList.fromxml( + cast(ET.Element, find(element, "AccessControlList")), + ) + ) + return cls( + owner_id=owner_id, + owner_name=owner_name, + access_control_list=access_control_list, + ) + + @property + def canned_acl(self) -> str: + """Get canned ACL.""" + if not self.access_control_list: + return "" + if not self.access_control_list.grants: + return "" + grant_len = len(self.access_control_list.grants) + if grant_len < 1 or grant_len > 3: + return "" + for grant in self.access_control_list.grants: + if not grant or not grant.grantee: + continue + if ( + grant.permission == + AccessControlPolicy.Permission.FULL_CONTROL and + len(self.access_control_list.grants) == 1 and + not grant.grantee.uri + ): + return "private" + if ( + grant.permission == AccessControlPolicy.Permission.READ and + len(self.access_control_list.grants) == 2 + ): + if grant.grantee.uri == self._AUTHENTICATED_USERS_URL: + return "authenticated-read" + if grant.grantee.uri == self._ALL_USERS_URL: + return "public-read" + if self.owner_id == grant.grantee.grantee_id: + return "bucket-owner-read" + elif ( + grant.permission == AccessControlPolicy.Permission.WRITE and + len(self.access_control_list.grants) == 3 and + grant.grantee.uri == self._ALL_USERS_URL + ): + return "public-read-write" + return "" + + @property + def grant_acl(self) -> Optional[dict[str, str]]: + """Get grant ACLs.""" + if not self.access_control_list or not self.access_control_list.grants: + return None + mapping = { + AccessControlPolicy.Permission.READ: "X-Amz-Grant-Read", + AccessControlPolicy.Permission.WRITE: "X-Amz-Grant-Write", + AccessControlPolicy.Permission.READ_ACP: "X-Amz-Grant-Read-Acp", + AccessControlPolicy.Permission.WRITE_ACP: "X-Amz-Grant-Write-Acp", + AccessControlPolicy.Permission.FULL_CONTROL: + "X-Amz-Grant-Full-Control", + } + acls: dict[str, str] = {} + for grant in self.access_control_list.grants: + if ( + not grant or + not grant.permission or + not grant.grantee or + not grant.grantee.grantee_id + ): + continue + value = mapping.get(grant.permission) + if value: + acls[value] = "id=" + grant.grantee.grantee_id + return acls + + @dataclass(frozen=True) + class AccessControlList: + """Access control list""" + grants: Optional[List[AccessControlPolicy.Grant]] = None + + @classmethod + def fromxml( + cls: Type[AccessControlPolicy.AccessControlList], + element: ET.Element, + ) -> AccessControlPolicy.AccessControlList: + """Create new object with values from XML element.""" + return cls( + grants=[ + AccessControlPolicy.Grant.fromxml(elem) + for elem in findall(element, "Grant") + ], + ) + + @dataclass(frozen=True) + class Grant: + """Grant.""" + grantee: Optional[AccessControlPolicy.Grantee] = None + permission: Optional[AccessControlPolicy.Permission] = None + + @classmethod + def fromxml( + cls: Type[AccessControlPolicy.Grant], + element: ET.Element, + ) -> AccessControlPolicy.Grant: + """Create new object with values from XML element.""" + permission = findtext(element, "Permission") + return cls( + grantee=( + None if find(element, "Grantee") is None + else AccessControlPolicy.Grantee.fromxml( + cast(ET.Element, find(element, "Grantee")), + ) + ), + permission=( + 
AccessControlPolicy.Permission(permission) if permission + else None + ), + ) + + @dataclass(frozen=True) + class Grantee: + """Grantee.""" + display_name: Optional[str] = None + email_address: Optional[str] = None + grantee_id: Optional[str] = None + grantee_type: Optional[AccessControlPolicy.GranteeType] = None + uri: Optional[str] = None + + @classmethod + def fromxml( + cls: Type[AccessControlPolicy.Grantee], + element: ET.Element, + ) -> AccessControlPolicy.Grantee: + """Create new object with values from XML element.""" + grantee_type = findtext(element, "Type") + return cls( + display_name=findtext(element, "DisplayName"), + email_address=findtext(element, "EmailAddress"), + grantee_id=findtext(element, "ID"), + grantee_type=( + AccessControlPolicy.GranteeType(grantee_type) + if grantee_type else None + ), + uri=findtext(element, "URI"), + ) + + class GranteeType(str, Enum): + """Grantee type.""" + CanonicalUser = "CanonicalUser" + AmazonCustomerByEmail = "AmazonCustomerByEmail" + Group = "Group" + + class Permission(str, Enum): + """Grant permission.""" + FULL_CONTROL = "FULL_CONTROL" + WRITE = "WRITE" + WRITE_ACP = "WRITE_ACP" + READ = "READ" + READ_ACP = "READ_ACP" + + +@dataclass(frozen=True) +class BasePartsResult(ABC): + """ + Base part information for `ListPartsResult` and + `GetObjectAttributesOutput.ObjectParts`. + """ + is_truncated: bool = False + max_parts: Optional[int] = None + next_part_number_marker: Optional[int] = None + part_number_marker: Optional[int] = None + parts: Optional[List[Part]] = None + + @classmethod + def parsexml( + cls: Type[BasePartsResult], + element: ET.Element, + ) -> Tuple[bool, Optional[int], Optional[int], Optional[int], List[Part]]: + """Create new object with values from XML element.""" + max_parts = findtext(element, "MaxParts") + next_part_number_marker = findtext(element, "NextPartNumberMarker") + part_number_marker = findtext(element, "PartNumberMarker") + return ( + cast( + str, + findtext(element, "IsTruncated", default=""), + ).lower() == "true", + int(max_parts) if max_parts else None, + int(next_part_number_marker) if next_part_number_marker else None, + int(part_number_marker) if part_number_marker else None, + [Part.fromxml(elem) for elem in findall(element, "Part")], + ) + + +@dataclass(frozen=True) +class CompleteMultipartUploadResult(Checksum): + """CompleteMultipartUpload API result.""" + bucket_name: Optional[str] = None + object_name: Optional[str] = None + location: Optional[str] = None + etag: Optional[str] = None + version_id: Optional[str] = None + + @classmethod + def new( + cls: Type[CompleteMultipartUploadResult], + response: HTTPResponse, + ) -> CompleteMultipartUploadResult: + """Create CompleteMultipartUploadResult from response data.""" + element = ET.fromstring(response.data.decode()) + checksum = Checksum.fromxml(element) + return CompleteMultipartUploadResult( + bucket_name=findtext(element, "Bucket"), + object_name=findtext(element, "Key"), + location=findtext(element, "Location"), + etag=cast( + str, findtext(element, "ETag", default="")).replace('"', ""), + version_id=response.headers.get("x-amz-version-id"), + **vars(checksum), + ) + + +@dataclass(frozen=True) +class CopyObjectResult(Checksum): + """CopyObject result.""" + etag: str = "" + last_modified: Optional[datetime] = None + + @classmethod + def fromxml( + cls: Type[CopyObjectResult], + element: ET.Element, + ) -> CopyObjectResult: + """Create new object with values from XML element.""" + etag = cast(str, findtext(element, "ETag", 
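The `canned_acl` property above reverse-maps grant sets onto canned ACL names: a single unqualified FULL_CONTROL grant is `private`, READ for the AllUsers group is `public-read`, and so on. For example:

```python
acl = AccessControlPolicy(
    owner_id="owner-123",  # placeholder canonical user ID
    access_control_list=AccessControlPolicy.AccessControlList(
        grants=[
            AccessControlPolicy.Grant(
                grantee=AccessControlPolicy.Grantee(grantee_id="owner-123"),
                permission=AccessControlPolicy.Permission.FULL_CONTROL,
            ),
        ],
    ),
)
assert acl.canned_acl == "private"
assert acl.grant_acl == {"X-Amz-Grant-Full-Control": "id=owner-123"}
```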
True)).replace('"', "") + value = findtext(element, "LastModified") + last_modified = from_iso8601utc(value) if value else None + checksum = Checksum.fromxml(element) + return cls( + etag=etag, + last_modified=last_modified, + **vars(checksum), + ) + + +CopyPartResult = CopyObjectResult + + +@dataclass(frozen=True) +class DeleteResult: + """Delete object result.""" + objects: list[Deleted] = field(default_factory=list) + errors: list[Error] = field(default_factory=list) + + @classmethod + def fromxml(cls: Type[DeleteResult], element: ET.Element) -> DeleteResult: + """Create new object with values from XML element.""" + elements = findall(element, "Deleted") + objects = [] + for tag in elements: + objects.append(DeleteResult.Deleted.fromxml(tag)) + elements = findall(element, "Error") + errors = [] + for tag in elements: + errors.append(DeleteResult.Error.fromxml(tag)) + return cls(objects=objects, errors=errors) + + @dataclass(frozen=True) + class Deleted: + """Deleted object information.""" + name: str + version_id: Optional[str] + delete_marker: bool + delete_marker_version_id: Optional[str] + + @classmethod + def fromxml( + cls: Type[DeleteResult.Deleted], + element: ET.Element, + ) -> DeleteResult.Deleted: + """Create new object with values from XML element.""" + name = cast(str, findtext(element, "Key", True)) + version_id = findtext(element, "VersionId") + delete_marker = cast(str, findtext( + element, "DeleteMarker", default="")) + delete_marker_version_id = findtext( + element, "DeleteMarkerVersionId", + ) + return cls( + name=name, + version_id=version_id, + delete_marker=delete_marker.lower() == "true", + delete_marker_version_id=delete_marker_version_id, + ) + + @dataclass(frozen=True) + class Error: + """Delete error information.""" + code: str + message: Optional[str] + name: Optional[str] + version_id: Optional[str] + + @classmethod + def fromxml( + cls: Type[DeleteResult.Error], + element: ET.Element, + ) -> DeleteResult.Error: + """Create new object with values from XML element.""" + code = cast(str, findtext(element, "Code", True)) + message = findtext(element, "Message") + name = findtext(element, "Key") + version_id = findtext(element, "VersionId") + return cls( + code=code, + message=message, + name=name, + version_id=version_id, + ) + + +@dataclass(frozen=True) +class GetObjectAttributesOutput: + """Object attributes.""" + etag: Optional[str] = None + checksum: Optional[Checksum] = None + object_parts: Optional[ObjectParts] = None + storage_class: Optional[str] = None + object_size: Optional[int] = None + + @classmethod + def fromxml( + cls: Type[GetObjectAttributesOutput], + element: ET.Element, + ) -> GetObjectAttributesOutput: + """Create new object with values from XML element.""" + object_size = findtext(element, "ObjectSize") + return cls( + etag=findtext(element, "ETag"), + checksum=( + None if find(element, "Checksum") is None + else Checksum.fromxml( + cast(ET.Element, find(element, "Checksum")), + ) + ), + object_parts=( + None if find(element, "ObjectParts") is None + else GetObjectAttributesOutput.ObjectParts.fromxml( + cast(ET.Element, find(element, "ObjectParts")), + ) + ), + storage_class=findtext(element, "StorageClass"), + object_size=int(object_size) if object_size else None, + ) + + @dataclass(frozen=True) + class ObjectParts(BasePartsResult): + """Object parts.""" + parts_count: Optional[int] = None + + @classmethod + def fromxml( + cls: Type[GetObjectAttributesOutput.ObjectParts], + element: ET.Element, + ) -> GetObjectAttributesOutput.ObjectParts: 
+ """Create new object with values from XML element.""" + ( + is_truncated, + max_parts, + next_part_number_marker, + part_number_marker, + parts, + ) = super().parsexml(element) + parts_count = findtext(element, "PartsCount") + return cls( + part_number_marker=part_number_marker, + next_part_number_marker=next_part_number_marker, + max_parts=max_parts, + is_truncated=is_truncated, + parts=parts, + parts_count=int(parts_count) if parts_count else None, + ) + + +@dataclass(frozen=True) +class ListAllMyBucketsResult: + """LissBuckets API result.""" + buckets: list[Bucket] = field(default_factory=list) + prefix: Optional[str] = None + continuation_token: Optional[str] = None + owner_id: Optional[str] = None + owner_name: Optional[str] = None + + @classmethod + def fromxml( + cls: Type[ListAllMyBucketsResult], + element: ET.Element, + ) -> ListAllMyBucketsResult: + """Create new object with values from XML element.""" + prefix = findtext(element, "Prefix") + continuation_token = findtext(element, "ContinuationToken") + owner = find(element, "Owner") + owner_id = None if owner is None else findtext(owner, "ID") + owner_name = None if owner is None else findtext(owner, "DisplayName") + element = cast(ET.Element, find(element, "Buckets", True)) + buckets = [ + ListAllMyBucketsResult.Bucket.fromxml(tag) + for tag in findall(element, "Bucket") + ] + return cls( + buckets=buckets, + prefix=prefix, + continuation_token=continuation_token, + owner_id=owner_id, + owner_name=owner_name, + ) + + @dataclass(frozen=True) + class Bucket: + """Bucket information.""" + name: str + creation_date: Optional[datetime] = None + bucket_region: Optional[str] = None + bucket_arn: Optional[str] = None + + @classmethod + def fromxml( + cls: Type[ListAllMyBucketsResult.Bucket], + element: ET.Element, + ) -> ListAllMyBucketsResult.Bucket: + """Create new object with values from XML element.""" + name = cast(str, findtext(element, "Name", True)) + creation_date = findtext(element, "CreationDate") + return cls( + name=name, + creation_date=from_iso8601utc( + creation_date) if creation_date else None, + bucket_region=findtext(element, "BucketRegion"), + bucket_arn=findtext(element, "BucketArn"), + ) + + +@dataclass(frozen=True) +class InitiateMultipartUploadResult: + """CreateMultipartUpload result.""" + bucket_name: str + object_name: str + upload_id: str + + @classmethod + def fromxml( + cls: Type[InitiateMultipartUploadResult], + element: ET.Element, + ) -> InitiateMultipartUploadResult: + """Create new object with values from XML element.""" + return cls( + bucket_name=cast(str, findtext(element, "Bucket", True)), + object_name=cast(str, findtext(element, "Key", True)), + upload_id=cast(str, findtext(element, "UploadId", True)), + ) + + +@dataclass(frozen=True) +class ListMultipartUploadsResult: + """ListMultipartUploads API result.""" + encoding_type: Optional[str] = None + bucket_name: Optional[str] = None + key_marker: Optional[str] = None + upload_id_marker: Optional[str] = None + next_key_marker: Optional[str] = None + next_upload_id_marker: Optional[str] = None + max_uploads: Optional[int] = None + is_truncated: bool = False + uploads: list[Upload] = field(default_factory=list) + + @classmethod + def fromxml( + cls: Type[ListMultipartUploadsResult], + element: ET.Element, + ) -> ListMultipartUploadsResult: + """Create new object with values from XML element.""" + encoding_type = findtext(element, "EncodingType") + key_marker = findtext(element, "KeyMarker") + if key_marker is not None and encoding_type == "url": + 
key_marker = unquote_plus(key_marker) + next_key_marker = findtext(element, "NextKeyMarker") + if next_key_marker is not None and encoding_type == "url": + next_key_marker = unquote_plus(next_key_marker) + max_uploads = findtext(element, "MaxUploads") + + return ListMultipartUploadsResult( + encoding_type=encoding_type, + bucket_name=findtext(element, "Bucket"), + key_marker=key_marker, + upload_id_marker=findtext(element, "UploadIdMarker"), + next_key_marker=next_key_marker, + next_upload_id_marker=findtext(element, "NextUploadIdMarker"), + max_uploads=int(max_uploads) if max_uploads else None, + is_truncated=cast( + str, + findtext(element, "IsTruncated", default=""), + ).lower() == "true", + uploads=[ + ListMultipartUploadsResult.Upload(tag, encoding_type) + for tag in findall(element, "Upload") + ], + ) + + @dataclass(frozen=True) + class Upload: + """ Upload information of a multipart upload.""" + object_name: str + encoding_type: Optional[str] = None + upload_id: Optional[str] = None + initiator_id: Optional[str] = None + initiator_name: Optional[str] = None + owner_id: Optional[str] = None + owner_name: Optional[str] = None + storage_class: Optional[str] = None + initiated_time: Optional[datetime] = None + checksum_algorithm: Optional[str] = None + checksum_type: Optional[str] = None + + def __init__( + self, element: ET.Element, encoding_type: Optional[str] = None, + ): + object_name = cast(str, findtext(element, "Key", True)) + object.__setattr__( + self, + "object_name", + unquote_plus(object_name) if encoding_type == "url" + else object_name, + ) + object.__setattr__(self, "encoding_type", encoding_type) + object.__setattr__(self, "upload_id", + findtext(element, "UploadId")) + tag = find(element, "Initiator") + object.__setattr__( + self, + "initiator_id", + None if tag is None else findtext(tag, "ID"), + ) + object.__setattr__( + self, + "initiator_name", + None if tag is None else findtext(tag, "DisplayName"), + ) + tag = find(element, "Owner") + object.__setattr__( + self, + "owner_id", + None if tag is None else findtext(tag, "ID"), + ) + object.__setattr__( + self, + "owner_name", + None if tag is None else findtext(tag, "DisplayName"), + ) + object.__setattr__( + self, + "storage_class", + findtext(element, "StorageClass"), + ) + initiated_time = findtext(element, "Initiated") + object.__setattr__( + self, + "initiated_time", + from_iso8601utc(initiated_time) if initiated_time else None, + ) + object.__setattr__( + self, + "checksum_algorithm", + findtext(element, "ChecksumAlgorithm"), + ) + object.__setattr__( + self, + "checksum_type", + findtext(element, "ChecksumType"), + ) + + +@dataclass(frozen=True) +class ListPartsResult(BasePartsResult): + """ListParts API result.""" + bucket_name: Optional[str] = None + object_name: Optional[str] = None + initiator_id: Optional[str] = None + initiator_name: Optional[str] = None + owner_id: Optional[str] = None + owner_name: Optional[str] = None + storage_class: Optional[str] = None + + @classmethod + def fromxml( + cls: Type[ListPartsResult], + element: ET.Element, + ) -> ListPartsResult: + """Create new object with values from XML element.""" + ( + is_truncated, + max_parts, + next_part_number_marker, + part_number_marker, + parts, + ) = super().parsexml(element) + tag = find(element, "Initiator") + initiator_id = None if tag is None else findtext(tag, "ID") + initiator_name = None if tag is None else findtext(tag, "DisplayName") + tag = find(element, "Owner") + owner_id = None if tag is None else findtext(tag, "ID") + 
owner_name = None if tag is None else findtext(tag, "DisplayName") + return cls( + bucket_name=findtext(element, "Bucket"), + object_name=findtext(element, "Key"), + initiator_id=initiator_id, + initiator_name=initiator_name, + owner_id=owner_id, + owner_name=owner_name, + storage_class=findtext(element, "StorageClass"), + part_number_marker=part_number_marker, + next_part_number_marker=next_part_number_marker, + max_parts=max_parts, + is_truncated=is_truncated, + parts=parts, + ) + + +################################################################################ +########### API responses ########### +################################################################################ + + +@dataclass(frozen=True) +class GenericResponse: + """ Generic response of any APIs.""" + headers: HTTPHeaderDict + bucket_name: Optional[str] = None + region: Optional[str] = None + object_name: Optional[str] = None + + def __init__( + self, + *, + headers: HTTPHeaderDict, + bucket_name: Optional[str] = None, + region: Optional[str] = None, + object_name: Optional[str] = None, + ): + object.__setattr__(self, "headers", headers) + object.__setattr__(self, "bucket_name", bucket_name) + object.__setattr__(self, "region", region) + object.__setattr__(self, "object_name", object_name) + + +@dataclass(frozen=True) +class AbortMultipartUploadResponse(GenericResponse): + """ Response of AbortMultipartUpload API.""" + upload_id: str = "" + + def __init__( + self, + *, + headers: HTTPHeaderDict, + bucket_name: str, + region: str, + object_name: str, + upload_id: str, + ): + super().__init__( + headers=headers, + bucket_name=bucket_name, + region=region, + object_name=object_name, + ) + object.__setattr__(self, "upload_id", upload_id) + + +@dataclass(frozen=True) +class CreateMultipartUploadResponse(GenericResponse): + """ Response of CreateMultipartUpload API.""" + result: InitiateMultipartUploadResult = InitiateMultipartUploadResult( + "", "", "", + ) + + def __init__( + self, + *, + response: HTTPResponse, + bucket_name: str, + region: str, + object_name: str, + ): + super().__init__( + headers=response.headers, + bucket_name=bucket_name, + region=region, + object_name=object_name, + ) + object.__setattr__( + self, + "result", + unmarshal(InitiateMultipartUploadResult, response.data.decode()), + ) + + +@dataclass(frozen=True) +class DeleteObjectsResponse(GenericResponse): + """ Response of DeleteObjects API.""" + result: DeleteResult = DeleteResult() + + def __init__( + self, + *, + response: HTTPResponse, + bucket_name: str, + region: str, + ): + super().__init__( + headers=response.headers, + bucket_name=bucket_name, + region=region, + ) + object.__setattr__( + self, + "result", + unmarshal(DeleteResult, response.data.decode()), + ) + + +class EventIterable: + """Context manager friendly event iterable.""" + + def __init__(self, func): + self._func = func + self._response = None + + def _close_response(self): + """Close response.""" + if self._response: + self._response.close() + self._response.release_conn() + self._response = None + + def __iter__(self): + return self + + def _get_records(self): + """Get event records from response stream.""" + try: + line = self._response.readline().strip() + if not line: + return None + if hasattr(line, 'decode'): + line = line.decode() + event = json.loads(line) + if event['Records']: + return event + except (StopIteration, JSONDecodeError): + self._close_response() + return None + + def __next__(self): + records = None + while not records: + if not self._response or 
self._response.closed:
+                self._response = self._func()
+            records = self._get_records()
+        return records
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, value, traceback):
+        self._close_response()
+
+
+@dataclass(frozen=True)
+class GenericUploadResponse(GenericResponse):
+    """Common response of any object upload API."""
+    etag: str = ""
+    last_modified: Optional[datetime] = None
+    checksum_crc32: Optional[str] = None
+    checksum_crc32c: Optional[str] = None
+    checksum_crc64nvme: Optional[str] = None
+    checksum_sha1: Optional[str] = None
+    checksum_sha256: Optional[str] = None
+    checksum_type: Optional[str] = None
+
+    def __init__(  # pylint: disable=too-many-positional-arguments
+        self,
+        *,
+        headers: HTTPHeaderDict,
+        bucket_name: str,
+        region: str,
+        object_name: str,
+        etag: Optional[str] = None,
+        result: Union[
+            CopyObjectResult, CompleteMultipartUploadResult, None] = None,
+    ):
+        super().__init__(
+            headers=headers,
+            bucket_name=bucket_name,
+            region=region,
+            object_name=object_name,
+        )
+        object.__setattr__(
+            self,
+            "etag",
+            etag or headers.get("etag", "").replace('"', ""),
+        )
+        if isinstance(result, CopyObjectResult):
+            object.__setattr__(self, "last_modified", result.last_modified)
+        object.__setattr__(
+            self,
+            "checksum_crc32",
+            (
+                result.checksum_crc32 if result else
+                headers.get("x-amz-checksum-crc32")
+            ),
+        )
+        object.__setattr__(
+            self,
+            "checksum_crc32c",
+            (
+                result.checksum_crc32c if result else
+                headers.get("x-amz-checksum-crc32c")
+            ),
+        )
+        object.__setattr__(
+            self,
+            "checksum_crc64nvme",
+            (
+                result.checksum_crc64nvme if result else
+                headers.get("x-amz-checksum-crc64nvme")
+            ),
+        )
+        object.__setattr__(
+            self,
+            "checksum_sha1",
+            (
+                result.checksum_sha1 if result else
+                headers.get("x-amz-checksum-sha1")
+            ),
+        )
+        object.__setattr__(
+            self,
+            "checksum_sha256",
+            (
+                result.checksum_sha256 if result else
+                headers.get("x-amz-checksum-sha256")
+            ),
+        )
+        object.__setattr__(
+            self,
+            "checksum_type",
+            (
+                result.checksum_type if result else
+                headers.get("x-amz-checksum-type")
+            ),
+        )
+
+
+@dataclass(frozen=True)
+class GetObjectAclResponse(GenericResponse):
+    """ Response of GetObjectACL API."""
+    policy: AccessControlPolicy = AccessControlPolicy()
+    version_id: Optional[str] = None
+
+    def __init__(
+        self,
+        *,
+        response: HTTPResponse,
+        bucket_name: str,
+        region: str,
+        object_name: str,
+        version_id: Optional[str] = None,
+    ):
+        super().__init__(
+            headers=response.headers,
+            bucket_name=bucket_name,
+            region=region,
+            object_name=object_name,
+        )
+        object.__setattr__(
+            self,
+            "policy",
+            unmarshal(AccessControlPolicy, response.data.decode()),
+        )
+        object.__setattr__(self, "version_id", version_id)
+
+
+@dataclass(frozen=True)
+class GetObjectAttributesResponse(GenericResponse):
+    """ Response of GetObjectAttributes API."""
+    result: GetObjectAttributesOutput = GetObjectAttributesOutput()
+    delete_marker: bool = False
+    last_modified: Optional[datetime] = None
+    version_id: Optional[str] = None
+
+    def __init__(
+        self,
+        *,
+        response: HTTPResponse,
+        bucket_name: str,
+        region: str,
+        object_name: str,
+    ):
+        super().__init__(
+            headers=response.headers,
+            bucket_name=bucket_name,
+            region=region,
+            object_name=object_name,
+        )
+        object.__setattr__(
+            self,
+            "result",
+            unmarshal(GetObjectAttributesOutput, response.data.decode()),
+        )
+        object.__setattr__(
+            self,
+            "delete_marker",
+            response.headers.get("x-amz-delete-marker", "").lower() == "true",
+        )
+        last_modified = response.headers.get("Last-Modified")
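+        # Last-Modified, when present, is an HTTP-date (RFC 7231); it is
+        # converted to a datetime via from_http_header() below.
+        if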
last_modified: + object.__setattr__( + self, + "last_modified", + from_http_header(last_modified), + ) + object.__setattr__( + self, + "version_id", + response.headers.get("x-amz-version-id"), + ) + + +@dataclass(frozen=True) +class HeadBucketResponse(GenericResponse): + """ Response of HeadBucket API.""" + bucket_arn: Optional[str] = None + location_type: Optional[str] = None + location_name: Optional[str] = None + access_point_alias: Optional[str] = None + + def __init__( + self, + *, + headers: HTTPHeaderDict, + bucket_name: str, + region: str, + ): + super().__init__( + headers=headers, + bucket_name=bucket_name, + region=region, + ) + object.__setattr__( + self, + "bucket_arn", + headers.get("x-amz-bucket-arn"), + ) + object.__setattr__( + self, + "location_type", + headers.get("x-amz-bucket-location-type"), + ) + object.__setattr__( + self, + "location_name", + headers.get("x-amz-bucket-location-name"), + ) + object.__setattr__( + self, + "access_point_alias", + headers.get("x-amz-access-point-alias"), + ) + + +@dataclass(frozen=True) +class HeadObjectResponse(GenericResponse): + """ Response of HeadObject API.""" + etag: str = "" + size: int = 0 + delete_marker: bool = False + last_modified: Optional[datetime] = None + lock_mode: Optional[str] = None + lock_retain_until_date: Optional[datetime] = None + lock_legal_hold: bool = False + checksums: Optional[dict[Algorithm, str]] = None + checksum_type: Optional[ChecksumType] = None + user_metadata: Optional[HTTPHeaderDict] = None + + def __init__( + self, + *, + headers: HTTPHeaderDict, + bucket_name: str, + region: str, + object_name: str, + ): + super().__init__( + headers=headers, + bucket_name=bucket_name, + region=region, + object_name=object_name, + ) + object.__setattr__( + self, + "etag", + headers.get("etag", "").replace('"', ""), + ) + object.__setattr__( + self, + "size", + int(headers.get("content-length", "0")), + ) + object.__setattr__( + self, + "delete_marker", + headers.get("x-amz-delete-marker", "").lower() == "true", + ) + value = headers.get("last-modified") + object.__setattr__( + self, + "last_modified", + from_http_header(value) if value is not None else None, + ) + object.__setattr__( + self, + "lock_mode", + headers.get("x-amz-object-lock-mode", None), + ) + value = headers.get("x-amz-object-lock-retain-until-date") + object.__setattr__( + self, + "lock_retain_until_date", + from_iso8601utc(value) if value is not None else None, + ) + object.__setattr__( + self, + "lock_legal_hold", + headers.get("x-amz-object-lock-legal-hold", "") == "ON", + ) + mapping = { + "x-amz-checksum-crc32": Algorithm.CRC32, + "x-amz-checksum-crc32c": Algorithm.CRC32C, + "x-amz-checksum-crc64nvme": Algorithm.CRC64NVME, + "x-amz-checksum-sha1": Algorithm.SHA1, + "x-amz-checksum-sha256": Algorithm.SHA256, + } + checksums = {} + for name, algo in mapping.items(): + checksum = self.headers.get(name) + if checksum: + checksums[algo] = checksum + object.__setattr__(self, "checksums", checksums) + value = headers.get("x-amz-checksum-type") + if value: + object.__setattr__(self, "checksum_type", ChecksumType(value)) + user_metadata = HTTPHeaderDict() + for name, value in headers.items(): + lower_name = name.lower() + if lower_name.startswith("x-amz-meta-"): + key = lower_name[len("x-amz-meta-"):] + user_metadata[key] = value + object.__setattr__(self, "user_metadata", user_metadata) + + @property + def version_id(self) -> Optional[str]: + """Get version ID.""" + return self.headers.get("x-amz-version-id") + + @property + def content_type(self) 
-> Optional[str]:
+        """Get content-type."""
+        return self.headers.get("content-type")
+
+
+@dataclass(frozen=True)
+class ListBucketsResponse(GenericResponse):
+    """ Response of ListBuckets API."""
+    result: ListAllMyBucketsResult = ListAllMyBucketsResult()
+
+    def __init__(self, *, response: HTTPResponse, region: Optional[str]):
+        super().__init__(headers=response.headers, region=region)
+        object.__setattr__(
+            self,
+            "result",
+            unmarshal(ListAllMyBucketsResult, response.data.decode()),
+        )
+
+
+@dataclass(frozen=True)
+class ListMultipartUploadsResponse(GenericResponse):
+    """ Response of ListMultipartUploads API."""
+    result: ListMultipartUploadsResult = ListMultipartUploadsResult()
+
+    def __init__(
+        self,
+        *,
+        response: HTTPResponse,
+        bucket_name: str,
+        region: str,
+    ):
+        super().__init__(
+            headers=response.headers,
+            bucket_name=bucket_name,
+            region=region,
+        )
+        object.__setattr__(
+            self,
+            "result",
+            unmarshal(ListMultipartUploadsResult, response.data.decode()),
+        )
+
+
+@dataclass(frozen=True)
+class ListPartsResponse(GenericResponse):
+    """ Response of ListParts API."""
+    result: ListPartsResult = ListPartsResult()
+
+    def __init__(
+        self,
+        *,
+        response: HTTPResponse,
+        bucket_name: str,
+        region: str,
+        object_name: str,
+    ):
+        super().__init__(
+            headers=response.headers,
+            bucket_name=bucket_name,
+            region=region,
+            object_name=object_name,
+        )
+        object.__setattr__(
+            self,
+            "result",
+            unmarshal(ListPartsResult, response.data.decode()),
+        )
+
+
+@dataclass(frozen=True)
+class ObjectWriteResponse(GenericUploadResponse):
+    """Response of any API doing object creation."""
+    version_id: Optional[str] = None
+
+    def __init__(  # pylint: disable=too-many-positional-arguments
+        self,
+        *,
+        headers: HTTPHeaderDict,
+        bucket_name: str,
+        region: str,
+        object_name: str,
+        etag: Optional[str] = None,
+        version_id: Optional[str] = None,
+        result: Union[
+            CopyObjectResult, CompleteMultipartUploadResult, None] = None,
+    ):
+        super().__init__(
+            headers=headers,
+            bucket_name=bucket_name,
+            region=region,
+            object_name=object_name,
+            etag=etag,
+            result=result,
+        )
+        object.__setattr__(
+            self,
+            "version_id",
+            version_id or headers.get("x-amz-version-id"),
+        )
+
+
+@dataclass(frozen=True)
+class PutObjectFanOutResponse(GenericResponse):
+    """ Response of PutObjectFanOut API."""
+    results: List[Result] = field(default_factory=list)
+
+    def __init__(
+        self,
+        *,
+        response: HTTPResponse,
+        bucket_name: str,
+        region: str,
+    ):
+        super().__init__(
+            headers=response.headers,
+            bucket_name=bucket_name,
+            region=region,
+        )
+
+        def to_result(result):
+            """Create new result."""
+            return PutObjectFanOutResponse.Result(
+                key=result["key"],
+                etag=result["etag"],
+                version_id=result.get("versionId"),
+                last_modified=(
+                    from_iso8601utc(result.get("lastModified"))
+                    if result.get("lastModified") else None
+                ),
+                error=result.get("error"),
+            )
+        object.__setattr__(
+            self,
+            "results",
+            [
+                to_result(HTTPHeaderDict(json.loads(line)))
+                for line in response if line
+            ],
+        )
+
+    @dataclass(frozen=True)
+    class Result:
+        """PutObjectFanOut result."""
+        key: str
+        etag: str
+        version_id: Optional[str] = None
+        last_modified: Optional[datetime] = None
+        error: Optional[str] = None
+
+
+class GetObjectResponse(GenericResponse, BufferedIOBase):
+    """GetObject response, file-like and BufferedIOBase-compatible."""
+    version_id: Optional[str] = None
+
+    def __init__(  # pylint: disable=too-many-positional-arguments
+        self,
+        *,
+        response: HTTPResponse,
+        bucket_name: str,
+        region: str,
+
object_name: str, + version_id: Optional[str] = None, + ): + super().__init__( + headers=response.headers, + bucket_name=bucket_name, + region=region, + object_name=object_name, + ) + self._response = response + object.__setattr__(self, "version_id", version_id) + + def __enter__(self) -> GetObjectResponse: + return self + + def __exit__(self, exc_type, exc_value, exc_traceback) -> None: + self.close() + + def readable(self) -> bool: # type: ignore[override] + """Return True: this stream is readable.""" + return True + + def writable(self) -> bool: # type: ignore[override] + """Return False: this stream is not writable.""" + return False + + def seekable(self) -> bool: # type: ignore[override] + """S3 object stream is not seekable by default.""" + return False + + def close(self) -> None: # type: ignore[override] + """Close response and release network resources.""" + try: + # Close the underlying response + self._response.close() + self._response.release_conn() + finally: + # Mark this BufferedIOBase as closed + super().close() + + def read(self, size: int = -1) -> bytes: # type: ignore[override] + """ + Read up to `size` bytes from the stream. + + If size is -1 (default), read until EOF. + """ + return self._response.read(size) + + def readinto(self, b) -> int: # type: ignore[override] + """ + Read bytes into a pre-allocated, writable bytes-like object `b`. + + Returns the number of bytes read (0 on EOF). + """ + # Determine how many bytes to request + length = len(b) + data = self._response.read(length) + n = len(data) + b[:n] = data + return n + + def stream(self, num_bytes: int = 32 * 1024): + """ + Stream data in chunks of `num_bytes`. + + This is a convenience wrapper over the underlying HTTPResponse.stream(). + """ + yield from self._response.stream(num_bytes) + + +class PromptObjectResponse(GenericResponse, BufferedIOBase): + """PromptObject response, file-like and BufferedIOBase-compatible.""" + + def __init__( # pylint: disable=too-many-positional-arguments + self, + *, + response: HTTPResponse, + bucket_name: str, + region: str, + object_name: str, + ): + super().__init__( + headers=response.headers, + bucket_name=bucket_name, + region=region, + object_name=object_name, + ) + self._response = response + + def __enter__(self) -> PromptObjectResponse: + return self + + def __exit__(self, exc_type, exc_value, exc_traceback) -> None: + self.close() + + def readable(self) -> bool: # type: ignore[override] + """Return True: this stream is readable.""" + return True + + def writable(self) -> bool: # type: ignore[override] + """Return False: this stream is not writable.""" + return False + + def seekable(self) -> bool: # type: ignore[override] + """S3 object stream is not seekable by default.""" + return False + + def close(self) -> None: # type: ignore[override] + """Close response and release network resources.""" + try: + # Close the underlying response + self._response.close() + self._response.release_conn() + finally: + # Mark this BufferedIOBase as closed + super().close() + + def read(self, size: int = -1) -> bytes: # type: ignore[override] + """ + Read up to `size` bytes from the stream. + + If size is -1 (default), read until EOF. + """ + return self._response.read(size) + + def readinto(self, b) -> int: # type: ignore[override] + """ + Read bytes into a pre-allocated, writable bytes-like object `b`. + + Returns the number of bytes read (0 on EOF). 
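+
+        A minimal usage sketch (the buffer size is illustrative):
+
+            buf = bytearray(16 * 1024)
+            n = response.readinto(buf)
+            data = bytes(buf[:n])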
+ """ + # Determine how many bytes to request + length = len(b) + data = self._response.read(length) + n = len(data) + b[:n] = data + return n + + def stream(self, num_bytes: int = 32 * 1024): + """ + Stream data in chunks of `num_bytes`. + + This is a convenience wrapper over the underlying HTTPResponse.stream(). + """ + yield from self._response.stream(num_bytes) + + +class SelectObjectResponse(GenericResponse): + """ + BufferedIOBase compatible reader represents response data of + Minio.select_object_content() API. + """ + + def __init__( # pylint: disable=too-many-positional-arguments + self, + *, + response: HTTPResponse, + bucket_name: str, + region: str, + object_name: str): + super().__init__( + headers=response.headers, + bucket_name=bucket_name, + region=region, + object_name=object_name, + ) + self._response = response + self._stats = None + self._payload = None + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_traceback): + return self.close() + + def readable(self): # pylint: disable=no-self-use + """Return this is readable.""" + return True + + def writeable(self): # pylint: disable=no-self-use + """Return this is not writeable.""" + return False + + def close(self): + """Close response and release network resources.""" + self._response.close() + self._response.release_conn() + + def stats(self): + """Get stats information.""" + return self._stats + + def _read(self): + """Read and decode response.""" + if self._response.isclosed(): + return 0 + + prelude = self._read_fully(self._response, 8) + prelude_crc = self._read_fully(self._response, 4) + if self._crc32(prelude) != self._int(prelude_crc): + raise IOError( + f"prelude CRC mismatch; expected: {self._crc32(prelude)}, " + f"got: {self._int(prelude_crc)}" + ) + + total_length = self._int(prelude[:4]) + data = self._read_fully(self._response, total_length - 8 - 4 - 4) + message_crc = self._int(self._read_fully(self._response, 4)) + if self._crc32(prelude + prelude_crc + data) != message_crc: + raise IOError( + f"message CRC mismatch; " + f"expected: {self._crc32(prelude + prelude_crc + data)}, " + f"got: {message_crc}" + ) + + header_length = SelectObjectResponse._int(prelude[4:]) + headers = SelectObjectResponse._decode_header(data[:header_length]) + + if headers.get(":message-type") == "error": + raise MinioException( + f"{headers.get(':error-code')}: " + f"{headers.get(':error-message')}" + ) + + if headers.get(":event-type") == "End": + return 0 + + payload_length = total_length - header_length - 16 + if headers.get(":event-type") == "Cont" or payload_length < 1: + return self._read() + + payload = data[header_length:header_length+payload_length] + + if headers.get(":event-type") in ["Progress", "Stats"]: + self._stats = SelectObjectResponse.Stats(payload) + return self._read() + + if headers.get(":event-type") == "Records": + self._payload = payload + return len(payload) + + raise MinioException( + f"unknown event-type {headers.get(':event-type')}", + ) + + def stream(self, num_bytes=32*1024): + """ + Stream extracted payload from response data. Upon completion, caller + should call self.close() to release network resources. 
+ """ + while self._read() > 0: + while self._payload: + result = self._payload + if num_bytes < len(self._payload): + result = self._payload[:num_bytes] + self._payload = self._payload[len(result):] + yield result + + @staticmethod + def _read_fully(reader, size): + """Wrapper to RawIOBase.read() to error out on short reads.""" + data = reader.read(size) + if len(data) != size: + raise IOError("insufficient data") + return data + + @staticmethod + def _int(data): + """Convert byte data to big-endian int.""" + return int.from_bytes(data, byteorder="big") + + @staticmethod + def _crc32(data): + """Wrapper to binascii.crc32().""" + return crc32(data) & 0xffffffff + + @staticmethod + def _decode_header(data): + """Decode header data.""" + reader = BytesIO(data) + headers = {} + while True: + length = reader.read(1) + if not length: + break + name = SelectObjectResponse._read_fully( + reader, + SelectObjectResponse._int(length), + ) + if SelectObjectResponse._int( + SelectObjectResponse._read_fully(reader, 1), + ) != 7: + raise IOError("header value type is not 7") + value = SelectObjectResponse._read_fully( + reader, + SelectObjectResponse._int( + SelectObjectResponse._read_fully(reader, 2), + ), + ) + headers[name.decode()] = value.decode() + return headers + + @dataclass(frozen=True) + class Stats: + """Progress/Stats information.""" + bytes_scanned: Optional[str] = None + bytes_processed: Optional[str] = None + bytes_returned: Optional[str] = None + + def __init__(self, data): + element = ET.fromstring(data.decode()) + object.__setattr__( + self, + "bytes_scanned", + findtext(element, "BytesScanned"), + ) + object.__setattr__( + self, + "bytes_processed", + findtext(element, "BytesProcessed"), + ) + object.__setattr__( + self, + "bytes_returned", + findtext(element, "BytesReturned"), + ) + + +StatObjectResponse = HeadObjectResponse + + +@dataclass(frozen=True) +class UploadPartCopyResponse(GenericResponse): + """ Response of UploadPartCopy API.""" + upload_id: str = "" + part_number: int = 0 + result: CopyPartResult = CopyPartResult() + + def __init__( # pylint: disable=too-many-positional-arguments + self, + *, + response: HTTPResponse, + bucket_name: str, + region: str, + object_name: str, + upload_id: str, + part_number: int, + ): + super().__init__( + headers=response.headers, + bucket_name=bucket_name, + region=region, + object_name=object_name, + ) + object.__setattr__(self, "upload_id", upload_id) + object.__setattr__(self, "part_number", part_number) + object.__setattr__( + self, + "result", + unmarshal(CopyPartResult, response.data.decode()), + ) + + @property + def part(self) -> Part: + """Get part information.""" + return Part.new(self.result, self.part_number) + + +@dataclass(frozen=True) +class UploadPartResponse(GenericResponse): + """ Response of UploadPart API.""" + upload_id: str = "" + part: Part = Part() + + def __init__( # pylint: disable=too-many-positional-arguments + self, + *, + response: ObjectWriteResponse, + upload_id: str, + part_number: int, + ): + super().__init__( + headers=response.headers, + bucket_name=response.bucket_name, + region=response.region, + object_name=response.object_name, + ) + part = Part( + part_number=part_number, + etag=response.etag, + checksum_crc32=response.checksum_crc32, + checksum_crc32c=response.checksum_crc32c, + checksum_crc64nvme=response.checksum_crc64nvme, + checksum_sha1=response.checksum_sha1, + checksum_sha256=response.checksum_sha256, + ) + object.__setattr__(self, "upload_id", upload_id) + object.__setattr__(self, "part", 
part) diff --git a/minio/notificationconfig.py b/minio/notificationconfig.py deleted file mode 100644 index bdf6ee1d..00000000 --- a/minio/notificationconfig.py +++ /dev/null @@ -1,283 +0,0 @@ -# -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Request/response of PutBucketNotificationConfiguration and -GetBucketNotiicationConfiguration APIs. -""" - -from __future__ import absolute_import, annotations - -from abc import ABC -from dataclasses import dataclass, field -from typing import Optional, Type, TypeVar, cast -from xml.etree import ElementTree as ET - -from .xml import Element, SubElement, find, findall, findtext - -A = TypeVar("A", bound="FilterRule") - - -@dataclass(frozen=True) -class FilterRule(ABC): - """Filter rule.""" - - name: str - value: str - - @classmethod - def fromxml(cls: Type[A], element: ET.Element) -> A: - """Create new object with values from XML element.""" - name = cast(str, findtext(element, "Name", True)) - value = cast(str, findtext(element, "Value", True)) - return cls(name, value) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "FilterRule") - SubElement(element, "Name", self.name) - SubElement(element, "Value", self.value) - return element - - -@dataclass(frozen=True) -class PrefixFilterRule(FilterRule): - """Prefix filter rule.""" - - def __init__(self, value: str): - super().__init__(name="prefix", value=value) - - -@dataclass(frozen=True) -class SuffixFilterRule(FilterRule): - """Suffix filter rule.""" - - def __init__(self, value: str): - super().__init__(name="suffix", value=value) - - -@dataclass(frozen=True) -class CommonConfig(ABC): - """Common for cloud-function/queue/topic configuration.""" - - events: list[str] - config_id: Optional[str] = None - prefix_filter_rule: Optional[PrefixFilterRule] = None - suffix_filter_rule: Optional[SuffixFilterRule] = None - - def __post_init__(self): - if not self.events: - raise ValueError("events must be provided") - - @staticmethod - def parsexml( - element: ET.Element, - ) -> tuple[ - list[str], - Optional[str], - Optional[PrefixFilterRule], - Optional[SuffixFilterRule], - ]: - """Parse XML.""" - elements = findall(element, "Event") - events = [] - for tag in elements: - if tag.text is None: - raise ValueError("missing value in XML tag 'Event'") - events.append(tag.text) - config_id = findtext(element, "Id") - elem = find(element, "Filter") - if elem is None: - return events, config_id, None, None - prefix_filter_rule = None - suffix_filter_rule = None - elem = cast(ET.Element, find(elem, "S3Key", True)) - elements = findall(elem, "FilterRule") - for tag in elements: - filter_rule = FilterRule.fromxml(tag) - if filter_rule.name == "prefix": - prefix_filter_rule = PrefixFilterRule(filter_rule.value) - else: - suffix_filter_rule = 
SuffixFilterRule(filter_rule.value) - return events, config_id, prefix_filter_rule, suffix_filter_rule - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - for event in self.events: - SubElement(element, "Event", event) - if self.config_id is not None: - SubElement(element, "Id", self.config_id) - if self.prefix_filter_rule or self.suffix_filter_rule: - rule = SubElement(element, "Filter") - rule = SubElement(rule, "S3Key") - if self.prefix_filter_rule: - self.prefix_filter_rule.toxml(rule) - if self.suffix_filter_rule: - self.suffix_filter_rule.toxml(rule) - return element - - -B = TypeVar("B", bound="CloudFuncConfig") - - -@dataclass(frozen=True) -class CloudFuncConfig(CommonConfig): - """Cloud function configuration.""" - cloud_func: Optional[str] = None - - def __post_init__(self): - if not self.cloud_func: - raise ValueError("cloud function must be provided") - - @classmethod - def fromxml(cls: Type[B], element: ET.Element) -> B: - """Create new object with values from XML element.""" - cloud_func = cast(str, findtext(element, "CloudFunction", True)) - (events, config_id, prefix_filter_rule, - suffix_filter_rule) = cls.parsexml(element) - return cls( - cloud_func=cloud_func, - events=events, - config_id=config_id, - prefix_filter_rule=prefix_filter_rule, - suffix_filter_rule=suffix_filter_rule, - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "CloudFunctionConfiguration") - SubElement(element, "CloudFunction", self.cloud_func) - super().toxml(element) - return element - - -C = TypeVar("C", bound="QueueConfig") - - -@dataclass(frozen=True) -class QueueConfig(CommonConfig): - """Queue configuration.""" - queue: Optional[str] = None - - def __post_init__(self): - if not self.queue: - raise ValueError("queue must be provided") - - @classmethod - def fromxml(cls: Type[C], element: ET.Element) -> C: - """Create new object with values from XML element.""" - queue = cast(str, findtext(element, "Queue", True)) - (events, config_id, prefix_filter_rule, - suffix_filter_rule) = cls.parsexml(element) - return cls( - queue=queue, - events=events, - config_id=config_id, - prefix_filter_rule=prefix_filter_rule, - suffix_filter_rule=suffix_filter_rule, - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "QueueConfiguration") - SubElement(element, "Queue", self.queue) - super().toxml(element) - return element - - -D = TypeVar("D", bound="TopicConfig") - - -@dataclass(frozen=True) -class TopicConfig(CommonConfig): - """Get topic configuration.""" - topic: Optional[str] = None - - def __post_init__(self): - if not self.topic: - raise ValueError("topic must be provided") - - @classmethod - def fromxml(cls: Type[D], element: ET.Element) -> D: - """Create new object with values from XML element.""" - topic = cast(str, findtext(element, "Topic", True)) - (events, config_id, prefix_filter_rule, - suffix_filter_rule) = cls.parsexml(element) - return cls( - topic=topic, - events=events, - config_id=config_id, - prefix_filter_rule=prefix_filter_rule, - suffix_filter_rule=suffix_filter_rule, - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise 
ValueError("element must be provided") - element = SubElement(element, "TopicConfiguration") - SubElement(element, "Topic", self.topic) - super().toxml(element) - return element - - -E = TypeVar("E", bound="NotificationConfig") - - -@dataclass(frozen=True) -class NotificationConfig: - """Notification configuration.""" - cloud_func_config_list: list[CloudFuncConfig] = field(default_factory=list) - queue_config_list: list[QueueConfig] = field(default_factory=list) - topic_config_list: list[TopicConfig] = field(default_factory=list) - - @classmethod - def fromxml(cls: Type[E], element: ET.Element) -> E: - """Create new object with values from XML element.""" - elements = findall(element, "CloudFunctionConfiguration") - cloud_func_config_list = [] - for tag in elements: - cloud_func_config_list.append(CloudFuncConfig.fromxml(tag)) - elements = findall(element, "QueueConfiguration") - queue_config_list = [] - for tag in elements: - queue_config_list.append(QueueConfig.fromxml(tag)) - elements = findall(element, "TopicConfiguration") - topic_config_list = [] - for tag in elements: - topic_config_list.append(TopicConfig.fromxml(tag)) - return cls( - cloud_func_config_list, queue_config_list, topic_config_list, - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - element = Element("NotificationConfiguration") - for cloud_func_config in self.cloud_func_config_list: - cloud_func_config.toxml(element) - for queue_config in self.queue_config_list: - queue_config.toxml(element) - for config in self.topic_config_list: - config.toxml(element) - return element diff --git a/minio/objectlockconfig.py b/minio/objectlockconfig.py deleted file mode 100644 index 3da29bc9..00000000 --- a/minio/objectlockconfig.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Request/response of PutObjectLockConfiguration and GetObjectLockConfiguration -APIs. 
-""" - -from __future__ import absolute_import, annotations - -from dataclasses import dataclass -from typing import Optional, Type, TypeVar, cast -from xml.etree import ElementTree as ET - -from .commonconfig import COMPLIANCE, ENABLED, GOVERNANCE -from .xml import Element, SubElement, find, findtext - -DAYS = "Days" -YEARS = "Years" - -A = TypeVar("A", bound="ObjectLockConfig") - - -@dataclass(frozen=True) -class ObjectLockConfig: - """Object lock configuration.""" - - mode: Optional[str] - duration: Optional[int] - duration_unit: Optional[str] - - def __post_init__(self): - if (self.mode is not None) ^ (self.duration is not None): - if self.mode is None: - raise ValueError("mode must be provided") - raise ValueError("duration must be provided") - if self.mode is not None and self.mode not in [GOVERNANCE, COMPLIANCE]: - raise ValueError(f"mode must be {GOVERNANCE} or {COMPLIANCE}") - if ( - self.duration is not None and - self.duration_unit not in [DAYS, YEARS] - ): - raise ValueError(f"duration unit must be {DAYS} or {YEARS}") - if self.duration_unit: - object.__setattr__( - self, "duration_unit", self.duration_unit.title(), - ) - - @classmethod - def fromxml(cls: Type[A], element: ET.Element) -> A: - """Create new object with values from XML element.""" - elem = find(element, "Rule") - if elem is None: - return cls(None, None, None) - elem = cast(ET.Element, find(elem, "DefaultRetention", True)) - mode = findtext(elem, "Mode") - duration_unit = DAYS - duration = findtext(elem, duration_unit) - if not duration: - duration_unit = YEARS - duration = findtext(elem, duration_unit) - if not duration: - raise ValueError(f"XML element <{DAYS}> or <{YEARS}> not found") - return cls( - mode=mode, - duration=int(duration), - duration_unit=duration_unit, - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - element = Element("ObjectLockConfiguration") - SubElement(element, "ObjectLockEnabled", ENABLED) - if self.mode: - rule = SubElement(element, "Rule") - retention = SubElement(rule, "DefaultRetention") - SubElement(retention, "Mode", self.mode) - if not self.duration_unit: - raise ValueError("duration unit must be provided") - SubElement(retention, self.duration_unit, str(self.duration)) - return element diff --git a/minio/replicationconfig.py b/minio/replicationconfig.py deleted file mode 100644 index 885f04e3..00000000 --- a/minio/replicationconfig.py +++ /dev/null @@ -1,437 +0,0 @@ -# -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Request/response of PutBucketReplication and GetBucketReplication APIs.""" - -from __future__ import absolute_import, annotations - -from abc import ABC -from dataclasses import dataclass -from typing import Optional, Type, TypeVar, cast -from xml.etree import ElementTree as ET - -from .commonconfig import DISABLED, BaseRule, Filter, check_status -from .xml import Element, SubElement, find, findall, findtext - -A = TypeVar("A", bound="Status") - - -@dataclass(frozen=True) -class Status(ABC): - """Status.""" - - status: str - - @classmethod - def fromxml(cls: Type[A], element: ET.Element) -> A: - """Create new object with values from XML element.""" - element = cast(ET.Element, find(element, cls.__name__, True)) - status = cast(str, findtext(element, "Status", True)) - return cls(status) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, self.__class__.__name__) - SubElement(element, "Status", self.status) - return element - - -@dataclass(frozen=True) -class SseKmsEncryptedObjects(Status): - """SSE KMS encrypted objects.""" - - -B = TypeVar("B", bound="SourceSelectionCriteria") - - -@dataclass(frozen=True) -class SourceSelectionCriteria: - """Source selection criteria.""" - - sse_kms_encrypted_objects: Optional[SseKmsEncryptedObjects] = None - - @classmethod - def fromxml(cls: Type[B], element: ET.Element) -> B: - """Create new object with values from XML element.""" - element = cast( - ET.Element, - find(element, "SourceSelectionCriteria", True), - ) - return cls( - None if find(element, "SseKmsEncryptedObjects") is None - else SseKmsEncryptedObjects.fromxml(element) - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "SourceSelectionCriteria") - if self.sse_kms_encrypted_objects: - self.sse_kms_encrypted_objects.toxml(element) - return element - - -@dataclass(frozen=True) -class ExistingObjectReplication(Status): - """Existing object replication.""" - - -@dataclass(frozen=True) -class DeleteMarkerReplication(Status): - """Delete marker replication.""" - - def __init__(self, status=DISABLED): - super().__init__(status) - - -C = TypeVar("C", bound="ReplicationTimeValue") - - -@dataclass(frozen=True) -class ReplicationTimeValue(ABC): - """Replication time value.""" - - minutes: Optional[int] = 15 - - @classmethod - def fromxml(cls: Type[C], element: ET.Element) -> C: - """Create new object with values from XML element.""" - element = cast(ET.Element, find(element, cls.__name__, True)) - minutes = findtext(element, "Minutes") - return cls(int(minutes) if minutes else None) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, self.__class__.__name__) - if self.minutes is not None: - SubElement(element, "Minutes", str(self.minutes)) - return element - - -@dataclass(frozen=True) -class Time(ReplicationTimeValue): - """Time.""" - - -D = TypeVar("D", bound="ReplicationTime") - - -@dataclass(frozen=True) -class ReplicationTime: - """Replication time.""" - - time: Time - status: str - - def __post_init__(self,): - if not self.time: - raise ValueError("time must be provided") - check_status(self.status) - - @classmethod - def fromxml(cls: Type[D], element: ET.Element) -> D: - 
"""Create new object with values from XML element.""" - element = cast(ET.Element, find(element, "ReplicationTime", True)) - time = Time.fromxml(element) - status = cast(str, findtext(element, "Status", True)) - return cls(time, status) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "ReplicationTime") - self.time.toxml(element) - SubElement(element, "Status", self.status) - return element - - -@dataclass(frozen=True) -class EventThreshold(ReplicationTimeValue): - """Event threshold.""" - - -E = TypeVar("E", bound="Metrics") - - -@dataclass(frozen=True) -class Metrics: - """Metrics.""" - - event_threshold: EventThreshold - status: str - - def __post_init__(self): - if not self.event_threshold: - raise ValueError("event threshold must be provided") - check_status(self.status) - - @classmethod - def fromxml(cls: Type[E], element: ET.Element) -> E: - """Create new object with values from XML element.""" - element = cast(ET.Element, find(element, "Metrics", True)) - event_threshold = EventThreshold.fromxml(element) - status = cast(str, findtext(element, "Status", True)) - return cls(event_threshold, status) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "Metrics") - self.event_threshold.toxml(element) - SubElement(element, "Status", self.status) - return element - - -F = TypeVar("F", bound="EncryptionConfig") - - -@dataclass(frozen=True) -class EncryptionConfig: - """Encryption configuration.""" - - replica_kms_key_id: Optional[str] = None - - @classmethod - def fromxml(cls: Type[F], element: ET.Element) -> F: - """Create new object with values from XML element.""" - element = cast( - ET.Element, - find(element, "EncryptionConfiguration", True), - ) - return cls(findtext(element, "ReplicaKmsKeyID")) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "EncryptionConfiguration") - SubElement(element, "ReplicaKmsKeyID", self.replica_kms_key_id) - return element - - -G = TypeVar("G", bound="AccessControlTranslation") - - -@dataclass(frozen=True) -class AccessControlTranslation: - """Access control translation.""" - - owner: str = "Destination" - - def __post_init__(self): - if not self.owner: - raise ValueError("owner must be provided") - - @classmethod - def fromxml(cls: Type[G], element: ET.Element) -> G: - """Create new object with values from XML element.""" - element = cast( - ET.Element, find(element, "AccessControlTranslation", True), - ) - owner = cast(str, findtext(element, "Owner", True)) - return cls(owner) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "AccessControlTranslation") - SubElement(element, "Owner", self.owner) - return element - - -H = TypeVar("H", bound="Destination") - - -@dataclass(frozen=True) -class Destination: - """Replication destination.""" - - bucket_arn: str - access_control_translation: Optional[AccessControlTranslation] = None - account: Optional[str] = None - encryption_config: Optional[EncryptionConfig] = None - metrics: Optional[Metrics] = None - replication_time: Optional[ReplicationTime] = None - 
storage_class: Optional[str] = None - - def __post_init__(self): - if not self.bucket_arn: - raise ValueError("bucket ARN must be provided") - - @classmethod - def fromxml(cls: Type[H], element: ET.Element) -> H: - """Create new object with values from XML element.""" - element = cast(ET.Element, find(element, "Destination", True)) - access_control_translation = ( - None if find(element, "AccessControlTranslation") is None - else AccessControlTranslation.fromxml(element) - ) - account = findtext(element, "Account") - bucket_arn = cast(str, findtext(element, "Bucket", True)) - encryption_config = ( - None if find(element, "EncryptionConfiguration") is None - else EncryptionConfig.fromxml(element) - ) - metrics = ( - None if find(element, "Metrics") is None - else Metrics.fromxml(element) - ) - replication_time = ( - None if find(element, "ReplicationTime") is None - else ReplicationTime.fromxml(element) - ) - storage_class = findtext(element, "StorageClass") - return cls(bucket_arn, access_control_translation, account, - encryption_config, metrics, replication_time, storage_class) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "Destination") - if self.access_control_translation: - self.access_control_translation.toxml(element) - if self.account is not None: - SubElement(element, "Account", self.account) - SubElement(element, "Bucket", self.bucket_arn) - if self.encryption_config: - self.encryption_config.toxml(element) - if self.metrics: - self.metrics.toxml(element) - if self.replication_time: - self.replication_time.toxml(element) - if self.storage_class: - SubElement(element, "StorageClass", self.storage_class) - return element - - -I = TypeVar("I", bound="Rule") - - -@dataclass(frozen=True) -class Rule(BaseRule): - """Replication rule. 
""" - - destination: Optional[Destination] = None - delete_marker_replication: Optional[DeleteMarkerReplication] = None - existing_object_replication: Optional[ExistingObjectReplication] = None - rule_filter: Optional[Filter] = None - rule_id: Optional[str] = None - prefix: Optional[str] = None - priority: Optional[int] = None - source_selection_criteria: Optional[SourceSelectionCriteria] = None - - def __post_init__(self): - if not self.destination: - raise ValueError("destination must be provided") - - def _require_subclass_implementation(self) -> None: - """Dummy abstract method to enforce abstract class behavior.""" - - @classmethod - def fromxml(cls: Type[I], element: ET.Element) -> I: - """Create new object with values from XML element.""" - status, rule_filter, rule_id = cls.parsexml(element) - delete_marker_replication = ( - None if find(element, "DeleteMarkerReplication") is None - else DeleteMarkerReplication.fromxml(element) - ) - destination = Destination.fromxml(element) - existing_object_replication = ( - None if find(element, "ExistingObjectReplication") is None - else ExistingObjectReplication.fromxml(element) - ) - prefix = findtext(element, "Prefix") - priority = findtext(element, "Priority") - source_selection_criteria = ( - None if find(element, "SourceSelectionCriteria") is None - else SourceSelectionCriteria.fromxml(element) - ) - - return cls( - status=status, - rule_filter=rule_filter, - rule_id=rule_id, - destination=destination, - delete_marker_replication=delete_marker_replication, - existing_object_replication=existing_object_replication, - prefix=prefix, - priority=int(priority) if priority else None, - source_selection_criteria=source_selection_criteria, - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "Rule") - super().toxml(element) - if self.delete_marker_replication: - self.delete_marker_replication.toxml(element) - if self.destination: - self.destination.toxml(element) - if self.existing_object_replication: - self.existing_object_replication.toxml(element) - if self.prefix is not None: - SubElement(element, "Prefix", self.prefix) - if self.priority is not None: - SubElement(element, "Priority", str(self.priority)) - if self.source_selection_criteria: - self.source_selection_criteria.toxml(element) - return element - - -J = TypeVar("J", bound="ReplicationConfig") - - -@dataclass(frozen=True) -class ReplicationConfig: - """Replication configuration.""" - - role: str - rules: list[Rule] - - def __post_init__(self): - if not self.rules: - raise ValueError("rules must be provided") - if len(self.rules) > 1000: - raise ValueError("more than 1000 rules are not supported") - - @classmethod - def fromxml(cls: Type[J], element: ET.Element) -> J: - """Create new object with values from XML element.""" - role = cast(str, findtext(element, "Role", True)) - elements = findall(element, "Rule") - rules = [] - for tag in elements: - rules.append(Rule.fromxml(tag)) - return cls(role, rules) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - element = Element("ReplicationConfiguration") - SubElement(element, "Role", self.role) - for rule in self.rules: - rule.toxml(element) - return element diff --git a/minio/retention.py b/minio/retention.py deleted file mode 100644 index d7c5f086..00000000 --- a/minio/retention.py +++ /dev/null @@ -1,69 +0,0 @@ -# -*- coding: utf-8 -*- -# MinIO Python 
Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Request/response of PutObjectRetention and GetObjectRetention APIs.""" - -from __future__ import absolute_import, annotations - -from dataclasses import dataclass -from datetime import datetime -from typing import Optional, Type, TypeVar, cast -from xml.etree import ElementTree as ET - -from .commonconfig import COMPLIANCE, GOVERNANCE -from .time import from_iso8601utc, to_iso8601utc -from .xml import Element, SubElement, findtext - -A = TypeVar("A", bound="Retention") - - -@dataclass(frozen=True) -class Retention: - """Retention configuration.""" - - mode: str - retain_until_date: datetime - - def __post_init__(self): - if self.mode not in [GOVERNANCE, COMPLIANCE]: - raise ValueError(f"mode must be {GOVERNANCE} or {COMPLIANCE}") - if not isinstance(self.retain_until_date, datetime): - raise ValueError( - "retain until date must be datetime type", - ) - - @classmethod - def fromxml(cls: Type[A], element: ET.Element) -> A: - """Create new object with values from XML element.""" - mode = cast(str, findtext(element, "Mode", True)) - retain_until_date = cast( - datetime, - from_iso8601utc( - cast(str, findtext(element, "RetainUntilDate", True)), - ), - ) - return cls(mode=mode, retain_until_date=retain_until_date) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - element = Element("Retention") - SubElement(element, "Mode", self.mode) - SubElement( - element, - "RetainUntilDate", - to_iso8601utc(self.retain_until_date), - ) - return element diff --git a/minio/select.py b/minio/select.py deleted file mode 100644 index 9f25fd8e..00000000 --- a/minio/select.py +++ /dev/null @@ -1,417 +0,0 @@ -# -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Request/response of SelectObjectContent API.""" - -from __future__ import absolute_import - -from abc import ABC, abstractmethod -from binascii import crc32 -from dataclasses import dataclass -from io import BytesIO -from typing import Optional -from xml.etree import ElementTree as ET - -from .error import MinioException -from .xml import Element, SubElement, findtext - -COMPRESSION_TYPE_NONE = "NONE" -COMPRESSION_TYPE_GZIP = "GZIP" -COMPRESSION_TYPE_BZIP2 = "BZIP2" - -FILE_HEADER_INFO_USE = "USE" -FILE_HEADER_INFO_IGNORE = "IGNORE" -FILE_HEADER_INFO_NONE = "NONE" - -JSON_TYPE_DOCUMENT = "DOCUMENT" -JSON_TYPE_LINES = "LINES" - -QUOTE_FIELDS_ALWAYS = "ALWAYS" -QUOTE_FIELDS_ASNEEDED = "ASNEEDED" - - -@dataclass(frozen=True) -class InputSerialization(ABC): - """Input serialization.""" - - compression_type: Optional[str] = None - - def __post_init__(self): - if ( - self.compression_type is not None and - self.compression_type not in [ - COMPRESSION_TYPE_NONE, - COMPRESSION_TYPE_GZIP, - COMPRESSION_TYPE_BZIP2, - ] - ): - raise ValueError( - f"compression type must be {COMPRESSION_TYPE_NONE}, " - f"{COMPRESSION_TYPE_GZIP} or {COMPRESSION_TYPE_BZIP2}" - ) - - def toxml(self, element): - """Convert to XML.""" - if self.compression_type is not None: - SubElement(element, "CompressionType", self.compression_type) - return element - - -@dataclass(frozen=True) -class CSVInputSerialization(InputSerialization): - """CSV input serialization.""" - - allow_quoted_record_delimiter: Optional[str] = None - comments: Optional[str] = None - field_delimiter: Optional[str] = None - file_header_info: Optional[str] = None - quote_character: Optional[str] = None - quote_escape_character: Optional[str] = None - record_delimiter: Optional[str] = None - - def __post_init__(self): - if ( - self.file_header_info is not None and - self.file_header_info not in [ - FILE_HEADER_INFO_USE, - FILE_HEADER_INFO_IGNORE, - FILE_HEADER_INFO_NONE, - ] - ): - raise ValueError( - f"file header info must be {FILE_HEADER_INFO_USE}, " - f"{FILE_HEADER_INFO_IGNORE} or {FILE_HEADER_INFO_NONE}" - ) - - def toxml(self, element): - """Convert to XML.""" - super().toxml(element) - element = SubElement(element, "CSV") - if self.allow_quoted_record_delimiter is not None: - SubElement( - element, - "AllowQuotedRecordDelimiter", - self.allow_quoted_record_delimiter, - ) - if self.comments is not None: - SubElement(element, "Comments", self.comments) - if self.field_delimiter is not None: - SubElement(element, "FieldDelimiter", self.field_delimiter) - if self.file_header_info is not None: - SubElement(element, "FileHeaderInfo", self.file_header_info) - if self.quote_character is not None: - SubElement(element, "QuoteCharacter", self.quote_character) - if self.quote_escape_character is not None: - SubElement( - element, - "QuoteEscapeCharacter", - self.quote_escape_character, - ) - if self.record_delimiter is not None: - SubElement(element, "RecordDelimiter", self.record_delimiter) - - -@dataclass(frozen=True) -class JSONInputSerialization(InputSerialization): - """JSON input serialization.""" - - json_type: Optional[str] = None - - def __post_init__(self): - if ( - self.json_type is not None and - self.json_type not in [JSON_TYPE_DOCUMENT, JSON_TYPE_LINES] - ): - raise ValueError( - f"json type must be {JSON_TYPE_DOCUMENT} or {JSON_TYPE_LINES}" - ) - - def toxml(self, element): - """Convert to XML.""" - super().toxml(element) - element = SubElement(element, "JSON") - if self.json_type is not None: - SubElement(element, "Type", self.json_type) 
- - -@dataclass(frozen=True) -class ParquetInputSerialization(InputSerialization): - """Parquet input serialization.""" - - def toxml(self, element): - """Convert to XML.""" - super().toxml(element) - return SubElement(element, "Parquet") - - -@dataclass(frozen=True) -class OutputSerialization(ABC): - """Output serialization.""" - - @abstractmethod - def toxml(self, element): - """Convert to XML.""" - - -@dataclass(frozen=True) -class CSVOutputSerialization(OutputSerialization): - """CSV output serialization.""" - - field_delimiter: Optional[str] = None - quote_character: Optional[str] = None - quote_escape_character: Optional[str] = None - quote_fields: Optional[str] = None - record_delimiter: Optional[str] = None - - def __post_init__(self): - if ( - self.quote_fields is not None and - self.quote_fields not in [ - QUOTE_FIELDS_ALWAYS, QUOTE_FIELDS_ASNEEDED, - ] - ): - raise ValueError( - f"quote fields must be {QUOTE_FIELDS_ALWAYS} or " - f"{QUOTE_FIELDS_ASNEEDED}" - ) - - def toxml(self, element): - """Convert to XML.""" - element = SubElement(element, "CSV") - if self.field_delimiter is not None: - SubElement(element, "FieldDelimiter", self.field_delimiter) - if self.quote_character is not None: - SubElement(element, "QuoteCharacter", self.quote_character) - if self.quote_escape_character is not None: - SubElement( - element, - "QuoteEscapeCharacter", - self.quote_escape_character, - ) - if self.quote_fields is not None: - SubElement(element, "QuoteFields", self.quote_fields) - if self.record_delimiter is not None: - SubElement(element, "RecordDelimiter", self.record_delimiter) - - -@dataclass(frozen=True) -class JSONOutputSerialization(OutputSerialization): - """JSON output serialization.""" - - record_delimiter: Optional[str] = None - - def toxml(self, element): - """Convert to XML.""" - element = SubElement(element, "JSON") - if self.record_delimiter is not None: - SubElement(element, "RecordDelimiter", self.record_delimiter) - - -@dataclass(frozen=True) -class SelectRequest: - """Select object content request.""" - - expression: str - input_serialization: InputSerialization - output_serialization: OutputSerialization - request_progress: bool = False - scan_start_range: Optional[int] = None - scan_end_range: Optional[int] = None - - def toxml(self, element): - """Convert to XML.""" - element = Element("SelectObjectContentRequest") - SubElement(element, "Expression", self.expression) - SubElement(element, "ExpressionType", "SQL") - self.input_serialization.toxml( - SubElement(element, "InputSerialization"), - ) - self.output_serialization.toxml( - SubElement(element, "OutputSerialization"), - ) - if self.request_progress: - SubElement( - SubElement(element, "RequestProgress"), "Enabled", "true", - ) - if self.scan_start_range or self.scan_end_range: - tag = SubElement(element, "ScanRange") - if self.scan_start_range: - SubElement(tag, "Start", self.scan_start_range) - if self.scan_end_range: - SubElement(tag, "End", self.scan_end_range) - return element - - -def _read(reader, size): - """Wrapper to RawIOBase.read() to error out on short reads.""" - data = reader.read(size) - if len(data) != size: - raise IOError("insufficient data") - return data - - -def _int(data): - """Convert byte data to big-endian int.""" - return int.from_bytes(data, byteorder="big") - - -def _crc32(data): - """Wrapper to binascii.crc32().""" - return crc32(data) & 0xffffffff - - -def _decode_header(data): - """Decode header data.""" - reader = BytesIO(data) - headers = {} - while True: - length = 
reader.read(1) - if not length: - break - name = _read(reader, _int(length)) - if _int(_read(reader, 1)) != 7: - raise IOError("header value type is not 7") - value = _read(reader, _int(_read(reader, 2))) - headers[name.decode()] = value.decode() - return headers - - -@dataclass(frozen=True) -class Stats: - """Progress/Stats information.""" - - bytes_scanned: Optional[str] = None - bytes_processed: Optional[str] = None - bytes_returned: Optional[str] = None - - def __init__(self, data): - element = ET.fromstring(data.decode()) - object.__setattr__( - self, - "bytes_scanned", - findtext(element, "BytesScanned"), - ) - object.__setattr__( - self, - "bytes_processed", - findtext(element, "BytesProcessed"), - ) - object.__setattr__( - self, - "bytes_returned", - findtext(element, "BytesReturned"), - ) - - -class SelectObjectReader: - """ - BufferedIOBase compatible reader represents response data of - Minio.select_object_content() API. - """ - - def __init__(self, response): - self._response = response - self._stats = None - self._payload = None - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, exc_traceback): - return self.close() - - def readable(self): # pylint: disable=no-self-use - """Return this is readable.""" - return True - - def writeable(self): # pylint: disable=no-self-use - """Return this is not writeable.""" - return False - - def close(self): - """Close response and release network resources.""" - self._response.close() - self._response.release_conn() - - def stats(self): - """Get stats information.""" - return self._stats - - def _read(self): - """Read and decode response.""" - if self._response.isclosed(): - return 0 - - prelude = _read(self._response, 8) - prelude_crc = _read(self._response, 4) - if _crc32(prelude) != _int(prelude_crc): - raise IOError( - f"prelude CRC mismatch; expected: {_crc32(prelude)}, " - f"got: {_int(prelude_crc)}" - ) - - total_length = _int(prelude[:4]) - data = _read(self._response, total_length - 8 - 4 - 4) - message_crc = _int(_read(self._response, 4)) - if _crc32(prelude + prelude_crc + data) != message_crc: - raise IOError( - f"message CRC mismatch; " - f"expected: {_crc32(prelude + prelude_crc + data)}, " - f"got: {message_crc}" - ) - - header_length = _int(prelude[4:]) - headers = _decode_header(data[:header_length]) - - if headers.get(":message-type") == "error": - raise MinioException( - f"{headers.get(':error-code')}: " - f"{headers.get(':error-message')}" - ) - - if headers.get(":event-type") == "End": - return 0 - - payload_length = total_length - header_length - 16 - if headers.get(":event-type") == "Cont" or payload_length < 1: - return self._read() - - payload = data[header_length:header_length+payload_length] - - if headers.get(":event-type") in ["Progress", "Stats"]: - self._stats = Stats(payload) - return self._read() - - if headers.get(":event-type") == "Records": - self._payload = payload - return len(payload) - - raise MinioException( - f"unknown event-type {headers.get(':event-type')}", - ) - - def stream(self, num_bytes=32*1024): - """ - Stream extracted payload from response data. Upon completion, caller - should call self.close() to release network resources. 
- """ - while self._read() > 0: - while self._payload: - result = self._payload - if num_bytes < len(self._payload): - result = self._payload[:num_bytes] - self._payload = self._payload[len(result):] - yield result diff --git a/minio/signer.py b/minio/signer.py index 304d4d8c..bee8f2a1 100644 --- a/minio/signer.py +++ b/minio/signer.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015-2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,19 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Signature V4 signing methods.""" -""" -minio.signer -~~~~~~~~~~~~~~~ - -This module implements all helpers for AWS Signature version '4' support. - -:copyright: (c) 2015 by MinIO, Inc. -:license: Apache 2.0, see LICENSE for more details. - -""" - -from __future__ import absolute_import, annotations +from __future__ import annotations import hashlib import hmac @@ -36,11 +26,10 @@ from typing import cast from urllib.parse import SplitResult -from urllib3._collections import HTTPHeaderDict - from . import time +from .checksum import sha256_hash +from .compat import HTTPHeaderDict, queryencode from .credentials import Credentials -from .helpers import queryencode, sha256_hash SIGN_V4_ALGORITHM = 'AWS4-HMAC-SHA256' _MULTI_SPACE_REGEX = re.compile(r"( +)") diff --git a/minio/sse.py b/minio/sse.py index e65843d3..01dc25c4 100644 --- a/minio/sse.py +++ b/minio/sse.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2018 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,23 +14,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -minio.sse -~~~~~~~~~~~~~~~~~~~ +"""Server-side encryption.""" -This module contains core API parsers. - -:copyright: (c) 2018 by MinIO, Inc. -:license: Apache 2.0, see LICENSE for more details. - -""" -from __future__ import absolute_import, annotations +from __future__ import annotations import base64 import json from abc import ABC, abstractmethod from typing import Any, cast +from .checksum import md5sum_hash + class Sse(ABC): """Server-side encryption base class.""" @@ -58,8 +52,6 @@ def __init__(self, key: bytes): "Pass raw bytes, not the base64 encoded value.", ) b64key = base64.b64encode(key).decode() - from .helpers import \ - md5sum_hash # pylint: disable=import-outside-toplevel md5key = cast(str, md5sum_hash(key)) self._headers: dict[str, str] = { "X-Amz-Server-Side-Encryption-Customer-Algorithm": "AES256", diff --git a/minio/sseconfig.py b/minio/sseconfig.py deleted file mode 100644 index 09f71184..00000000 --- a/minio/sseconfig.py +++ /dev/null @@ -1,101 +0,0 @@ -# -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Request/response of PutBucketEncryption and GetBucketEncryption APIs.""" - -from __future__ import absolute_import, annotations - -from abc import ABC -from dataclasses import dataclass -from typing import Optional, Type, TypeVar, cast -from xml.etree import ElementTree as ET - -from .xml import Element, SubElement, find, findtext - -AES256 = "AES256" -AWS_KMS = "aws:kms" - -A = TypeVar("A", bound="Rule") - - -@dataclass(frozen=True) -class Rule(ABC): - """Server-side encryption rule. """ - - sse_algorithm: str - kms_master_key_id: Optional[str] = None - - @classmethod - def new_sse_s3_rule(cls: Type[A]) -> A: - """Create SSE-S3 rule.""" - return cls(sse_algorithm=AES256) - - @classmethod - def new_sse_kms_rule( - cls: Type[A], - kms_master_key_id: Optional[str] = None, - ) -> A: - """Create new SSE-KMS rule.""" - return cls(sse_algorithm=AWS_KMS, kms_master_key_id=kms_master_key_id) - - @classmethod - def fromxml(cls: Type[A], element: ET.Element) -> A: - """Create new object with values from XML element.""" - element = cast( - ET.Element, - find(element, "ApplyServerSideEncryptionByDefault", True), - ) - return cls( - sse_algorithm=cast(str, findtext(element, "SSEAlgorithm", True)), - kms_master_key_id=findtext(element, "KMSMasterKeyID"), - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - if element is None: - raise ValueError("element must be provided") - element = SubElement(element, "Rule") - tag = SubElement(element, "ApplyServerSideEncryptionByDefault") - SubElement(tag, "SSEAlgorithm", self.sse_algorithm) - if self.kms_master_key_id is not None: - SubElement(tag, "KMSMasterKeyID", self.kms_master_key_id) - return element - - -B = TypeVar("B", bound="SSEConfig") - - -@dataclass(frozen=True) -class SSEConfig: - """server-side encryption configuration.""" - - rule: Rule - - def __post_init__(self): - if not self.rule: - raise ValueError("rule must be provided") - - @classmethod - def fromxml(cls: Type[B], element: ET.Element) -> B: - """Create new object with values from XML element.""" - element = cast(ET.Element, find(element, "Rule", True)) - return cls(Rule.fromxml(element)) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - element = Element("ServerSideEncryptionConfiguration") - self.rule.toxml(element) - return element diff --git a/minio/tagging.py b/minio/tagging.py deleted file mode 100644 index efe0f031..00000000 --- a/minio/tagging.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
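The `sseconfig.py` module being removed here maps to the `ServerSideEncryptionConfiguration` document of the PutBucketEncryption API. A minimal standard-library sketch of the XML its `Rule.toxml()` produced; the KMS key ID is illustrative:

```python
from typing import Optional
from xml.etree import ElementTree as ET


def sse_config_xml(sse_algorithm: str,
                   kms_key_id: Optional[str] = None) -> bytes:
    """Build a ServerSideEncryptionConfiguration document."""
    root = ET.Element("ServerSideEncryptionConfiguration")
    rule = ET.SubElement(root, "Rule")
    default = ET.SubElement(rule, "ApplyServerSideEncryptionByDefault")
    ET.SubElement(default, "SSEAlgorithm").text = sse_algorithm
    if kms_key_id is not None:
        ET.SubElement(default, "KMSMasterKeyID").text = kms_key_id
    return ET.tostring(root)


print(sse_config_xml("AES256").decode())            # SSE-S3 rule
print(sse_config_xml("aws:kms", "my-key").decode())  # SSE-KMS rule
```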
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tagging for bucket and object.""" - -from __future__ import absolute_import, annotations - -from dataclasses import dataclass -from typing import Optional, Type, TypeVar, cast -from xml.etree import ElementTree as ET - -from .commonconfig import Tags -from .xml import Element, SubElement, find - -A = TypeVar("A", bound="Tagging") - - -@dataclass(frozen=True) -class Tagging: - """Tagging for buckets and objects.""" - - tags: Optional[Tags] - - @classmethod - def fromxml(cls: Type[A], element: ET.Element) -> A: - """Create new object with values from XML element.""" - element = cast(ET.Element, find(element, "TagSet", True)) - tags = ( - None if find(element, "Tag") is None - else Tags.fromxml(element) - ) - return cls(tags=tags) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - element = Element("Tagging") - if self.tags: - self.tags.toxml(SubElement(element, "TagSet")) - return element diff --git a/minio/time.py b/minio/time.py index 2fa794d7..4b22ecae 100644 --- a/minio/time.py +++ b/minio/time.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,9 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Time formatter for S3 APIs.""" +"""Time functions and formatter for S3 APIs.""" -from __future__ import absolute_import, annotations +from __future__ import annotations import time as ctime from datetime import datetime, timezone diff --git a/minio/versioningconfig.py b/minio/versioningconfig.py deleted file mode 100644 index 41b223ed..00000000 --- a/minio/versioningconfig.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
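Likewise, the removed `tagging.py` wraps a `TagSet` for the bucket and object tagging APIs. A standard-library sketch of the document shape; the tag keys and values are illustrative, and the `Tag`/`Key`/`Value` layout is the standard S3 tagging schema, since `Tags.toxml()` itself lives in `commonconfig` and is not shown in this patch:

```python
from xml.etree import ElementTree as ET


def tagging_xml(tags: dict[str, str]) -> bytes:
    """Build a Tagging document as produced by Tagging.toxml()."""
    root = ET.Element("Tagging")
    tag_set = ET.SubElement(root, "TagSet")
    for key, value in tags.items():
        tag = ET.SubElement(tag_set, "Tag")
        ET.SubElement(tag, "Key").text = key
        ET.SubElement(tag, "Value").text = value
    return ET.tostring(root)


print(tagging_xml({"Project": "alpha", "Owner": "dev"}).decode())
```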
- -"""Request/response of PutBucketVersioning and GetBucketVersioning APIs.""" - -from __future__ import absolute_import, annotations - -from dataclasses import dataclass -from typing import List, Optional, Type, TypeVar, Union, cast -from xml.etree import ElementTree as ET - -from .commonconfig import DISABLED, ENABLED -from .xml import Element, SubElement, findall, findtext - -OFF = "Off" -SUSPENDED = "Suspended" - -A = TypeVar("A", bound="VersioningConfig") - - -@dataclass(frozen=True) -class VersioningConfig: - """Versioning configuration.""" - - status: Optional[str] = None - mfa_delete: Optional[str] = None - excluded_prefixes: Optional[list[str]] = None - exclude_folders: bool = False - - def __post_init__(self): - if self.status is not None and self.status not in [ENABLED, SUSPENDED]: - raise ValueError(f"status must be {ENABLED} or {SUSPENDED}") - if ( - self.mfa_delete is not None and - self.mfa_delete not in [ENABLED, DISABLED] - ): - raise ValueError(f"MFA delete must be {ENABLED} or {DISABLED}") - - @property - def status_string(self) -> str: - """Convert status to status string. """ - return OFF if self.status is None else self.status - - @classmethod - def fromxml(cls: Type[A], element: ET.Element) -> A: - """Create new object with values from XML element.""" - status = findtext(element, "Status") - mfa_delete = findtext(element, "MFADelete") - excluded_prefixes = [ - prefix.text - for prefix in findall( - element, - "ExcludedPrefixes/Prefix", - ) - ] - exclude_folders = findtext(element, "ExcludeFolders") == "true" - return cls( - status=status, - mfa_delete=mfa_delete, - excluded_prefixes=cast(Union[List[str], None], excluded_prefixes), - exclude_folders=exclude_folders, - ) - - def toxml(self, element: Optional[ET.Element]) -> ET.Element: - """Convert to XML.""" - element = Element("VersioningConfiguration") - if self.status: - SubElement(element, "Status", self.status) - if self.mfa_delete: - SubElement(element, "MFADelete", self.mfa_delete) - for prefix in self.excluded_prefixes or []: - SubElement( - SubElement(element, "ExcludedPrefixes"), - "Prefix", - prefix, - ) - if self.exclude_folders: - SubElement(element, "ExcludeFolders", "true") - return element diff --git a/minio/xml.py b/minio/xml.py index 7dbe051a..92cb0d15 100644 --- a/minio/xml.py +++ b/minio/xml.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,12 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""XML utility module.""" +"""XML encoding and decoding functions.""" -from __future__ import absolute_import, annotations +from __future__ import annotations import io -from typing import Optional, Type, TypeVar +from typing import Optional, TypeVar from xml.etree import ElementTree as ET from typing_extensions import Protocol @@ -45,24 +45,30 @@ def SubElement( # pylint: disable=invalid-name return element -def _get_namespace(element: ET.Element) -> str: - """Exact namespace if found.""" - start = element.tag.find("{") - if start < 0: - return "" - start += 1 - end = element.tag.find("}") - if end < 0: - return "" - return element.tag[start:end] +def _namespaced(element: ET.Element, name: str) -> tuple[str, dict[str, str]]: + """Namespace arguments for find and findall.""" + def _get_namespace() -> str: + """Exact namespace if found.""" + start = element.tag.find("{") + if start < 0: + return "" + start += 1 + end = element.tag.find("}") + if end < 0: + return "" + return element.tag[start:end] + + namespace = _get_namespace() + if namespace: + name = "/".join(f"ns:{token}" for token in name.split("/")) + return name, {"ns": namespace} + return name, {} def findall(element: ET.Element, name: str) -> list[ET.Element]: """Namespace aware ElementTree.Element.findall().""" - namespace = _get_namespace(element) - if namespace: - name = "/".join(["ns:" + token for token in name.split("/")]) - return element.findall(name, {"ns": namespace} if namespace else {}) + name, namespaces = _namespaced(element, name) + return element.findall(name, namespaces=namespaces) def find( @@ -71,11 +77,8 @@ def find( strict: bool = False, ) -> Optional[ET.Element]: """Namespace aware ElementTree.Element.find().""" - namespace = _get_namespace(element) - elem = element.find( - "ns:" + name if namespace else name, - {"ns": namespace} if namespace else {}, - ) + name, namespaces = _namespaced(element, name) + elem = element.find(name, namespaces=namespaces) if strict and elem is None: raise ValueError(f"XML element <{name}> not found") return elem @@ -85,30 +88,32 @@ def findtext( element: ET.Element, name: str, strict: bool = False, + default: Optional[str] = None, ) -> Optional[str]: """ Namespace aware ElementTree.Element.findtext() with strict flag raises ValueError if element name not exist. """ elem = find(element, name, strict=strict) - return None if elem is None else (elem.text or "") + return default if elem is None else (elem.text or "") -A = TypeVar("A") +UnmarshalT = TypeVar("UnmarshalT", bound="UnmarshalProtocol") -class FromXmlType(Protocol): +class UnmarshalProtocol(Protocol): """typing stub for class with `fromxml` method""" @classmethod - def fromxml(cls: Type[A], element: ET.Element) -> A: - """Create python object with values from XML element.""" - - -B = TypeVar("B", bound=FromXmlType) + def fromxml(cls: type[UnmarshalT], element: ET.Element) -> UnmarshalT: + """ + Create object by values from XML element. + Code discipline: + 1. Do not use find() to look for its own `Element` if needed. 
+        """
 
 
-def unmarshal(cls: Type[B], xmlstring: str) -> B:
+def unmarshal(cls: type[UnmarshalT], xmlstring: str) -> UnmarshalT:
     """Unmarshal given XML string to an object of passed class."""
     return cls.fromxml(ET.fromstring(xmlstring))
 
@@ -124,13 +129,20 @@ def getbytes(element: ET.Element) -> bytes:
     return data.getvalue()
 
 
-class ToXmlType(Protocol):
+class MarshalT(Protocol):
     """typing stub for class with `toxml` method"""
 
     def toxml(self, element: Optional[ET.Element]) -> ET.Element:
-        """Convert python object to ElementTree.Element."""
+        """
+        Convert python object to ElementTree.Element.
+        Code discipline:
+        1. Do not create a `SubElement` for itself; the caller passes it.
+        2. Always return the passed `Element`.
+        3. For the root, the `element` argument is always `None`, hence
+           the root `Element` must be created.
+        """
 
 
-def marshal(obj: ToXmlType) -> bytes:
+def marshal(obj: MarshalT) -> bytes:
     """Get XML data as bytes of ElementTree.Element."""
     return getbytes(obj.toxml(None))
diff --git a/setup.py b/setup.py
index f10f5e16..06f0c9bb 100644
--- a/setup.py
+++ b/setup.py
@@ -1,5 +1,6 @@
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015 MinIO, Inc.
+# -*- coding: utf-8 -*-
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,41 +14,60 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import codecs
+"""Setup definitions."""
+
+from __future__ import annotations
+
 import re
-import sys
+from pathlib import Path
 
-from setuptools import setup
+from setuptools import find_packages, setup
 
-if sys.argv[-1] == "publish":
-    sys.argv = sys.argv[:-1] + ["sdist", "upload"]
+ROOT = Path(__file__).parent
 
-with codecs.open("minio/__init__.py") as file:
-    version = re.search(
-        r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
-        file.read(),
-        re.MULTILINE,
-    ).group(1)
+# Read version from minio/__init__.py
+init_py = ROOT / "minio" / "__init__.py"
+version_match = re.search(
+    r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
+    init_py.read_text(encoding="utf-8"),
+    re.MULTILINE,
+)
+if not version_match:
+    raise RuntimeError("Unable to find __version__ in minio/__init__.py")
+version = version_match.group(1)
 
-with codecs.open("README.md", encoding="utf-8") as file:
-    readme = file.read()
+# Long description
+readme = (ROOT / "README.md").read_text(encoding="utf-8")
 
 setup(
     name="minio",
+    version=version,
     description="MinIO Python SDK for Amazon S3 Compatible Cloud Storage",
+    long_description=readme,
+    long_description_content_type="text/markdown",
     author="MinIO, Inc.",
-    url="https://github.com/minio/minio-py",
-    download_url="https://github.com/minio/minio-py/releases",
     author_email="dev@min.io",
-    version=version,
-    long_description_content_type="text/markdown",
-    package_dir={"minio": "minio"},
-    packages=["minio", "minio.credentials"],
-    python_requires=">=3.9",
-    install_requires=["certifi", "urllib3", "argon2-cffi",
-                      "pycryptodome", "typing-extensions"],
-    tests_require=[],
+    url="https://github.com/minio/minio-py",
+    project_urls={
+        "Source": "https://github.com/minio/minio-py",
+        "Issues": "https://github.com/minio/minio-py/issues",
+        "Changelog": "https://github.com/minio/minio-py/releases",
+    },
     license="Apache-2.0",
+    package_dir={"": "."},
+    packages=find_packages(include=["minio", "minio.*"]),
+    python_requires=">=3.10",
+    install_requires=[
+        "certifi",
"urllib3", + "argon2-cffi", + "pycryptodome", + "typing-extensions", + ], + include_package_data=True, + package_data={ + "minio": ["LICENSE", "README.md", "py.typed"], + }, classifiers=[ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", @@ -55,14 +75,11 @@ "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Topic :: Software Development :: Libraries :: Python Modules", ], - long_description=readme, - package_data={"": ["LICENSE", "README.md", "py.typed"]}, - include_package_data=True, ) diff --git a/tests/__init__.py b/tests/__init__.py index e69de29b..0b5c93a7 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/functional/tests.py b/tests/functional/tests.py index 8e5456df..94c666d4 100644 --- a/tests/functional/tests.py +++ b/tests/functional/tests.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015, 2016, 2017, 2018 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -18,7 +18,7 @@ # pylint: disable=too-many-lines,broad-exception-raised """Functional tests of minio-py.""" -from __future__ import absolute_import, division +from __future__ import division import hashlib import io @@ -39,24 +39,24 @@ import certifi import urllib3 -from urllib3._collections import HTTPHeaderDict from minio import Minio -from minio.checksum import Algorithm -from minio.commonconfig import ENABLED, REPLACE, CopySource, SnowballObject -from minio.datatypes import PostPolicy -from minio.deleteobjects import DeleteObject +from minio.args import Directive, SnowballObject, SourceObject +from minio.checksum import CRC32C, Algorithm +from minio.checksum import Type as ChecksumType +from minio.checksum import base64_string +from minio.compat import HTTPHeaderDict, HTTPQueryDict from minio.error import S3Error -from minio.helpers import HTTPQueryDict -from minio.select import (CSVInputSerialization, CSVOutputSerialization, - SelectRequest) +from minio.models import (DeleteRequest, PostPolicy, + SelectObjectContentRequest, VersioningConfig) from minio.sse import SseCustomerKey from minio.time import to_http_header -from minio.versioningconfig import SUSPENDED, VersioningConfig _client = None # pylint: disable=invalid-name _test_file = None # pylint: disable=invalid-name _large_file = None # pylint: disable=invalid-name +_test_file_crc32c = None # pylint: disable=invalid-name +_large_file_crc32c = None # pylint: disable=invalid-name _is_aws = None # pylint: disable=invalid-name KB = 1024 MB = 1024 * KB @@ -291,10 +291,10 @@ def test_select_object_content(log_entry): length=len(content.getvalue()), ) - request = SelectRequest( + request = SelectObjectContentRequest( "select * from s3object", - CSVInputSerialization(), - CSVOutputSerialization(), + SelectObjectContentRequest.CSVInputSerialization(), + SelectObjectContentRequest.CSVOutputSerialization(), request_progress=True, ) data = _client.select_object_content( @@ -339,11 +339,36 @@ def _test_fput_object(bucket_name, object_name, filename, metadata, sse): sse=sse, ) - _client.stat_object( + response = _client.stat_object( bucket_name=bucket_name, object_name=object_name, ssec=sse, + fetch_checksum=True, ) + + if filename == _test_file: + if response.checksum_type != ChecksumType.FULL_OBJECT: + raise ValueError( + f"checksum type: expected: {ChecksumType.FULL_OBJECT}; " + f"got: {response.checksum_type}", + ) + if response.checksums.get(Algorithm.CRC32C) != _test_file_crc32c: + raise ValueError( + f"checksum crc32c: expected: {_test_file_crc32c}; " + f"got: {response.checksums.get(Algorithm.CRC32C)}", + ) + elif filename == _large_file: + if response.checksum_type != ChecksumType.COMPOSITE: + raise ValueError( + f"checksum type: expected: {ChecksumType.COMPOSITE}; " + f"got: {response.checksum_type}", + ) + if response.checksums.get(Algorithm.CRC32C) != _large_file_crc32c: + raise ValueError( + f"checksum crc32c: expected: {_large_file_crc32c}; " + f"got: {response.checksums.get(Algorithm.CRC32C)}", + ) + finally: _client.remove_object(bucket_name=bucket_name, object_name=object_name) _client.remove_bucket(bucket_name=bucket_name) @@ -421,11 +446,11 @@ def _validate_stat(st_obj, expected_size, expected_meta, version_id=None): } received_etag = st_obj.etag received_metadata = { - key.lower(): value for key, value in (st_obj.metadata or {}).items() + key.lower(): value for key, value in + (st_obj.user_metadata or {}).items() } received_content_type = st_obj.content_type received_size = st_obj.size - received_is_dir = st_obj.is_dir if 
not received_etag: raise ValueError('No Etag value is returned.') @@ -448,12 +473,8 @@ def _validate_stat(st_obj, expected_size, expected_meta, version_id=None): raise ValueError('Incorrect file size. Expected: 11534336', ', received: ', received_size) - if received_is_dir: - raise ValueError('Incorrect file type. Expected: is_dir=False', - ', received: is_dir=', received_is_dir) - if not all(i in received_metadata.items() for i in expected_meta.items()): - raise ValueError("Metadata key 'x-amz-meta-testing' not found") + raise ValueError("expected metadata not found in received metadata") def test_copy_object_no_copy_condition( # pylint: disable=invalid-name @@ -491,7 +512,7 @@ def test_copy_object_no_copy_condition( # pylint: disable=invalid-name bucket_name=bucket_name, object_name=object_copy, sse=ssec, - source=CopySource( + source=SourceObject( bucket_name=bucket_name, object_name=object_source, ssec=ssec_copy, @@ -520,11 +541,12 @@ def test_copy_object_with_metadata(log_entry): object_name = f"{uuid4()}" object_source = object_name + "-source" object_copy = object_name + "-copy" - metadata = HTTPHeaderDict({ + expected_metadata = { "testing-string": "string", "testing-int": "1", "10": 'value', - }) + } + metadata = HTTPHeaderDict(expected_metadata) log_entry["args"] = { "bucket_name": bucket_name, @@ -548,21 +570,18 @@ def test_copy_object_with_metadata(log_entry): _client.copy_object( bucket_name=bucket_name, object_name=object_copy, - source=CopySource( + source=SourceObject( bucket_name=bucket_name, object_name=object_source, ), user_metadata=metadata, - metadata_directive=REPLACE, + metadata_directive=Directive.REPLACE, ) # Verification st_obj = _client.stat_object( bucket_name=bucket_name, object_name=object_copy, ) - expected_metadata = {'x-amz-meta-testing-int': '1', - 'x-amz-meta-testing-string': 'string', - 'x-amz-meta-10': 'value'} _validate_stat(st_obj, size, expected_metadata) finally: _client.remove_object( @@ -603,7 +622,7 @@ def test_copy_object_etag_match(log_entry): _client.copy_object( bucket_name=bucket_name, object_name=object_copy, - source=CopySource( + source=SourceObject( bucket_name=bucket_name, object_name=object_source, ), @@ -617,7 +636,7 @@ def test_copy_object_etag_match(log_entry): _client.copy_object( bucket_name=bucket_name, object_name=object_copy, - source=CopySource( + source=SourceObject( bucket_name=bucket_name, object_name=object_source, match_etag=source_etag, @@ -667,7 +686,7 @@ def test_copy_object_negative_etag_match( # pylint: disable=invalid-name _client.copy_object( bucket_name=bucket_name, object_name=object_copy, - source=CopySource( + source=SourceObject( bucket_name=bucket_name, object_name=object_source, match_etag=etag, @@ -720,7 +739,7 @@ def test_copy_object_modified_since(log_entry): _client.copy_object( bucket_name=bucket_name, object_name=object_copy, - source=CopySource( + source=SourceObject( bucket_name=bucket_name, object_name=object_source, modified_since=mod_since, @@ -773,7 +792,7 @@ def test_copy_object_unmodified_since( # pylint: disable=invalid-name _client.copy_object( bucket_name=bucket_name, object_name=object_copy, - source=CopySource( + source=SourceObject( bucket_name=bucket_name, object_name=object_source, unmodified_since=unmod_since, @@ -859,14 +878,14 @@ def test_put_object(log_entry, sse=None): ) normalized_meta = { key.lower(): value for key, value in ( - st_obj.metadata or {}).items() + st_obj.user_metadata or {}).items() } - if 'x-amz-meta-testing' not in normalized_meta: + if 'testing' not in 
normalized_meta: raise ValueError("Metadata key 'x-amz-meta-testing' not found") - value = normalized_meta['x-amz-meta-testing'] + value = normalized_meta['testing'] if value != 'value': raise ValueError(f"Metadata key has unexpected value {value}") - if 'x-amz-meta-test-key' not in normalized_meta: + if 'test-key' not in normalized_meta: raise ValueError("Metadata key 'x-amz-meta-test-key' not found") finally: _client.remove_object(bucket_name=bucket_name, object_name=object_name) @@ -909,114 +928,6 @@ def test_negative_put_object_with_path_segment( # pylint: disable=invalid-name _client.remove_bucket(bucket_name=bucket_name) -def test_put_object_multipart_with_checksum( # pylint: disable=invalid-name - log_entry): - """Test put_object() multipart upload with checksum validation. - - This test validates the AWS S3 compliant checksum implementation for - multipart uploads: - - CreateMultipartUpload receives algorithm header only (not values) - - UploadPart includes checksum value headers - - CompleteMultipartUpload includes checksums in XML body - """ - - # Get a unique bucket_name and object_name - bucket_name = _gen_bucket_name() - object_name = f"{uuid4()}-checksum" - object_name_sha256 = None # Initialize for cleanup - # Use 6 MB to trigger multipart upload (> 5 MB threshold) - length = 6 * MB - - log_entry["args"] = { - "bucket_name": bucket_name, - "object_name": object_name, - "length": length, - "data": "LimitedRandomReader(6 * MB)", - "checksum": "Algorithm.CRC32C", - } - - try: - _client.make_bucket(bucket_name=bucket_name) - - # Upload with CRC32C checksum - triggers multipart upload - reader = LimitedRandomReader(length) - result = _client.put_object( - bucket_name=bucket_name, - object_name=object_name, - data=reader, - length=length, - checksum=Algorithm.CRC32C, - ) - - # Verify upload succeeded and returned valid result - if not result.etag: - raise ValueError("Upload did not return valid ETag") - - # Verify ETag indicates multipart upload (contains dash and part count) - if '-' not in result.etag: - raise ValueError( - f"Expected multipart ETag (with dash), got: {result.etag}") - - # Stat the object to verify it exists and has correct size - st_obj = _client.stat_object( - bucket_name=bucket_name, - object_name=object_name, - ) - - if st_obj.size != length: - raise ValueError( - f"Size mismatch: expected {length}, got {st_obj.size}") - - # Test with SHA256 checksum algorithm - object_name_sha256 = f"{uuid4()}-checksum-sha256" - log_entry["args"]["object_name"] = object_name_sha256 - log_entry["args"]["checksum"] = "Algorithm.SHA256" - - reader = LimitedRandomReader(length) - result = _client.put_object( - bucket_name=bucket_name, - object_name=object_name_sha256, - data=reader, - length=length, - checksum=Algorithm.SHA256, - ) - - if not result.etag: - raise ValueError("Upload with SHA256 did not return valid ETag") - - if '-' not in result.etag: - raise ValueError( - f"Expected multipart ETag for SHA256, got: {result.etag}") - - st_obj = _client.stat_object( - bucket_name=bucket_name, - object_name=object_name_sha256, - ) - - if st_obj.size != length: - raise ValueError( - f"Size mismatch: expected {length}, got {st_obj.size}") - - finally: - try: - _client.remove_object( - bucket_name=bucket_name, object_name=object_name) - except: # pylint: disable=bare-except - pass - if object_name_sha256: - try: - _client.remove_object( - bucket_name=bucket_name, - object_name=object_name_sha256, - ) - except: # pylint: disable=bare-except - pass - try: - 
_client.remove_bucket(bucket_name=bucket_name) - except: # pylint: disable=bare-except - pass - - def _test_stat_object(log_entry, sse=None, version_check=False): """Test stat_object().""" @@ -1043,7 +954,7 @@ def _test_stat_object(log_entry, sse=None, version_check=False): if version_check: _client.set_bucket_versioning( bucket_name=bucket_name, - config=VersioningConfig(ENABLED), + config=VersioningConfig(VersioningConfig.ENABLED), ) # Put/Upload a streaming object of 1 MiB reader = LimitedRandomReader(length) @@ -1066,8 +977,7 @@ def _test_stat_object(log_entry, sse=None, version_check=False): log_entry["args"]["length"] = length = 11 * MB reader = LimitedRandomReader(length) log_entry["args"]["data"] = "LimitedRandomReader(11 * MB)" - log_entry["args"]["metadata"] = metadata = { - 'X-Amz-Meta-Testing': 'value'} + log_entry["args"]["metadata"] = metadata = {'Testing': 'value'} log_entry["args"]["content_type"] = content_type = ( "application/octet-stream") log_entry["args"]["object_name"] = object_name + "-metadata" @@ -1136,7 +1046,7 @@ def _test_remove_object(log_entry, version_check=False): if version_check: _client.set_bucket_versioning( bucket_name=bucket_name, - config=VersioningConfig(ENABLED), + config=VersioningConfig(VersioningConfig.ENABLED), ) result = _client.put_object( bucket_name=bucket_name, @@ -1185,7 +1095,7 @@ def _test_get_object(log_entry, sse=None, version_check=False): if version_check: _client.set_bucket_versioning( bucket_name=bucket_name, - config=VersioningConfig(ENABLED), + config=VersioningConfig(VersioningConfig.ENABLED), ) result = _client.put_object( bucket_name=bucket_name, @@ -1250,7 +1160,7 @@ def _test_fget_object(log_entry, sse=None, version_check=False): if version_check: _client.set_bucket_versioning( bucket_name=bucket_name, - config=VersioningConfig(ENABLED), + config=VersioningConfig(VersioningConfig.ENABLED), ) result = _client.put_object( bucket_name=bucket_name, @@ -1409,7 +1319,7 @@ def _test_list_objects(log_entry, use_api_v1=False, version_check=False): if version_check: _client.set_bucket_versioning( bucket_name=bucket_name, - config=VersioningConfig(ENABLED), + config=VersioningConfig(VersioningConfig.ENABLED), ) size = 1 * KB result = _client.put_object( @@ -1873,7 +1783,7 @@ def test_presigned_get_object_version( # pylint: disable=invalid-name try: _client.set_bucket_versioning( bucket_name=bucket_name, - config=VersioningConfig(ENABLED), + config=VersioningConfig(VersioningConfig.ENABLED), ) size = 1 * KB result = _client.put_object( @@ -2241,12 +2151,12 @@ def _test_remove_objects(log_entry, version_check=False): _client.make_bucket(bucket_name=bucket_name) object_names = [] - delete_object_list = [] + objects = [] try: if version_check: _client.set_bucket_versioning( bucket_name=bucket_name, - config=VersioningConfig(ENABLED), + config=VersioningConfig(VersioningConfig.ENABLED), ) size = 1 * KB # Upload some new objects to prepare for multi-object delete test. @@ -2262,17 +2172,17 @@ def _test_remove_objects(log_entry, version_check=False): (object_name, result.version_id) if version_check else object_name, ) - log_entry["args"]["delete_object_list"] = object_names + log_entry["args"]["objects"] = object_names for args in object_names: - delete_object_list.append( - DeleteObject(args) if isinstance(args, str) - else DeleteObject(args[0], args[1]) + objects.append( + DeleteRequest.Object(args) if isinstance(args, str) + else DeleteRequest.Object(args[0], args[1]) ) # delete the objects in a single library call. 
errs = _client.remove_objects( bucket_name=bucket_name, - delete_object_list=delete_object_list, + objects=objects, ) for err in errs: raise ValueError(f"Remove objects err: {err}") @@ -2280,7 +2190,7 @@ def _test_remove_objects(log_entry, version_check=False): # Try to clean everything to keep our server intact errs = _client.remove_objects( bucket_name=bucket_name, - delete_object_list=delete_object_list, + objects=objects, ) for err in errs: raise ValueError(f"Remove objects err: {err}") @@ -2311,7 +2221,7 @@ def test_remove_bucket(log_entry): if _is_aws: log_entry["args"]["location"] = location = "us-east-1" - _client.make_bucket(bucket_name=bucket_name, location=location) + _client.make_bucket(bucket_name=bucket_name, region=location) else: _client.make_bucket(bucket_name=bucket_name) @@ -2391,13 +2301,13 @@ def test_set_get_bucket_versioning(log_entry): # Test all fields of versioning configuration _client.set_bucket_versioning( bucket_name=bucket_name, - config=VersioningConfig(status=ENABLED, + config=VersioningConfig(status=VersioningConfig.ENABLED, exclude_folders=True, excluded_prefixes=excl_prefixes), ) vcfg = _client.get_bucket_versioning(bucket_name=bucket_name) - if vcfg.status != ENABLED: + if vcfg.status != VersioningConfig.ENABLED: raise ValueError(f'(1) unexpected get_bucket_versioning result: ' f'status: {vcfg.status}') if not vcfg.exclude_folders: @@ -2410,11 +2320,11 @@ def test_set_get_bucket_versioning(log_entry): # Disable all fields of versioning configuration _client.set_bucket_versioning( bucket_name=bucket_name, - config=VersioningConfig(status=SUSPENDED), + config=VersioningConfig(status=VersioningConfig.SUSPENDED), ) vcfg = _client.get_bucket_versioning(bucket_name=bucket_name) - if vcfg.status != SUSPENDED: + if vcfg.status != VersioningConfig.SUSPENDED: raise ValueError(f'(2) unexpected get_bucket_versioning result: ' f'status: {vcfg.status}') if vcfg.exclude_folders: @@ -2433,7 +2343,8 @@ def main(): Functional testing of minio python library. 
""" # pylint: disable=global-statement - global _client, _test_file, _large_file, _is_aws + global _client, _test_file, _large_file, _test_file_crc32c, \ + _large_file_crc32c, _is_aws access_key = os.getenv('ACCESS_KEY') secret_key = os.getenv('SECRET_KEY') @@ -2477,6 +2388,26 @@ def main(): with open(_large_file, 'wb') as file_data: shutil.copyfileobj(LimitedRandomReader(11 * MB), file_data) + hasher = CRC32C() + with open(_test_file, "rb") as file: + hasher.update(file.read()) + _test_file_crc32c = base64_string(hasher.sum()) + hasher.reset() + + composite_hasher = CRC32C() + part_count = 0 + with open(_large_file, "rb") as file: + while True: + data = file.read(5 * MB) + if not data: + break + part_count += 1 + hasher.update(data) + composite_hasher.update(hasher.sum()) + hasher.reset() + _large_file_crc32c = base64_string(composite_hasher.sum()) + _large_file_crc32c = f"{_large_file_crc32c}-{part_count}" + ssec = None if secure: # Create a Customer Key of 32 Bytes for Server Side Encryption (SSE-C) @@ -2502,7 +2433,6 @@ def main(): test_copy_object_unmodified_since: None, test_put_object: {"sse": ssec} if ssec else None, test_negative_put_object_with_path_segment: None, - test_put_object_multipart_with_checksum: None, test_stat_object: {"sse": ssec} if ssec else None, test_stat_object_version: {"sse": ssec} if ssec else None, test_get_object: {"sse": ssec} if ssec else None, diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index c15edd87..0b5c93a7 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/bucket_exist_test.py b/tests/unit/bucket_exist_test.py index cf5f954e..b4e3ea75 100644 --- a/tests/unit/bucket_exist_test.py +++ b/tests/unit/bucket_exist_test.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,12 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -import unittest.mock as mock -from unittest import TestCase +from unittest import TestCase, mock from minio import Minio -from minio.api import _DEFAULT_USER_AGENT from minio.error import S3Error +from minio.helpers import _DEFAULT_USER_AGENT from .minio_mocks import MockConnection, MockResponse diff --git a/tests/unit/copy_object_test.py b/tests/unit/copy_object_test.py deleted file mode 100644 index ab7ad408..00000000 --- a/tests/unit/copy_object_test.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015, 2016 MinIO, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from unittest import TestCase - -from minio import Minio -from minio.commonconfig import CopySource - - -class CopyObjectTest(TestCase): - def test_valid_copy_source(self): - client = Minio(endpoint='localhost:9000') - with self.assertRaises(ValueError): - client.copy_object( - bucket_name='hello', - object_name='1', - source='/testbucket/object', - ) - - def test_valid_match_etag(self): - self.assertRaises( - ValueError, CopySource, "src-bucket", "src-object", match_etag='') - - def test_not_match_etag(self): - self.assertRaises( - ValueError, - CopySource, "src-bucket", "src-object", not_match_etag='' - ) - - def test_valid_modified_since(self): - self.assertRaises( - ValueError, - CopySource, "src-bucket", "src-object", modified_since='' - ) - - def test_valid_unmodified_since(self): - self.assertRaises( - ValueError, - CopySource, "src-bucket", "src-object", unmodified_since='' - ) diff --git a/tests/unit/credentials_test.py b/tests/unit/credentials_test.py index 87d72d06..54ceee65 100644 --- a/tests/unit/credentials_test.py +++ b/tests/unit/credentials_test.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) -# 2020 MinIO, Inc. +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/crypto_test.py b/tests/unit/crypto_test.py index 6ec7d0c2..1122a650 100644 --- a/tests/unit/crypto_test.py +++ b/tests/unit/crypto_test.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/get_bucket_policy_test.py b/tests/unit/get_bucket_policy_test.py index c6ab6a45..f813de58 100644 --- a/tests/unit/get_bucket_policy_test.py +++ b/tests/unit/get_bucket_policy_test.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,12 +16,11 @@ import json -import unittest.mock as mock -from unittest import TestCase +from unittest import TestCase, mock from minio import Minio -from minio.api import _DEFAULT_USER_AGENT from minio.error import S3Error +from minio.helpers import _DEFAULT_USER_AGENT from tests.unit.minio_mocks import MockConnection, MockResponse diff --git a/tests/unit/get_object_test.py b/tests/unit/get_object_test.py index 7bb09384..4d3038c5 100644 --- a/tests/unit/get_object_test.py +++ b/tests/unit/get_object_test.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. 
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,12 +14,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest.mock as mock
-from unittest import TestCase
+from unittest import TestCase, mock
 
 from minio import Minio
-from minio.api import _DEFAULT_USER_AGENT
 from minio.error import S3Error
+from minio.helpers import _DEFAULT_USER_AGENT
 
 from .helpers import generate_error
 from .minio_mocks import MockConnection, MockResponse
diff --git a/tests/unit/helpers.py b/tests/unit/helpers.py
index 4c781c84..352e5833 100644
--- a/tests/unit/helpers.py
+++ b/tests/unit/helpers.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,24 +14,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from collections import namedtuple
-from unittest import TestCase
-from urllib.parse import urlunsplit
-
-from minio.helpers import BaseURL
-
-
-def generate_error(code, message, request_id, host_id,
-                   resource, bucket_name, object_name):
-    return '''
-    <Error>
-      <Code>{0}</Code>
-      <Message>{1}</Message>
-      <RequestId>{2}</RequestId>
-      <HostId>{3}</HostId>
-      <Resource>{4}</Resource>
-      <BucketName>{5}</BucketName>
-      <Key>{6}</Key>
-    </Error>
-    '''.format(code, message, request_id, host_id,
-               resource, bucket_name, object_name)
+def generate_error(
+    code, message, request_id, host_id, resource, bucket_name, object_name,
+):
+    return f"""<Error>
+      <Code>{code}</Code>
+      <Message>{message}</Message>
+      <RequestId>{request_id}</RequestId>
+      <HostId>{host_id}</HostId>
+      <Resource>{resource}</Resource>
+      <BucketName>{bucket_name}</BucketName>
+      <Key>{object_name}</Key>
+</Error>"""
diff --git a/tests/unit/helpers_test.py b/tests/unit/helpers_test.py
index aa25febc..96e72497 100644
--- a/tests/unit/helpers_test.py
+++ b/tests/unit/helpers_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/tests/unit/legelhold_test.py b/tests/unit/legelhold_test.py
index 3a4df903..a35367c1 100644
--- a/tests/unit/legelhold_test.py
+++ b/tests/unit/legelhold_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2020 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,7 +17,7 @@
 from unittest import TestCase
 
 from minio import xml
-from minio.legalhold import LegalHold
+from minio.models import LegalHold
 
 
 class LegalHoldTest(TestCase):
diff --git a/tests/unit/lifecycleconfig_test.py b/tests/unit/lifecycleconfig_test.py
index 832d9303..c32b38e7 100644
--- a/tests/unit/lifecycleconfig_test.py
+++ b/tests/unit/lifecycleconfig_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015, 2016 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,25 +17,26 @@ from unittest import TestCase from minio import xml -from minio.commonconfig import ENABLED, Filter -from minio.lifecycleconfig import Expiration, LifecycleConfig, Rule, Transition +from minio.models import Filter, LifecycleConfig, Status class LifecycleConfigTest(TestCase): def test_config(self): config = LifecycleConfig( rules=[ - Rule( - status=ENABLED, + LifecycleConfig.Rule( + status=Status.ENABLED, rule_filter=Filter(prefix="documents/"), rule_id="rule1", - transition=Transition(days=30, storage_class="GLACIER"), + transition=LifecycleConfig.Transition( + days=30, storage_class="GLACIER", + ), ), - Rule( - status=ENABLED, + LifecycleConfig.Rule( + status=Status.ENABLED, rule_filter=Filter(prefix="logs/"), rule_id="rule2", - expiration=Expiration(days=365), + expiration=LifecycleConfig.Expiration(days=365), ), ], ) @@ -43,19 +44,19 @@ def test_config(self): config = LifecycleConfig( rules=[ - Rule( - status=ENABLED, + LifecycleConfig.Rule( + status=Status.ENABLED, rule_filter=Filter(prefix=""), rule_id="rule", - expiration=Expiration(days=365), + expiration=LifecycleConfig.Expiration(days=365), ), ], ) xml.marshal(config) config = xml.unmarshal( - LifecycleConfig, - """ + LifecycleConfig, """ + DeleteAfterBecomingNonCurrent @@ -82,8 +83,8 @@ def test_config(self): xml.marshal(config) config = xml.unmarshal( - LifecycleConfig, - """ + LifecycleConfig, """ + DeleteAfterBecomingNonCurrent diff --git a/tests/unit/list_buckets_test.py b/tests/unit/list_buckets_test.py index 61d60958..575093a3 100644 --- a/tests/unit/list_buckets_test.py +++ b/tests/unit/list_buckets_test.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,7 +18,7 @@ from unittest import TestCase, mock from minio import Minio -from minio.api import _DEFAULT_USER_AGENT +from minio.helpers import _DEFAULT_USER_AGENT from .minio_mocks import MockConnection, MockResponse diff --git a/tests/unit/list_objects_test.py b/tests/unit/list_objects_test.py index b47de276..3cd0b1db 100644 --- a/tests/unit/list_objects_test.py +++ b/tests/unit/list_objects_test.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015-2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,10 @@ # limitations under the License. 
import time -import unittest.mock as mock -from unittest import TestCase +from unittest import TestCase, mock from minio import Minio -from minio.api import _DEFAULT_USER_AGENT +from minio.helpers import _DEFAULT_USER_AGENT from .minio_mocks import MockConnection, MockResponse diff --git a/tests/unit/list_objects_v1_test.py b/tests/unit/list_objects_v1_test.py index 04ea3071..98defc57 100644 --- a/tests/unit/list_objects_v1_test.py +++ b/tests/unit/list_objects_v1_test.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015-2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,10 @@ # limitations under the License. import time -import unittest.mock as mock -from unittest import TestCase +from unittest import TestCase, mock from minio import Minio -from minio.api import _DEFAULT_USER_AGENT +from minio.helpers import _DEFAULT_USER_AGENT from .minio_mocks import MockConnection, MockResponse @@ -112,8 +111,8 @@ def test_list_objects_works(self, mock_connection): mock_server.mock_add_request( MockResponse( "GET", - "https://localhost:9000/bucket?delimiter=%2F&encoding-type=url" - "&max-keys=1000&prefix=", + "https://localhost:9000/bucket?delimiter=%2F&" + "encoding-type=url&max-keys=1000&prefix=", {"User-Agent": _DEFAULT_USER_AGENT}, 200, content=b"", diff --git a/tests/unit/make_bucket_test.py b/tests/unit/make_bucket_test.py index af2bc3f0..5e600de1 100644 --- a/tests/unit/make_bucket_test.py +++ b/tests/unit/make_bucket_test.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,12 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -import unittest.mock as mock -from unittest import TestCase +from unittest import TestCase, mock from minio import Minio -from minio.api import _DEFAULT_USER_AGENT from minio.error import S3Error +from minio.helpers import _DEFAULT_USER_AGENT from .helpers import generate_error from .minio_mocks import MockConnection, MockResponse diff --git a/tests/unit/minio_mocks.py b/tests/unit/minio_mocks.py index 19aa6d51..79d86a9a 100644 --- a/tests/unit/minio_mocks.py +++ b/tests/unit/minio_mocks.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# MinIO Python Library for Amazon S3 Compatible Cloud Storage, -# (C) 2015-2020 MinIO, Inc. +# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) +# [2014] - [2025] MinIO, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/unit/minio_test.py b/tests/unit/minio_test.py
index 21eb162a..198c1ecf 100644
--- a/tests/unit/minio_test.py
+++ b/tests/unit/minio_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015, 2016, 2017 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -19,8 +19,7 @@
 
 from minio import Minio
 from minio import __version__ as minio_version
-from minio.api import _DEFAULT_USER_AGENT
-from minio.helpers import BaseURL, check_bucket_name
+from minio.helpers import _DEFAULT_USER_AGENT, BaseURL, check_bucket_name
 
 
 class ValidBucketName(TestCase):
diff --git a/tests/unit/notificationconfig_test.py b/tests/unit/notificationconfig_test.py
index cfc71aba..7aad9742 100644
--- a/tests/unit/notificationconfig_test.py
+++ b/tests/unit/notificationconfig_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2020 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,19 +17,20 @@
 from unittest import TestCase
 
 from minio import xml
-from minio.notificationconfig import (NotificationConfig, PrefixFilterRule,
-                                      QueueConfig)
+from minio.models import NotificationConfig
 
 
 class NotificationConfigTest(TestCase):
     def test_config(self):
         config = NotificationConfig(
             queue_config_list=[
-                QueueConfig(
+                NotificationConfig.QueueConfig(
                     queue="QUEUE-ARN-OF-THIS-BUCKET",
                     events=['s3:ObjectCreated:*'],
                     config_id="1",
-                    prefix_filter_rule=PrefixFilterRule("abc"),
+                    prefix_filter_rule=NotificationConfig.PrefixFilterRule(
+                        "abc",
+                    ),
                 ),
             ],
         )
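
A minimal sketch of the relocated notification API, using only names present in the hunk above (`minio.models.NotificationConfig` and its nested `QueueConfig` and `PrefixFilterRule`); not part of the patch, and the queue ARN is a placeholder:

    from minio import xml
    from minio.models import NotificationConfig

    config = NotificationConfig(
        queue_config_list=[
            NotificationConfig.QueueConfig(
                queue="arn:minio:sqs::1:webhook",  # placeholder queue ARN
                events=["s3:ObjectCreated:*"],
                config_id="1",
                prefix_filter_rule=NotificationConfig.PrefixFilterRule("img-"),
            ),
        ],
    )
    xml.marshal(config)  # round-trip check, mirroring the unit test
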
diff --git a/tests/unit/objectlockconfig_test.py b/tests/unit/objectlockconfig_test.py
index 277e3db9..fa6bc258 100644
--- a/tests/unit/objectlockconfig_test.py
+++ b/tests/unit/objectlockconfig_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2020 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,18 +17,19 @@
 from unittest import TestCase
 
 from minio import xml
-from minio.commonconfig import COMPLIANCE, GOVERNANCE
-from minio.objectlockconfig import DAYS, YEARS, ObjectLockConfig
+from minio.models import ObjectLockConfig
 
 
 class ObjectLockConfigTest(TestCase):
     def test_config(self):
-        config = ObjectLockConfig(GOVERNANCE, 15, DAYS)
+        config = ObjectLockConfig(
+            ObjectLockConfig.GOVERNANCE, 15, ObjectLockConfig.DAYS,
+        )
         xml.marshal(config)
 
         config = xml.unmarshal(
-            ObjectLockConfig,
-            """
+            ObjectLockConfig, """
+
                 Enabled
@@ -39,5 +40,8 @@ def test_config(self):
             """,
         )
         xml.marshal(config)
-        self.assertEqual(config.mode, COMPLIANCE)
-        self.assertEqual((config.duration, config.duration_unit), (3, YEARS))
+        self.assertEqual(config.mode, ObjectLockConfig.COMPLIANCE)
+        self.assertEqual(
+            (config.duration, config.duration_unit),
+            (3, ObjectLockConfig.YEARS),
+        )
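
A sketch of the consolidated object-lock constants, which this patch moves onto `ObjectLockConfig` itself (previously `minio.commonconfig.GOVERNANCE`/`COMPLIANCE` and `minio.objectlockconfig.DAYS`/`YEARS`); only names shown in the hunks above are used, and the 15-day duration is arbitrary:

    from minio import xml
    from minio.models import ObjectLockConfig

    # Mode and duration-unit constants now live on the config class.
    config = ObjectLockConfig(
        ObjectLockConfig.GOVERNANCE, 15, ObjectLockConfig.DAYS,
    )
    xml.marshal(config)
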
diff --git a/tests/unit/presigned_get_object_test.py b/tests/unit/presigned_get_object_test.py
index f2d604ab..26c32749 100644
--- a/tests/unit/presigned_get_object_test.py
+++ b/tests/unit/presigned_get_object_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,9 +14,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest.mock as mock
 from datetime import timedelta
-from unittest import TestCase
+from unittest import TestCase, mock
 
 from minio import Minio
 from minio.helpers import HTTPQueryDict
diff --git a/tests/unit/presigned_put_object_test.py b/tests/unit/presigned_put_object_test.py
index 8fcfca03..7db77ede 100644
--- a/tests/unit/presigned_put_object_test.py
+++ b/tests/unit/presigned_put_object_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015, 2016 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/tests/unit/put_object_test.py b/tests/unit/put_object_test.py
index 8e9fb2ae..f2b03f20 100644
--- a/tests/unit/put_object_test.py
+++ b/tests/unit/put_object_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/tests/unit/remove_bucket_test.py b/tests/unit/remove_bucket_test.py
index 6025e4ad..a01a3191 100644
--- a/tests/unit/remove_bucket_test.py
+++ b/tests/unit/remove_bucket_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,11 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest.mock as mock
-from unittest import TestCase
+from unittest import TestCase, mock
 
 from minio import Minio
-from minio.api import _DEFAULT_USER_AGENT
+from minio.helpers import _DEFAULT_USER_AGENT
 
 from .minio_mocks import MockConnection, MockResponse
diff --git a/tests/unit/remove_object_test.py b/tests/unit/remove_object_test.py
index 746b4ea1..b8251733 100644
--- a/tests/unit/remove_object_test.py
+++ b/tests/unit/remove_object_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the 'License');
 # you may not use this file except in compliance with the License.
@@ -14,11 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest.mock as mock
-from unittest import TestCase
+from unittest import TestCase, mock
 
 from minio import Minio
-from minio.api import _DEFAULT_USER_AGENT
+from minio.helpers import _DEFAULT_USER_AGENT
 
 from .minio_mocks import MockConnection, MockResponse
diff --git a/tests/unit/remove_objects_test.py b/tests/unit/remove_objects_test.py
index 8b32d287..8dbf1228 100644
--- a/tests/unit/remove_objects_test.py
+++ b/tests/unit/remove_objects_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2016 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the 'License');
 # you may not use this file except in compliance with the License.
@@ -15,12 +15,11 @@
 # limitations under the License.
 
 import itertools
-import unittest.mock as mock
-from unittest import TestCase
+from unittest import TestCase, mock
 
 from minio import Minio
-from minio.api import _DEFAULT_USER_AGENT
-from minio.deleteobjects import DeleteObject
+from minio.helpers import _DEFAULT_USER_AGENT
+from minio.models import DeleteRequest
 
 from .minio_mocks import MockConnection, MockResponse
 
@@ -40,7 +39,9 @@ def test_object_is_list(self, mock_connection):
         client = Minio(endpoint='localhost:9000')
         for err in client.remove_objects(
                 bucket_name="hello",
-                delete_object_list=[DeleteObject("Ab"), DeleteObject("c")],
+                objects=[
+                    DeleteRequest.Object("Ab"), DeleteRequest.Object("c"),
+                ],
         ):
             print(err)
 
@@ -58,7 +59,9 @@ def test_object_is_tuple(self, mock_connection):
         client = Minio(endpoint='localhost:9000')
         for err in client.remove_objects(
                 bucket_name="hello",
-                delete_object_list=(DeleteObject("Ab"), DeleteObject("c")),
+                objects=(
+                    DeleteRequest.Object("Ab"), DeleteRequest.Object("c"),
+                ),
         ):
             print(err)
 
@@ -74,8 +77,11 @@ def test_object_is_iterator(self, mock_connection):
                          content=b'')
         )
         client = Minio(endpoint='localhost:9000')
-        it = itertools.chain((DeleteObject("Ab"), DeleteObject("c")))
         result = client.remove_objects(
-            bucket_name='hello', delete_object_list=it)
+            bucket_name='hello',
+            objects=itertools.chain(
+                (DeleteRequest.Object("Ab"), DeleteRequest.Object("c")),
+            ),
+        )
         for err in result:
             print(err)
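
A sketch of the renamed bulk-delete call shown above: the `delete_object_list=` parameter becomes `objects=`, and `DeleteObject` becomes the nested `DeleteRequest.Object` from `minio.models`. Not part of the patch; the endpoint, bucket, and keys are placeholders:

    from minio import Minio
    from minio.models import DeleteRequest

    client = Minio(endpoint="localhost:9000")  # placeholder endpoint
    errors = client.remove_objects(
        bucket_name="my-bucket",               # placeholder bucket
        objects=[
            DeleteRequest.Object("a.txt"),     # placeholder keys
            DeleteRequest.Object("b.txt"),
        ],
    )
    for err in errors:  # iterate to drive the deletion, as the tests do
        print(err)
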
diff --git a/tests/unit/replicationconfig_test.py b/tests/unit/replicationconfig_test.py
index 61718934..a8a0ee6c 100644
--- a/tests/unit/replicationconfig_test.py
+++ b/tests/unit/replicationconfig_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2020 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,9 +17,7 @@
 from unittest import TestCase
 
 from minio import xml
-from minio.commonconfig import DISABLED, ENABLED, AndOperator, Filter, Tags
-from minio.replicationconfig import (DeleteMarkerReplication, Destination,
-                                     ReplicationConfig, Rule)
+from minio.models import Filter, ReplicationConfig, Status, Tags
 
 
 class ReplicationConfigTest(TestCase):
@@ -29,15 +27,17 @@ def test_config(self):
         config = ReplicationConfig(
             "REPLACE-WITH-ACTUAL-ROLE",
             [
-                Rule(
-                    destination=Destination(
+                ReplicationConfig.Rule(
+                    destination=ReplicationConfig.Destination(
                         "REPLACE-WITH-ACTUAL-DESTINATION-BUCKET-ARN",
                     ),
-                    status=ENABLED,
-                    delete_marker_replication=DeleteMarkerReplication(
-                        DISABLED,
+                    status=Status.ENABLED,
+                    delete_marker_replication=(
+                        ReplicationConfig.DeleteMarkerReplication(
+                            Status.DISABLED,
+                        )
                     ),
-                    rule_filter=Filter(AndOperator("TaxDocs", tags)),
+                    rule_filter=Filter(Filter.And("TaxDocs", tags)),
                     rule_id="rule1",
                     priority=1,
                 ),
@@ -47,7 +47,8 @@ def test_config(self):
 
         config = xml.unmarshal(
             ReplicationConfig,
-            """
+            """
+
                 arn:aws:iam::35667example:role/CrossRegionReplicationRoleForS3
                 rule1
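
A sketch of the consolidated replication API above: `Destination`, `DeleteMarkerReplication`, and `Rule` nest under `ReplicationConfig`, the status constants move onto the shared `Status` enum, and `AndOperator` becomes `Filter.And`. Not part of the patch; the ARNs are placeholders, and the prefix-only filter reuses the `Filter(prefix=...)` form seen in the lifecycle test:

    from minio import xml
    from minio.models import Filter, ReplicationConfig, Status

    config = ReplicationConfig(
        "arn:aws:iam::123456789012:role/replication",   # placeholder role ARN
        [
            ReplicationConfig.Rule(
                destination=ReplicationConfig.Destination(
                    "arn:aws:s3:::destination-bucket",  # placeholder bucket ARN
                ),
                status=Status.ENABLED,
                rule_filter=Filter(prefix="TaxDocs/"),
                rule_id="rule1",
                priority=1,
            ),
        ],
    )
    xml.marshal(config)
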
diff --git a/tests/unit/retention_test.py b/tests/unit/retention_test.py
index 58487e61..b787f62f 100644
--- a/tests/unit/retention_test.py
+++ b/tests/unit/retention_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2020 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -18,14 +18,15 @@
 from unittest import TestCase
 
 from minio import xml
-from minio.commonconfig import COMPLIANCE, GOVERNANCE
-from minio.retention import Retention
+from minio.models import Retention
 from minio.time import utcnow
 
 
 class RetentionTest(TestCase):
     def test_config(self):
-        config = Retention(GOVERNANCE, utcnow() + timedelta(days=10))
+        config = Retention(
+            Retention.GOVERNANCE, utcnow() + timedelta(days=10),
+        )
         xml.marshal(config)
 
         config = xml.unmarshal(
@@ -36,7 +37,7 @@ def test_config(self):
             """,
         )
         xml.marshal(config)
-        self.assertEqual(config.mode, COMPLIANCE)
+        self.assertEqual(config.mode, Retention.COMPLIANCE)
         self.assertEqual(
             config.retain_until_date,
             datetime(2020, 10, 2, 0, 0, 0, 0, timezone.utc),
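
A sketch of the relocated retention API above, with the mode constants now attributes of `Retention` itself; not part of the patch, and the ten-day window is arbitrary:

    from datetime import timedelta

    from minio import xml
    from minio.models import Retention
    from minio.time import utcnow

    # GOVERNANCE and COMPLIANCE are now class attributes of Retention.
    config = Retention(Retention.GOVERNANCE, utcnow() + timedelta(days=10))
    xml.marshal(config)
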
diff --git a/tests/unit/sign_test.py b/tests/unit/sign_test.py
index bc869fc7..80dcfc14 100644
--- a/tests/unit/sign_test.py
+++ b/tests/unit/sign_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the 'License');
 # you may not use this file except in compliance with the License.
@@ -23,8 +23,9 @@
 from urllib3._collections import HTTPHeaderDict
 
 from minio import Minio
+from minio.checksum import sha256_hash
+from minio.compat import queryencode, quote
 from minio.credentials import Credentials
-from minio.helpers import queryencode, quote, sha256_hash
 from minio.signer import (_get_authorization, _get_canonical_request_hash,
                           _get_scope, _get_signing_key, _get_string_to_sign,
                           presign_v4, sign_v4_s3)
@@ -138,7 +139,16 @@ def test_presigned_versioned_id(self):
             expires=604800,
         )
 
-        self.assertEqual(urlunsplit(url), 'http://localhost:9000/bucket-name/objectName?versionId=uuid&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=minio%2F20150620%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20150620T010203Z&X-Amz-Expires=604800&X-Amz-SignedHeaders=host&X-Amz-Signature=3ce13e2ca929fafa20581a05730e4e9435f2a5e20ec7c5a082d175692fb0a663')
+        self.assertEqual(
+            urlunsplit(url),
+            'http://localhost:9000/bucket-name/objectName?versionId=uuid&'
+            'X-Amz-Algorithm=AWS4-HMAC-SHA256&'
+            'X-Amz-Credential=minio%2F20150620%2Fus-east-1%2Fs3%2Faws4_request&'
+            'X-Amz-Date=20150620T010203Z&X-Amz-Expires=604800&'
+            'X-Amz-SignedHeaders=host&'
+            'X-Amz-Signature=3ce13e2ca929fafa20581a05730e4e9435f2a5e20ec7c5a082'
+            'd175692fb0a663',
+        )
 
 
 class SignV4Test(TestCase):
@@ -170,7 +180,8 @@ def test_signv4(self):
             headers=headers,
             credentials=creds,
             content_sha256=(
-                "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
+                "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b8"
+                "55"
             ),
             date=dt,
         )
@@ -178,8 +189,8 @@
             'AWS4-HMAC-SHA256 Credential='
             'minio/20150620/us-east-1/s3/aws4_request, '
             'SignedHeaders=host;x-amz-content-sha256;x-amz-date, '
-            'Signature='
-            'a2f4546f647981732bd90dfa5a7599c44dca92f44bea48ecc7565df06032c25b')
+            'Signature=a2f4546f647981732bd90dfa5a7599c44dca92f44b'
+            'ea48ecc7565df06032c25b')
 
 
 class UnicodeEncodeTest(TestCase):
@@ -191,11 +202,11 @@ def test_unicode_queryencode(self):
                          '%2Ftest%2F123%2F%E6%B1%89%E5%AD%97')
 
     def test_unicode_quote_u(self):
-        self.assertEqual(quote(u'/test/123/汉字'),
+        self.assertEqual(quote('/test/123/汉字'),
                          '/test/123/%E6%B1%89%E5%AD%97')
 
     def test_unicode_queryencode_u(self):
-        self.assertEqual(queryencode(u'/test/123/汉字'),
+        self.assertEqual(queryencode('/test/123/汉字'),
                          '%2Ftest%2F123%2F%E6%B1%89%E5%AD%97')
 
     def test_unicode_quote_b(self):
diff --git a/tests/unit/sseconfig_test.py b/tests/unit/sseconfig_test.py
index 60121ea7..1aba0c87 100644
--- a/tests/unit/sseconfig_test.py
+++ b/tests/unit/sseconfig_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2020 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,17 +17,17 @@
 from unittest import TestCase
 
 from minio import xml
-from minio.sseconfig import AWS_KMS, Rule, SSEConfig
+from minio.models import SSEConfig
 
 
 class ReplicationConfigTest(TestCase):
     def test_config(self):
-        config = SSEConfig(Rule.new_sse_s3_rule())
+        config = SSEConfig(SSEConfig.Rule.new_sse_s3_rule())
         xml.marshal(config)
 
         config = xml.unmarshal(
-            SSEConfig,
-            """
+            SSEConfig, """
+
                 aws:kms
@@ -38,7 +38,7 @@ def test_config(self):
             """,
         )
         xml.marshal(config)
-        self.assertEqual(config.rule.sse_algorithm, AWS_KMS)
+        self.assertEqual(config.rule.sse_algorithm, SSEConfig.Rule.AWS_KMS)
         self.assertEqual(
             config.rule.kms_master_key_id,
             "arn:aws:kms:us-east-1:1234/5678example",
diff --git a/tests/unit/stat_object_test.py b/tests/unit/stat_object_test.py
index 0dd76c21..6a290c66 100644
--- a/tests/unit/stat_object_test.py
+++ b/tests/unit/stat_object_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the 'License');
 # you may not use this file except in compliance with the License.
@@ -14,11 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest.mock as mock
-from unittest import TestCase
+from unittest import TestCase, mock
 
 from minio import Minio
-from minio.api import _DEFAULT_USER_AGENT
+from minio.helpers import _DEFAULT_USER_AGENT
 
 from .minio_mocks import MockConnection, MockResponse
diff --git a/tests/unit/tagging_test.py b/tests/unit/tagging_test.py
index b3ff7729..7b3649ab 100644
--- a/tests/unit/tagging_test.py
+++ b/tests/unit/tagging_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2020 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,8 +17,7 @@
 from unittest import TestCase
 
 from minio import xml
-from minio.commonconfig import Tags
-from minio.tagging import Tagging
+from minio.models import Tagging, Tags
 
 
 class TaggingTest(TestCase):
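
A sketch of the relocated SSE configuration from the sseconfig_test.py hunks above: the `Rule` helper and the `AWS_KMS` constant now hang off `SSEConfig`. Not part of the patch:

    from minio import xml
    from minio.models import SSEConfig

    # Bucket default encryption using the nested SSE-S3 rule helper.
    config = SSEConfig(SSEConfig.Rule.new_sse_s3_rule())
    xml.marshal(config)
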
diff --git a/tests/unit/time_test.py b/tests/unit/time_test.py
index d392087b..08ca9024 100644
--- a/tests/unit/time_test.py
+++ b/tests/unit/time_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2024 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/tests/unit/trace_test.py b/tests/unit/trace_test.py
index c67617a0..c44fbf34 100644
--- a/tests/unit/trace_test.py
+++ b/tests/unit/trace_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2015, 2016 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/tests/unit/versioningconfig_test.py b/tests/unit/versioningconfig_test.py
index 6ca7bc5a..2d4cfd49 100644
--- a/tests/unit/versioningconfig_test.py
+++ b/tests/unit/versioningconfig_test.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
-# (C) 2020 MinIO, Inc.
+# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
+# [2014] - [2025] MinIO, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,39 +17,38 @@
 from unittest import TestCase
 
 from minio import xml
-from minio.commonconfig import DISABLED, ENABLED
-from minio.versioningconfig import OFF, SUSPENDED, VersioningConfig
+from minio.models import VersioningConfig
 
 
 class VersioningConfigTest(TestCase):
     def test_config(self):
-        config = VersioningConfig(ENABLED)
+        config = VersioningConfig(VersioningConfig.ENABLED)
         xml.marshal(config)
 
         config = xml.unmarshal(
-            VersioningConfig,
-            """
+            VersioningConfig, """
+
            """,
        )
         xml.marshal(config)
-        self.assertEqual(config.status_string, OFF)
+        self.assertEqual(config.status_string, VersioningConfig.OFF)
 
         config = xml.unmarshal(
-            VersioningConfig,
-            """
+            VersioningConfig, """
+
                 Enabled
            """,
        )
         xml.marshal(config)
-        self.assertEqual(config.status_string, ENABLED)
+        self.assertEqual(config.status_string, VersioningConfig.ENABLED)
 
         config = xml.unmarshal(
-            VersioningConfig,
-            """
+            VersioningConfig, """
+
                 Suspended
                 Disabled
            """,
        )
         xml.marshal(config)
-        self.assertEqual(config.status_string, SUSPENDED)
-        self.assertEqual(config.mfa_delete, DISABLED)
+        self.assertEqual(config.status_string, VersioningConfig.SUSPENDED)
+        self.assertEqual(config.mfa_delete, VersioningConfig.DISABLED)
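
A sketch of the consolidated versioning API above, with `ENABLED`, `SUSPENDED`, `OFF`, and the MFA-delete `DISABLED` state all exposed on `VersioningConfig`. Not part of the patch:

    from minio import xml
    from minio.models import VersioningConfig

    # Status constants formerly split between minio.commonconfig and
    # minio.versioningconfig now live on VersioningConfig itself.
    config = VersioningConfig(VersioningConfig.ENABLED)
    xml.marshal(config)
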