diff --git a/.gitignore b/.gitignore index b7faf40..3a12def 100644 --- a/.gitignore +++ b/.gitignore @@ -1,207 +1,222 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[codz] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py.cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -.pybuilder/ -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# UV -# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. 
-#uv.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock -#poetry.toml - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python. -# https://pdm-project.org/en/latest/usage/project/#working-with-version-control -#pdm.lock -#pdm.toml -.pdm-python -.pdm-build/ - -# pixi -# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control. -#pixi.lock -# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one -# in the .venv directory. It is recommended not to include this directory in version control. -.pixi - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.envrc -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ - -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
-#.idea/ - -# Abstra -# Abstra is an AI-powered process automation framework. -# Ignore directories containing user credentials, local state, and settings. -# Learn more at https://abstra.io/docs -.abstra/ - -# Visual Studio Code -# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore -# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore -# and can be added to the global gitignore or merged into this file. However, if you prefer, -# you could uncomment the following to ignore the entire vscode folder -# .vscode/ - -# Ruff stuff: -.ruff_cache/ - -# PyPI configuration file -.pypirc - -# Cursor -# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to -# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data -# refer to https://docs.cursor.com/context/ignore-files -.cursorignore -.cursorindexingignore - -# Marimo -marimo/_static/ -marimo/_lsp/ -__marimo__/ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[codz] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py.cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +#uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock +#poetry.toml + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python. 
+# https://pdm-project.org/en/latest/usage/project/#working-with-version-control +#pdm.lock +#pdm.toml +.pdm-python +.pdm-build/ + +# pixi +# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control. +#pixi.lock +# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one +# in the .venv directory. It is recommended not to include this directory in version control. +.pixi + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.envrc +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +# Abstra +# Abstra is an AI-powered process automation framework. +# Ignore directories containing user credentials, local state, and settings. +# Learn more at https://abstra.io/docs +.abstra/ + +# Visual Studio Code +# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore +# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore +# and can be added to the global gitignore or merged into this file. 
However, if you prefer, +# you could uncomment the following to ignore the entire vscode folder +# .vscode/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc + +# Cursor +# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to +# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data +# refer to https://docs.cursor.com/context/ignore-files +.cursorignore +.cursorindexingignore + +# Marimo +marimo/_static/ +marimo/_lsp/ +__marimo__/ + +.venv/ +*.py[cod] + +# IDE +.idea/ +.vscode/ +*.swp +*.swo + +# OS +.DS_Store +Thumbs.db + +clouds.yaml \ No newline at end of file diff --git a/LICENSE b/LICENSE index 261eeb9..29f81d8 100644 --- a/LICENSE +++ b/LICENSE @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index 70d7132..0a97650 100644 --- a/README.md +++ b/README.md @@ -1 +1,27 @@ -# python-t-cloud \ No newline at end of file +# python-t-cloud + +Python SDK for T-Cloud. + +> **Status:** Early development. Not ready for production use. + +## Quick Start + +```bash +uv sync # install dependencies +uv run pytest # run tests +``` + +## Development + +Requires [uv](https://docs.astral.sh/uv/) and Python 3.11+. + +```bash +uv sync --group dev # install with dev dependencies +uv run ruff check src/ # lint +uv run mypy src/ # type check +uv run pytest -v # test core +``` + +## License + +Apache-2.0 \ No newline at end of file diff --git a/conftest.py b/conftest.py new file mode 100644 index 0000000..945df5e --- /dev/null +++ b/conftest.py @@ -0,0 +1,18 @@ +# conftest.py at project root — registers the 'acceptance' marker +# and separates acceptance tests from unit tests. 
+ +import pytest + + +def pytest_collection_modifyitems(config, items): + """Auto-mark tests under acceptance/ directory.""" + for item in items: + if "acceptance" in str(item.fspath): + item.add_marker(pytest.mark.acceptance) + + +def pytest_configure(config): + config.addinivalue_line( + "markers", + "acceptance: marks tests that hit real T Cloud Public API (deselect with '-m \"not acceptance\"')", + ) diff --git a/docs/api/core.rst b/docs/api/core.rst new file mode 100644 index 0000000..0624870 --- /dev/null +++ b/docs/api/core.rst @@ -0,0 +1,26 @@ +Core +==== + +Authentication +-------------- + +.. automodule:: sdk.core.auth + :members: + :show-inheritance: + :exclude-members: model_config, model_fields, model_computed_fields + +Exceptions +---------- + +.. automodule:: sdk.core.exceptions + :members: + :undoc-members: + :show-inheritance: + +Logging +------- + +.. automodule:: sdk.core.log + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/api/index.rst b/docs/api/index.rst new file mode 100644 index 0000000..10ad122 --- /dev/null +++ b/docs/api/index.rst @@ -0,0 +1,7 @@ +API Reference +============= + +.. 
toctree:: + :maxdepth: 2 + + core diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..bbd7004 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,51 @@ +"""Sphinx configuration for the SDK documentation.""" + +import os +import sys + +# Add src/ to path so autodoc can find the package +sys.path.insert(0, os.path.abspath("../src")) + +# -- Project information ----------------------------------------------------- + +project = "SDK" +copyright = "2026, T Cloud Public" +author = "T Cloud Public" +release = "0.1.0" + +# -- General configuration --------------------------------------------------- + +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.napoleon", # Google-style docstrings + "sphinx_autodoc_typehints", # type hints in docs + "sphinx.ext.viewcode", # [source] links + "sphinx.ext.intersphinx", # link to Python stdlib docs +] + +# Napoleon settings (Google-style) +napoleon_google_docstring = True +napoleon_numpy_docstring = False +napoleon_include_init_with_doc = True +napoleon_include_private_with_doc = False +napoleon_use_param = True +napoleon_use_rtype = True + +# Autodoc settings +autodoc_member_order = "bysource" +autodoc_typehints = "description" +autodoc_class_signature = "separated" +autodoc_pydantic_model_show_field_summary = False + +# Intersphinx — link to Python docs +intersphinx_mapping = { + "python": ("https://docs.python.org/3", None), + "pydantic": ("https://docs.pydantic.dev/latest/", None), +} + +# -- Options for HTML output ------------------------------------------------- + +html_theme = "sphinx_rtd_theme" +html_theme_options = { + "navigation_depth": 3, +} diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..6821194 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,9 @@ +SDK Documentation +================= + +.. 
toctree:: + :maxdepth: 2 + :caption: Contents + + quickstart + api/index diff --git a/docs/new_arch.md b/docs/new_arch.md new file mode 100644 index 0000000..45acc2c --- /dev/null +++ b/docs/new_arch.md @@ -0,0 +1,616 @@ +# New Python SDK Architecture for OpenTelekomCloud + +**Status:** Proposal for review + +--- + +## 1. Problems with Current python-otcextensions + +The current Python SDK is built on top of openstacksdk and inherits its architectural decisions, causing systemic issues: + +- **Heavy dependencies.** openstacksdk, keystoneauth1, os-service-types and the entire OpenStack ecosystem pull in dozens of transitive dependencies. Updating or debugging any of them affects the entire SDK. +- **Auth model incompatibility.** AK/SK authentication (AWS Signature V4) does not fit well into keystoneauth — SigV4 requires signing an already-formed HTTP request, while keystoneauth provides headers before request formation. Each new service with AK/SK requires individual workarounds. +- **Implicit contracts.** Request and response models are spread across proxy classes and resources with no clear boundary between input parameters and API responses. + +--- + +## 2. Go SDK Architecture Analysis (gophertelekomcloud) + +### 2.1. 
Overall Structure + +The Go SDK has a minimalistic structure with **3 dependencies** (testify, golang.org/x/crypto, yaml.v2) and a clean layered organization: + +``` +gophertelekomcloud/ +├── golangsdk (root package) +│ ├── auth_options.go # AuthOptions — token/password auth +│ ├── auth_aksk_options.go # AKSKAuthOptions — AK/SK auth +│ ├── auth_option_provider.go # AuthOptionsProvider — unified interface +│ ├── provider_client.go # ProviderClient — HTTP client with auth +│ ├── service_client.go # ServiceClient — base service client +│ ├── endpoint_search.go # EndpointOpts — endpoint discovery +│ ├── results.go # Result — base response type +│ ├── params.go # Parameter serialization utilities +│ └── signer_helper.go # AK/SK signing (AWS SigV4) +│ +├── internal/ +│ ├── build/ # Request body, query strings, headers +│ └── extract/ # JSON response deserialization +│ +├── openstack/ +│ ├── client.go # Factories: NewDNSV2(), NewComputeV2(), etc. +│ ├── common/ # Shared utilities (tags, metadata, pointerto) +│ │ +│ ├── dns/v2/ # ← Typical service +│ │ ├── clusters/ +│ │ │ ├── common.py # Cluster, Spec, Status (shared models) +│ │ │ ├── create.py # CreateOpts + create() +│ │ │ ├── get.py # get() +│ │ │ ├── list.py # ListOpts + list_clusters() +│ │ │ ├── delete.py # DeleteOpts + delete() +│ │ │ └── update.py # UpdateOpts + update() +│ │ ├── recordsets/ +│ │ └── ... +│ │ +│ ├── vpc/v1/ # Each service is isolated +│ ├── cce/v3/ +│ ├── elb/v3/ +│ └── ... (59+ services) +│ +└── pagination/ # Pagination (linked, marker, offset, single) +``` + +### 2.2. Key Architectural Patterns + +#### Pattern 1: Unified Auth Interface + +A minimal `AuthOptionsProvider` interface with a single method `GetIdentityEndpoint()`. Two auth types — `AuthOptions` (token/password) and `AKSKAuthOptions` (AK/SK) — both implement this interface. 
Dispatch in `Authenticate()` determines the auth type via type assertion and calls the appropriate strategy: + +``` +AuthOptionsProvider (interface) + ├── AuthOptions → v3auth() or v3authWithAgency() + └── AKSKAuthOptions → v3AKSKAuth() or authWithAgencyByAKSK() +``` + +AK/SK signing is applied transparently at the `ProviderClient.Request()` level — if `AKSKAuthOptions.AccessKey` is set, the request is signed via `Sign()` before sending. + +#### Pattern 2: Two-Level Client System + +- **ProviderClient** — a single HTTP client that holds auth state (token, project ID, domain ID), reauth logic, retry/backoff. All requests go through its `Request()`. +- **ServiceClient** — a lightweight wrapper that adds endpoint and convenience methods (`Get`, `Post`, `Put`, `Patch`, `Delete`). Created via factories in `client.go` (e.g. `NewDNSV2(provider, endpointOpts)`). + +#### Pattern 3: Each Resource Is an Isolated Package + +Each resource (zones, recordsets, publicips, ...) is a separate package with three files: + +| File | Contents | +|------|----------| +| `requests.go` | CRUD functions (free functions, not methods). Input parameter types (`CreateOpts`, `ListOpts`) with builder interfaces (`CreateOptsBuilder`). Validation via struct tags. | +| `results.go` | Response models (`Zone`, `CreateResult`, `GetResult`). Inherit from `golangsdk.Result` for lazy extraction via `Extract()`. | +| `urls.go` | Pure URL construction functions using `ServiceClient.ServiceURL()`. | + +Functions take `*ServiceClient` as their first argument — no magic proxies or resource classes. + +#### Pattern 4: Minimal External Dependencies + +The Go SDK deliberately avoids OpenStack-specific libraries. Everything, including AK/SK signing, is implemented inside the repository. This provides full control and eliminates breaking changes from upstream. + +--- + +## 3. Target Architecture for New Python SDK + +### 3.1. 
Package Structure + +``` +otc-sdk-python/ +├── pyproject.toml # Minimal deps: httpx, pydantic +│ +├── src/sdk/ +│ ├── __init__.py +│ │ +│ ├── core/ # ← Analogue of root golangsdk package +│ │ ├── auth.py # AuthOptions, AKSKAuthOptions, AuthProvider (Protocol) +│ │ ├── signer.py # AK/SK signing (SigV4) — own implementation +│ │ ├── provider.py # ProviderClient — HTTP client + auth +│ │ ├── service_client.py # ServiceClient — base client for services +│ │ ├── endpoint.py # EndpointOpts, endpoint discovery +│ │ ├── result.py # Base result types +│ │ ├── exceptions.py # Exception hierarchy +│ │ └── pagination.py # Pagination strategies (linked, marker, offset) +│ │ +│ ├── services/ # ← Analogue of openstack/ +│ │ ├── __init__.py +│ │ │ +│ │ ├── dns/ # Each service is a subpackage +│ │ │ ├── __init__.py +│ │ │ ├── v2/ +│ │ │ │ ├── __init__.py +│ │ │ │ ├── client.py # DnsV2Client with factory methods +│ │ │ │ ├── clusters/ +│ │ │ │ │ ├── __init__.py +│ │ │ │ │ ├── common.py # Cluster, Spec, Status (shared models) +│ │ │ │ │ ├──create.py # CreateOpts + create() +│ │ │ │ │ ├── get.py # get() +│ │ │ │ │ ├── list.py # ListOpts + list_clusters() +│ │ │ │ │ ├── delete.py # DeleteOpts + delete() +│ │ │ │ │ └── update.py # UpdateOpts + update() +│ │ │ │ ├── recordsets/ +│ │ │ │ └── ... +│ │ │ └── ... +│ │ │ +│ │ ├── vpc/ +│ │ ├── cce/ +│ │ ├── elb/ +│ │ └── ... +│ │ +│ └── common/ # Shared utilities +│ ├── tags.py +│ └── metadata.py +│ +├── tests/ +│ ├── unit/ +│ │ ├── core/ +│ │ └── services/ +│ └── acceptance/ +│ └── services/ +│ +└── docs/ +``` + +### 3.2. Core Abstractions + +#### AuthConfig + +A single config model that accepts all possible auth parameters. 
The provider auto-detects which auth strategy to use based on what fields are provided: + +- `access_key` + `secret_key` present → **AK/SK** (AWS Signature V4) +- `password` present → **Token** (Keystone V3 password auth) +- `token_id` present → **Token** (Keystone V3 token auth) + +```python +from pydantic import BaseModel, model_validator + +class AuthConfig(BaseModel): + """Single auth config. Provider auto-selects strategy based on provided fields.""" + + identity_endpoint: str + + # Token/Password auth fields + username: str | None = None + user_id: str | None = None + password: str | None = None + token_id: str | None = None + domain_id: str | None = None + domain_name: str | None = None + tenant_id: str | None = None + tenant_name: str | None = None + allow_reauth: bool = False + + # AK/SK auth fields + access_key: str | None = None + secret_key: str | None = None + security_token: str | None = None + + # Common fields + project_id: str | None = None + project_name: str | None = None + region: str | None = None + + # Agency delegation + agency_name: str | None = None + agency_domain_name: str | None = None + delegated_project: str | None = None + + @property + def auth_mode(self) -> str: + """Auto-detect auth strategy from provided fields.""" + if self.access_key and self.secret_key: + return "aksk" + if self.password: + return "password" + if self.token_id: + return "token" + raise ValueError("Cannot determine auth mode: provide access_key+secret_key, password, or token_id") + + @model_validator(mode="after") + def _validate_fields(self): + # Ensure minimum required fields per strategy + self.auth_mode # triggers ValueError if nothing matches + return self +``` + +The user never picks a strategy class — they just pass whatever credentials they have. + +#### ProviderClient + +```python +import httpx + +class ProviderClient: + """Central HTTP client. 
Manages auth, retry, reauth.""" + + def __init__(self, auth: AuthConfig): + self.auth = auth + self.identity_endpoint: str = auth.identity_endpoint + self.token_id: str | None = None + self.project_id: str | None = None + self.domain_id: str | None = None + self._http: httpx.Client = httpx.Client() + self._reauth_func: Callable | None = None + + def authenticate(self) -> None: + """Auto-select and execute auth strategy.""" + match self.auth.auth_mode: + case "aksk": + self._aksk_auth() + case "password": + self._token_auth() + case "token": + self._token_reuse() + + def request(self, method: str, url: str, **kwargs) -> httpx.Response: + """Send request with auth, retry, reauth.""" + # 1. Add auth headers: + # - aksk mode → sign request with AK/SK (SigV4) + # - token mode → add X-Auth-Token header + # 2. Send request + # 3. Handle 401 → reauth → retry + # 4. Handle 429 → backoff → retry + # 5. Handle errors → typed exceptions + ... +``` + +#### ServiceClient + +```python +class ServiceClient: + """Base client for a specific service.""" + + def __init__(self, provider: ProviderClient, endpoint: str, + resource_base: str | None = None): + self.provider = provider + self.endpoint = endpoint + self.resource_base = resource_base or endpoint + + def service_url(self, *parts: str) -> str: + return self.resource_base + "/".join(parts) + + def get(self, url: str, **kwargs) -> httpx.Response: + return self.provider.request("GET", url, **kwargs) + + def post(self, url: str, **kwargs) -> httpx.Response: + return self.provider.request("POST", url, **kwargs) + + # put, patch, delete similarly +``` + +### 3.3. 
Service Implementation Example (DNS Zones) + +#### models.py + +```python +from pydantic import BaseModel + +class CreateZoneOpts(BaseModel): + name: str + email: str | None = None + description: str | None = None + ttl: int | None = None + zone_type: str | None = None + +class Zone(BaseModel): + id: str + name: str + email: str | None = None + description: str | None = None + ttl: int | None = None + status: str | None = None + zone_type: str | None = None + record_num: int | None = None + pool_id: str | None = None + project_id: str | None = None + created_at: str | None = None + updated_at: str | None = None + +class ListZonesOpts(BaseModel): + limit: int | None = None + marker: str | None = None + name: str | None = None + status: str | None = None + type: str | None = None +``` + +#### urls.py + +```python +from otc_sdk.core.service_client import ServiceClient + +ROOT = "zones" + +def base_url(client: ServiceClient) -> str: + return client.service_url(ROOT) + +def zone_url(client: ServiceClient, zone_id: str) -> str: + return client.service_url(ROOT, zone_id) +``` + +#### requests.py + +```python +from typing import Iterator +from otc_sdk.core.service_client import ServiceClient +from .models import CreateZoneOpts, Zone, ListZonesOpts +from . 
import urls + +def create(client: ServiceClient, opts: CreateZoneOpts) -> Zone: + resp = client.post( + urls.base_url(client), + json=opts.model_dump(exclude_none=True), + ) + return Zone.model_validate(resp.json()) + +def get(client: ServiceClient, zone_id: str) -> Zone: + resp = client.get(urls.zone_url(client, zone_id)) + return Zone.model_validate(resp.json()) + +def list_zones(client: ServiceClient, opts: ListZonesOpts | None = None) -> Iterator[Zone]: + """Iterator that automatically walks through all pages.""" + url = urls.base_url(client) + params = opts.model_dump(exclude_none=True) if opts else {} + while url: + resp = client.get(url, params=params) + data = resp.json() + for z in data["zones"]: + yield Zone.model_validate(z) + url = data.get("links", {}).get("next") + params = {} # params already embedded in next URL + +def delete(client: ServiceClient, zone_id: str) -> None: + client.delete(urls.zone_url(client, zone_id)) +``` + +> **Proposal: Generator-based pagination.** In Go, pagination uses `pagination.Pager` with callbacks. In Python, the natural approach is an iterator with `yield` that automatically fetches subsequent pages. The user should never have to think about markers: +> +> ```python +> for zone in zones.list_zones(client): +> print(zone.name) +> ``` + +### 3.4. Client Factory + +```python +# otc_sdk/client.py — main entry point + +class OTCClient: + """Main entry point. Creates ProviderClient and service factories.""" + + def __init__(self, **kwargs): + """Accept auth params directly. Provider auto-detects strategy. 
+ + Usage: + OTCClient(identity_endpoint="...", username="...", password="...") + OTCClient(identity_endpoint="...", access_key="...", secret_key="...") + """ + auth = AuthConfig(**kwargs) + self.provider = ProviderClient(auth) + self.provider.authenticate() + + def dns_v2(self, region: str | None = None) -> ServiceClient: + endpoint = self.provider.find_endpoint("dns", region=region) + return ServiceClient(self.provider, endpoint, + resource_base=endpoint + "v2/") + + def vpc_v1(self, region: str | None = None) -> ServiceClient: + ... +``` + +> **Proposal: Lazy imports for services.** Eagerly importing all 50+ services in `__init__.py` would slow down `import otc_sdk`. Instead, use lazy properties that import a service only on first access: +> +> ```python +> class OTCClient: +> @property +> def dns(self): +> from otc_sdk.services.dns.v2 import client as dns_client +> return dns_client.DnsV2Client(self.provider) +> ``` +> +> This ensures fast application startup — only services that are actually used get imported. No entry point or plugin magic needed. + +### 3.5. Usage Example + +```python +from otc_sdk import OTCClient +from otc_sdk.services.dns.v2 import zones + +# Token authentication — just pass credentials, provider figures out the rest +client = OTCClient( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + domain_name="domain", + tenant_name="eu-de", +) + +# Or AK/SK — same constructor, different fields +client = OTCClient( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK...", + secret_key="SK...", + project_id="...", + region="eu-de", +) + +# API works identically regardless of auth type +dns = client.dns_v2() +zone = zones.create(dns, zones.CreateZoneOpts(name="example.com.", email="admin@example.com")) + +for z in zones.list_zones(dns): + print(z.name) +``` + +--- + +## 4. 
Go → Python Mapping + +| Go SDK | Python SDK | Notes | +|--------|-----------|-------| +| `AuthOptionsProvider` (interface) | `AuthConfig` (single pydantic model) | Auto-detects strategy from fields | +| `AuthOptions` + `AKSKAuthOptions` (separate structs) | `AuthConfig.auth_mode` property | User never picks strategy manually | +| `ProviderClient` | `ProviderClient` | httpx instead of net/http | +| `ServiceClient` | `ServiceClient` | Thin wrapper | +| `Sign()` | `sign_request()` | Own SigV4 implementation | +| `openstack/client.go` (factories) | `OTCClient` | Factory methods | +| `openstack/dns/v2/zones/` package | `services/dns/v2/zones/` package | 1:1 mapping | +| `requests.go` (free functions) | `requests.py` (free functions) | Not class methods | +| `results.go` (struct + Extract) | `models.py` (pydantic BaseModel) | model_validate instead of Extract | +| `urls.go` | `urls.py` | Pure functions | +| `CreateOptsBuilder` (interface) | pydantic `BaseModel` | Validation via pydantic | +| struct tags (`json:`, `q:`, `required:`) | pydantic Field + model_dump | exclude_none for optionals | +| `golangsdk.Result.ExtractInto()` | `pydantic.BaseModel.model_validate()` | Automatic deserialization | +| `pagination.Pager` | Iterator/generator | Pythonic approach | +| `go.mod` (3 dependencies) | `pyproject.toml` (httpx + pydantic) | Minimal dependencies | + +--- + +## 5. Dependencies + +| Dependency | Purpose | Notes | +|------------|---------|-------| +| `httpx` | HTTP client | Sync + async out of the box. MVP is sync-only, architecture is async-ready | +| `pydantic` | Model validation | Replaces Go struct tags | + +Everything else (SigV4 signing, retry, pagination) is **implemented internally**. No openstacksdk, keystoneauth, or os-service-types. + +### 5.1. Dev Tooling: uv + +We use [uv](https://docs.astral.sh/uv/) as the project manager. uv is a Rust-based drop-in replacement for pip, virtualenv, and poetry — 10–50x faster, single binary, no Python required to bootstrap. 
+ +`pyproject.toml` remains the standard project config file. uv simply reads it and handles everything else: + +```toml +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "otc-sdk" +version = "0.1.0" +requires-python = ">=3.11" +dependencies = [ + "httpx>=0.27", + "pydantic>=2.0", +] + +[project.optional-dependencies] +dev = [ + "pytest>=8.0", + "pytest-httpx>=0.30", + "ruff>=0.5", + "mypy>=1.10", +] +``` + +Daily workflow: + +```bash +# Setup (replaces python -m venv + pip install -e .) +uv sync # creates .venv + installs everything from pyproject.toml +uv sync --group dev # includes dev dependencies + +# Running +uv run pytest # runs in the correct venv automatically +uv run mypy src/ # type checking +uv run ruff check src/ # linting + +# Dependency management +uv add httpx # adds to pyproject.toml + installs +uv remove some-package # removes from pyproject.toml + uninstalls + +# Python version management (optional) +uv python install 3.12 # installs Python 3.12 if not present +uv python pin 3.12 # pins project to 3.12 +``` + +uv generates a `uv.lock` file (replaces `poetry.lock` / `pip-compile` output) — deterministic, cross-platform lock file that should be committed to the repository. + +Why uv over poetry/pip: +- **Speed.** Cold install of the project takes ~1s instead of 15–30s. +- **Standards-based.** Uses standard `pyproject.toml`, no vendor lock-in. The project works with plain `pip install -e .` for anyone who doesn't want uv. +- **Single tool.** Replaces pip + virtualenv + pip-tools + pyenv. Simplifies CI and onboarding. + +--- + +## 6. Principles + +1. **Zero service coupling.** Each service is an isolated subpackage. Depends only on `core/`. +2. **Explicit contracts.** Typed pydantic models for every request and response. No `dict` or `**kwargs` in the public API. +3. **Own auth implementation.** Single `AuthConfig` model — provider auto-detects strategy (AK/SK, password, token) from the fields provided. 
SigV4 signing implemented inside the SDK. The user never needs to know which auth class to use. +4. **Free functions for operations.** `zones.create(client, opts)` instead of `client.zones.create(opts)`. Follows the Go pattern — easier to test and generate. +5. **Minimal dependencies.** Only httpx + pydantic. Full control over the codebase. +6. **Type hinting & IDE support.** 100% type hint coverage thanks to pydantic and explicit function signatures. + +> **Proposal: Functional style justification.** The functional approach may look unusual to Python developers accustomed to boto3 or azure-sdk (`client.zones.create(opts)`). However, free functions are stateless — `create`, `list` are pure and take a client as a dependency. This simplifies mocking in tests, eliminates circular imports, and dramatically simplifies code generation. We keep the functional approach. + +> **Proposal: Type hinting as a selling point.** In the current SDK (dynamic proxies from openstacksdk), autocomplete in VS Code and PyCharm barely works. In the new SDK — pydantic models with typed fields + explicit function signatures mean IDEs will suggest `CreateZoneOpts` fields and `Zone` response field types. This is a significant developer experience improvement. + +--- + +## 7. Code Generation Benefits (gen-sdk-tooling) + +This architecture is well suited for automatic SDK generation from RST documentation: + +- **Uniform structure** for every service → Jinja2 templates for `models.py`, `requests.py`, `urls.py`. +- **Pydantic models** are generated directly from request/response specs found in RST. +- **Free functions** instead of classes → simpler templates, less inheritance. +- **No OpenStack dependency** → no need to maintain compatibility with external code. + +--- + +## 8. 
Implementation Plan + +### Phase 1: Core (2–3 weeks) + +- `core/auth.py` — AuthConfig with auto-detection (AK/SK, password, token) +- `core/signer.py` — AK/SK signing (ported from Go) +- `core/provider.py` — ProviderClient with auth, retry, reauth +- `core/service_client.py` — ServiceClient +- `core/exceptions.py` — exception hierarchy +- `core/pagination.py` — pagination strategies + +### Phase 2: Pilot Service (1–2 weeks) + +- Implement DNS v2 manually as a reference +- Write acceptance tests against real OTC +- Debug auth flow for both token and AK/SK + +### Phase 3: Generation (parallel with gen-sdk-tooling) + +- Jinja2 templates for models.py, requests.py, urls.py +- Generate SDK for 2–3 services, compare with reference +- Iterate on generation quality + +### Phase 4: Scaling + +- Generate remaining 50+ services +- CI/CD pipeline for automatic regeneration + +--- + +## 9. Decisions on Open Questions + +> **Proposal:** Close the open questions with the following decisions so this section reads as an action plan rather than uncertainty. + +1. **Async support.** + *Decision:* MVP (Phases 1–2) implements sync API only (`httpx.Client`). The architecture is async-ready: httpx has an identical API for sync and async, and free functions allow adding `async def create(...)` + `httpx.AsyncClient` later with minimal generator changes (template swap). + +2. **Package naming.** + *Decision:* `otc-sdk` (PyPI) / `import otc_sdk`. Short and clear. `otcextensions` is a bad legacy name. + +3. **Service discovery.** + *Decision:* Lazy properties in `OTCClient` (see proposal in section 3.4). Only services that are actually used get imported. No entry points or plugin magic. + +4. **Backward compatibility.** + *Decision:* Full replacement (major version). Maintaining compatibility with the openstacksdk architecture is impossible and counterproductive — it is the root of the problems. 
The old and new SDKs can be installed side by side (`pip install otc-sdk` alongside `pip install python-otcextensions`). + +5. **Paginators.** + *Decision:* Python iterators with `yield` (see proposal in section 3.3). `for zone in zones.list_zones(client)` — automatic traversal of all pages. \ No newline at end of file diff --git a/docs/quickstart.rst b/docs/quickstart.rst new file mode 100644 index 0000000..ec56e9e --- /dev/null +++ b/docs/quickstart.rst @@ -0,0 +1,4 @@ +Quick Start +=========== + +Coming soon. diff --git a/git b/git new file mode 100644 index 0000000..e69de29 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..3e7b24b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,53 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "sdk" +version = "0.1.0" +description = "Python SDK for T Cloud Public" +readme = "README.md" +license = "Apache-2.0" +requires-python = ">=3.13" +dependencies = [ + "httpx>=0.28,<1", + "pydantic>=2.12,<3", + "pyyaml>=6.0.2,<7", +] + +[dependency-groups] +dev = [ + "pytest>=9.0.2,<10", + "pytest-mock>=3.14", + "pytest-httpx>=0.36", + "ruff>=0.15.2", + "mypy>=1.19", +] + +docs = [ + "sphinx>=8.0", + "sphinx-rtd-theme>=3.0", + "sphinx-autodoc-typehints>=2.0", +] + +[tool.hatch.build.targets.wheel] +packages = ["src/sdk"] + +[tool.ruff] +line-length = 88 +target-version = "py313" + +[tool.ruff.lint] +select = ["E", "F", "I", "UP", "B", "SIM"] + +[tool.mypy] +python_version = "3.13" +strict = true + +[tool.pytest.ini_options] +testpaths = ["tests"] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%H:%M:%S" +addopts = "--import-mode=importlib --ignore=tests/acceptance" \ No newline at end of file diff --git a/src/sdk/__init__.py b/src/sdk/__init__.py new file mode 100644 index 0000000..7412908 --- /dev/null +++ b/src/sdk/__init__.py @@ -0,0 +1,5 @@ +"""Python SDK for T Cloud Public.""" + 
+from ._version import __version__ + +__all__ = ["__version__"] \ No newline at end of file diff --git a/src/sdk/_version.py b/src/sdk/_version.py new file mode 100644 index 0000000..3f5c4a7 --- /dev/null +++ b/src/sdk/_version.py @@ -0,0 +1 @@ +__version__ = "0.1.0" diff --git a/src/sdk/common/__init__.py b/src/sdk/common/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/sdk/core/__init__.py b/src/sdk/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/sdk/core/auth.py b/src/sdk/core/auth.py new file mode 100644 index 0000000..377fe57 --- /dev/null +++ b/src/sdk/core/auth.py @@ -0,0 +1,225 @@ +"""Authentication configuration. + +Combines Go SDK's ``AuthOptions`` and ``AKSKAuthOptions`` into a single +``AuthConfig`` model. The provider auto-detects the auth strategy +based on which fields are populated: + +- ``access_key`` + ``secret_key`` → AK/SK (AWS Signature V4) +- ``password`` → Token (Keystone V3 password) +- ``token_id`` → Token (Keystone V3 token reuse) + +The user never picks a strategy class — they just pass credentials. +Validation is delegated to per-strategy validator functions. 
+ +Example:: + + from sdk.core.auth import AuthConfig + + # Password — detected automatically + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="secret", + domain_name="my_domain", + ) + cfg.auth_mode # AuthMode.PASSWORD + + # AK/SK — same class, different fields + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK...", + secret_key="SK...", + ) + cfg.auth_mode # AuthMode.AKSK +""" + +from __future__ import annotations + +from collections.abc import Callable +from enum import StrEnum + +from pydantic import (BaseModel, SecretStr, model_validator, + computed_field, ConfigDict) + +from sdk.core.exceptions import MissingCredentialsError + +from typing import Any + + +class AuthMode(StrEnum): + """Authentication strategy identifier. + + Used by ``ProviderClient`` to select the correct auth flow. + """ + + AKSK = "aksk" + PASSWORD = "password" + TOKEN = "token" + + +# --- Strategy validators --- +def _validate_aksk(cfg: AuthConfig) -> None: + """Validate AK/SK auth fields. + + AK/SK only requires ``access_key`` and ``secret_key``, + which are already guaranteed present by mode detection. + """ + if cfg.access_key is None or cfg.secret_key is None: + raise MissingCredentialsError( + "AK/SK auth requires BOTH access_key and secret_key" + ) + +def _validate_password(cfg: AuthConfig) -> None: + """Validate password auth fields. + + - Exactly one of ``username`` or ``user_id`` must be provided. + - If ``username`` is provided, exactly one of ``domain_id`` or + ``domain_name`` is required. + + Raises: + MissingCredentialsError: If required fields are missing + or incompatible fields are present. 
+ """ + if cfg.password is None: + raise MissingCredentialsError( + "Password is required for password authentication" + ) + + # Corresponds to Go: ErrUsernameOrUserID + if not cfg.username and not cfg.user_id: + raise MissingCredentialsError( + "Password auth requires username or user_id" + ) + + # Corresponds to Go: ErrUsernameOrUserID (second check) + if cfg.username and cfg.user_id: + raise MissingCredentialsError( + "Provide either username or user_id, not both" + ) + + if cfg.username: + # Corresponds to Go: ErrDomainIDOrDomainName + if not cfg.domain_id and not cfg.domain_name: + raise MissingCredentialsError( + "Username auth requires domain_id or domain_name" + ) + if cfg.domain_id and cfg.domain_name: + raise MissingCredentialsError( + "Provide either domain_id or domain_name, not both" + ) + +def _validate_token(cfg: AuthConfig) -> None: + """Validate token reuse fields. + + Token auth should not be mixed with password-based fields. + Mirrors Go SDK's ``ErrUsernameWithToken`` / ``ErrUserIDWithToken``. + + Raises: + MissingCredentialsError: If incompatible fields are present. + """ + if not cfg.token_id: + raise MissingCredentialsError("token_id is required") + + +_STRATEGY_VALIDATORS: dict[AuthMode, Callable[[AuthConfig], None]] = { + AuthMode.AKSK: _validate_aksk, + AuthMode.PASSWORD: _validate_password, + AuthMode.TOKEN: _validate_token, +} + + +class AuthConfig(BaseModel): + """Single auth config for all authentication strategies. + + This model provides a unified interface for all authentication strategies. + The SDK automatically detects the correct authentication mode (AK/SK, + Password, or Token) based on the provided fields. The model is frozen + after instantiation to guarantee consistency between the credentials and + the detected ``auth_mode``. + + Attributes: + identity_endpoint: IAM endpoint URL (e.g., ``OS_AUTH_URL``). + username: Keystone username (used for V3 password auth). + user_id: Keystone user ID (alternative to ``username``). 
+ password: Keystone password (stored securely as ``SecretStr``). + passcode: MFA TOTP verification code. + token_id: Existing Keystone token for direct authentication. + access_key: AK/SK access key (AWS Signature V4). + secret_key: AK/SK secret key (stored securely as ``SecretStr``). + domain_id: Domain ID (required when using ``username``). + domain_name: Domain name (alternative to ``domain_id``). + project_id: Project ID for scoping (common across all strategies). + project_name: Project name for scoping. + region: Target region for endpoint discovery (e.g., ``eu-de``). + tenant_id: Project ID for scoping (alias: ``project_id``). + tenant_name: Project name for scoping. + security_token: Temporary security token (STS) used with temporary AK/SK. + allow_reauth: Whether the SDK should cache and automatically refresh tokens. + agency_name: Agency name for cross-account delegated access. + agency_domain_name: Domain name that owns the target agency. + delegated_project: Specific project to access via the agency delegation. + auth_mode: The strictly detected authentication strategy. Computed + automatically based on the provided credentials. 
+ """ + model_config = ConfigDict(frozen=True) + identity_endpoint: str + + # --- Credentials --- + username: str | None = None + user_id: str | None = None + password: SecretStr | None = None + passcode: str | None = None + token_id: str | None = None + access_key: str | None = None + secret_key: SecretStr | None = None + + # --- Context / Scoping --- + domain_id: str | None = None + domain_name: str | None = None + project_id: str | None = None + project_name: str | None = None + region: str | None = None + tenant_id: str | None = None + tenant_name: str | None = None + + # --- Advanced --- + security_token: str | None = None + allow_reauth: bool = False + agency_name: str | None = None + agency_domain_name: str | None = None + delegated_project: str | None = None + + @model_validator(mode="before") + @classmethod + def _sync_tenant_and_project(cls, data: dict[str, Any]) -> dict[str, Any]: + if isinstance(data, dict): + if "tenant_id" in data and "project_id" not in data: + data["project_id"] = data["tenant_id"] + if "tenant_name" in data and "project_name" not in data: + data["project_name"] = data["tenant_name"] + return data + + @computed_field + @property + def auth_mode(self) -> AuthMode: + """Determines auth strategy based on populated fields. + + Strictly enforces that only one credential type is provided. 
+ """ + if self.token_id is not None: + return AuthMode.TOKEN + if self.access_key is not None or self.secret_key is not None: + return AuthMode.AKSK + if self.password is not None or self.passcode is not None: + return AuthMode.PASSWORD + + raise MissingCredentialsError( + "Incomplete credentials: provide AK/SK, password, or token_id" + ) + + @model_validator(mode="after") + def _validate_credentials(self) -> AuthConfig: + """Detect mode and delegate to the appropriate strategy validator.""" + mode = self.auth_mode + _STRATEGY_VALIDATORS[mode](self) + return self diff --git a/src/sdk/core/config.py b/src/sdk/core/config.py new file mode 100644 index 0000000..cee7399 --- /dev/null +++ b/src/sdk/core/config.py @@ -0,0 +1,123 @@ +"""Configuration loader for clouds.yaml.""" + +from __future__ import annotations + +import logging +import os +from pathlib import Path +from typing import Any + +import yaml + +from sdk.core.auth import AuthConfig, AuthMode + +logger = logging.getLogger(__name__) + + +def load_from_yaml(cloud_name: str = "otc", file_path: str | Path | None = None) -> AuthConfig: + """Load authentication configuration from a clouds.yaml file. + + Follows the standard OpenStack search path order if no explicit + path is provided: + 1. Current directory (./clouds.yaml) + 2. User config (~/.config/openstack/clouds.yaml) + 3. 
System config (/etc/openstack/clouds.yaml) + """ + path_to_load = _find_clouds_yaml(file_path) + logger.debug("Loading cloud config from: %s", path_to_load) + + with open(path_to_load, "r", encoding="utf-8") as f: + try: + data = yaml.safe_load(f) + except yaml.YAMLError as e: + raise ValueError(f"Failed to parse YAML file at {path_to_load}: {e}") from e + + clouds = data.get("clouds", {}) + if cloud_name not in clouds: + raise ValueError(f"Cloud '{cloud_name}' not found in {path_to_load}") + + cloud_config = clouds[cloud_name] + auth_data = cloud_config.get("auth", {}) + + logger.debug("Loaded cloud config: '%s'", cloud_name) + return _map_to_auth_config(cloud_config, auth_data) + + +def _find_clouds_yaml(explicit_path: str | Path | None) -> Path: + """Locate the clouds.yaml file in standard locations.""" + if explicit_path: + p = Path(explicit_path) + if p.exists(): + return p + raise FileNotFoundError(f"Explicit config file not found: {explicit_path}") + + search_paths = [ + Path.cwd() / "clouds.yaml", + Path.home() / ".config" / "openstack" / "clouds.yaml", + Path("/etc/openstack/clouds.yaml"), + ] + + for p in search_paths: + if p.is_file(): + return p + + raise FileNotFoundError( + "Could not find 'clouds.yaml' in standard locations " + "(./, ~/.config/openstack/, /etc/openstack/)." 
+ ) + + +def _resolve_env(value: Any) -> Any: + """Resolve ${ENV_VAR} or $ENV_VAR syntax in strings.""" + if isinstance(value, str) and "$" in value: + # Встроенный метод Python: сам найдет и подставит переменные окружения + return os.path.expandvars(value) + return value + + +def _map_to_auth_config(cloud_config: dict[str, Any], auth_data: dict[str, Any]) -> AuthConfig: + """Map OpenStack clouds.yaml fields to our AuthConfig Pydantic model.""" + + auth = {k: _resolve_env(v) for k, v in auth_data.items()} + cloud = {k: _resolve_env(v) for k, v in cloud_config.items()} + + if auth.get("password"): + mode = AuthMode.PASSWORD + elif any(k in auth for k in ("access_key", "ak", "secret_key", "sk")): + mode = AuthMode.AKSK + else: + mode = AuthMode.TOKEN + + logger.debug("Auth mode resolved: %s", mode) + + raw_url = auth.get("auth_url") or auth.get("identity_endpoint") or "" + auth_url = raw_url.rstrip("/") + if auth_url and not auth_url.endswith("/v3"): + auth_url += "/v3" + + config_kwargs: dict[str, Any] = { + "auth_mode": mode, + "identity_endpoint": auth_url, + "region": cloud.get("region_name", ""), + } + + config_kwargs["project_name"] = auth.get("project_name") or auth.get("tenant_name") + config_kwargs["project_id"] = auth.get("project_id") or auth.get("tenant_id") + + config_kwargs["domain_name"] = ( + auth.get("domain_name") + or auth.get("user_domain_name") + or auth.get("project_domain_name") + ) + config_kwargs["domain_id"] = auth.get("domain_id") or auth.get("user_domain_id") + + if mode == AuthMode.AKSK: + config_kwargs["access_key"] = auth.get("access_key") or auth.get("ak") + config_kwargs["secret_key"] = auth.get("secret_key") or auth.get("sk") + else: + config_kwargs["username"] = auth.get("username") + config_kwargs["password"] = auth.get("password") + + clean_kwargs = {k: v for k, v in config_kwargs.items() if v} + + return AuthConfig(**clean_kwargs) \ No newline at end of file diff --git a/src/sdk/core/endpoint.py b/src/sdk/core/endpoint.py new 
file mode 100644 index 0000000..eb6f142 --- /dev/null +++ b/src/sdk/core/endpoint.py @@ -0,0 +1,169 @@ +"""Endpoint discovery from the IAM service catalog. + +Extracts the endpoint lookup logic into a reusable module. + +The ``EndpointOpts`` dataclass specifies search criteria, and +``find_endpoint()`` searches a service catalog for a matching URL. +``build_endpoint_locator()`` returns a closure that captures the +catalog and default region, ready to be stored on ``ProviderClient``. + +Example:: + + from sdk.core.endpoint import EndpointOpts, find_endpoint + + catalog = [...] # from IAM auth response + opts = EndpointOpts(service_type="compute", region="eu-de") + url = find_endpoint(catalog, opts) + # → "https://ecs.eu-de.otc.t-systems.com/v2.1/" +""" + +from __future__ import annotations + +from collections.abc import Callable +from enum import StrEnum + +from pydantic import BaseModel, Field, AliasChoices + +from sdk.core.exceptions import EndpointNotFoundError, ServiceNotFoundError + + +class Availability(StrEnum): + """Endpoint visibility level. + """ + + PUBLIC = "public" + INTERNAL = "internal" + ADMIN = "admin" + + +class EndpointOpts(BaseModel): + """Search criteria for locating a service endpoint. + At minimum, ``service_type`` must be provided. + + Attributes: + service_type: Catalog service type (e.g. ``compute``, ``dns``). + name: Optional service name filter (e.g. ``nova``). + region: Region to match. Empty means accept any region. + availability: Endpoint interface visibility. 
+ """ + model_config = {"frozen": True} + + service_type: str + name: str = "" + region: str = "" + availability: Availability = Availability.PUBLIC + + +class CatalogEndpoint(BaseModel): + interface: str + region_id: str = Field(default="", + validation_alias=AliasChoices("region_id", "region") + ) + url: str + + +class CatalogEntry(BaseModel): + type: str + name: str = "" + endpoints: list[CatalogEndpoint] = Field(default_factory=list) + + +def find_endpoint( + catalog: list[CatalogEntry], + opts: EndpointOpts, +) -> str: + """Find a single endpoint URL from the service catalog. + + Searches catalog entries for a match on ``service_type``, + optional ``name``, ``region``, and ``availability``. + Falls back to wildcard (``*``) region entries if no exact + match is found. + + Args: + catalog: Service catalog entries from IAM response. + opts: Search criteria. + + Returns: + Endpoint URL string (always ends with ``/``). + + Raises: + ServiceNotFoundError: No catalog entry matches the type. + EndpointNotFoundError: Entry found but no endpoint matches + region/availability. 
+ """ + matched: list[str] = [] + wildcard: list[str] = [] + service_found = False + + for entry in catalog: + if entry.type != opts.service_type: + continue + if opts.name and entry.name != opts.name: + continue + + service_found = True + + for ep in entry.endpoints: + if ep.interface != opts.availability: + continue + + if not ep.url: + continue + + url = _normalize_url(ep.url) + + if not opts.region or ep.region_id == opts.region: + matched.append(url) + elif ep.region_id == "*": + wildcard.append(url) + + if not matched: + matched = wildcard + + if matched: + return matched[0] + + if not service_found: + raise ServiceNotFoundError(service=opts.service_type) + + raise EndpointNotFoundError( + service=opts.service_type, region=opts.region, + ) + + +EndpointLocator = Callable[[EndpointOpts], str] +"""Callable type that resolves an ``EndpointOpts`` → URL string.""" + + +def build_endpoint_locator( + catalog: list[CatalogEntry], + default_region: str = "", +) -> EndpointLocator: + """Build an endpoint locator closure from a service catalog. + + Returns a callable that accepts ``EndpointOpts`` (or keyword + shorthand) and resolves the endpoint URL. If the opts have + no region set, ``default_region`` is used. + + This is the Python equivalent of the Go SDK pattern where + ``ProviderClient.EndpointLocator`` is a ``func(EndpointOpts) string``. + + Args: + catalog: Service catalog from IAM. + default_region: Fallback region from auth config. + + Returns: + Callable ``(EndpointOpts) → str``. 
+ """ + + def locator(opts: EndpointOpts) -> str: + if not opts.region and default_region: + opts = opts.model_copy(update={"region": default_region}) + return find_endpoint(catalog, opts) + + return locator + + +def _normalize_url(url: str) -> str: + """Ensure URL ends with ``/``.""" + return url if url.endswith("/") else url + "/" diff --git a/src/sdk/core/exceptions/__init__.py b/src/sdk/core/exceptions/__init__.py new file mode 100644 index 0000000..91aa6e6 --- /dev/null +++ b/src/sdk/core/exceptions/__init__.py @@ -0,0 +1,99 @@ +"""Exception hierarchy for the SDK. + +Hierarchy:: + + SDKError + ├── MissingInputError + ├── InvalidInputError + ├── AuthError + │ ├── MissingCredentialsError + │ ├── ReauthError + │ └── PostReauthError + ├── EndpointError + │ ├── ServiceNotFoundError + │ └── EndpointNotFoundError + ├── HttpError + │ ├── BadRequestError (400) + │ ├── UnauthorizedError (401) + │ ├── ForbiddenError (403) + │ ├── NotFoundError (404) + │ ├── MethodNotAllowedError (405) + │ ├── RequestTimeoutError (408) + │ ├── ConflictError (409) + │ ├── TooManyRequestsError (429) + │ ├── InternalServerError (500) + │ └── ServiceUnavailableError (503) + ├── ResourceNotFoundError + ├── MultipleResourcesFoundError + └── SDKTimeoutError +""" + +from .auth import ( + AuthError, + MissingCredentialsError, + PostReauthError, + ReauthError, +) +from .base import ( + InvalidInputError, + MissingInputError, + SDKError, +) +from .endpoint import ( + EndpointError, + EndpointNotFoundError, + ServiceNotFoundError, +) +from .response import ( + BadRequestError, + ConflictError, + ForbiddenError, + HttpError, + InternalServerError, + MethodNotAllowedError, + NotFoundError, + RequestTimeoutError, + ServiceUnavailableError, + TooManyRequestsError, + UnauthorizedError, + raise_for_status, +) +from .lookup import ( + MultipleResourcesFoundError, + ResourceNotFoundError, +) +from .timeout import SDKTimeoutError + +__all__ = [ + # base + "SDKError", + "MissingInputError", + 
"InvalidInputError", + # auth + "AuthError", + "MissingCredentialsError", + "ReauthError", + "PostReauthError", + # endpoint + "EndpointError", + "ServiceNotFoundError", + "EndpointNotFoundError", + # response + "HttpError", + "BadRequestError", + "UnauthorizedError", + "ForbiddenError", + "NotFoundError", + "MethodNotAllowedError", + "RequestTimeoutError", + "ConflictError", + "TooManyRequestsError", + "InternalServerError", + "ServiceUnavailableError", + "raise_for_status", + # lookup + "ResourceNotFoundError", + "MultipleResourcesFoundError", + # timeout + "SDKTimeoutError", +] diff --git a/src/sdk/core/exceptions/auth.py b/src/sdk/core/exceptions/auth.py new file mode 100644 index 0000000..4e7e1bd --- /dev/null +++ b/src/sdk/core/exceptions/auth.py @@ -0,0 +1,58 @@ +"""Authentication-related exceptions. +""" + +from __future__ import annotations + +from .base import SDKError + +# Corresponds to Go SDK's ``ErrUnableToReauthenticate`` and +# ``ErrErrorAfterReauthentication``. + +class AuthError(SDKError): + """Authentication-related error.""" + + +class MissingCredentialsError(AuthError): + """Required credentials were not provided. + + Raised when ``AuthConfig`` cannot determine an auth strategy + from the provided fields. + """ + + +class ReauthError(AuthError): + """Re-authentication failed. + + Args: + original: The underlying exception that caused the failure. + """ + # Corresponds to Go SDK's ``ErrUnableToReauthenticate``. + def __init__(self, original: Exception | None = None) -> None: + self.original = original + msg = ( + f"Unable to re-authenticate: {original}" + if original + else "Unable to re-authenticate" + ) + super().__init__(msg) + + +class PostReauthError(AuthError): + """Request failed after successful re-authentication. + + Raised when the token was refreshed successfully, but the + subsequent request still failed (usually an HTTP error). + + Args: + original: The underlying exception from the failed request. 
+ """ + # Corresponds to Go SDK's ``ErrErrorAfterReauthentication``. + def __init__(self, original: Exception | None = None) -> None: + self.original = original + msg = ( + f"Successfully re-authenticated, but got error " + f"executing request: {original}" + if original + else "Successfully re-authenticated, but got error executing request" + ) + super().__init__(msg) diff --git a/src/sdk/core/exceptions/base.py b/src/sdk/core/exceptions/base.py new file mode 100644 index 0000000..785685b --- /dev/null +++ b/src/sdk/core/exceptions/base.py @@ -0,0 +1,45 @@ +"""Base exception types for the SDK. +""" + +# Corresponds to Go SDK's ``BaseError``, ``ErrMissingInput``, +# and ``ErrInvalidInput``. + +from __future__ import annotations + +from typing import Any + + +class SDKError(Exception): + """Base exception for all SDK errors. + All SDK exceptions inherit from this class, so + ``except SDKError`` catches every SDK-related error. + """ + # Corresponds to Go SDK's ``BaseError``. + +class MissingInputError(SDKError): + """Required input argument was not provided. + + Args: + argument: Name of the missing argument. + """ + # Corresponds to Go SDK's ``ErrMissingInput``. + def __init__(self, argument: str) -> None: + self.argument = argument + super().__init__(f"Missing input for argument [{argument}]") + + +class InvalidInputError(SDKError): + """Invalid value provided for an input argument. + + Args: + argument: Name of the argument. + value: The invalid value that was provided. + """ + # Corresponds to Go SDK's ``ErrInvalidInput``. + + def __init__(self, argument: str, value: Any) -> None: + self.argument = argument + self.value = value + super().__init__( + f"Invalid input provided for argument [{argument}]: [{value!r}]" + ) diff --git a/src/sdk/core/exceptions/endpoint.py b/src/sdk/core/exceptions/endpoint.py new file mode 100644 index 0000000..33573e0 --- /dev/null +++ b/src/sdk/core/exceptions/endpoint.py @@ -0,0 +1,52 @@ +"""Endpoint discovery exceptions. 
+""" + +# Corresponds to Go SDK's ``ErrServiceNotFound`` and +# ``ErrEndpointNotFound``. + +from __future__ import annotations + +from .base import SDKError + + +class EndpointError(SDKError): + """Endpoint discovery error.""" + + +class ServiceNotFoundError(EndpointError): + """No matching service found in the service catalog. + + Args: + service: Name of the service that was not found. + """ + # Corresponds to Go SDK's ``ErrServiceNotFound``. + + def __init__(self, service: str = "") -> None: + self.service = service + msg = ( + f"No suitable service could be found in the " + f"service catalog: {service}" + if service + else "No suitable service could be found in the service catalog" + ) + super().__init__(msg) + + +class EndpointNotFoundError(EndpointError): + """No matching endpoint found for the service. + + Args: + service: Name of the service. + region: Region where the endpoint was expected. + """ + # Corresponds to Go SDK's ``ErrEndpointNotFound``. + + def __init__(self, service: str = "", region: str = "") -> None: + self.service = service + self.region = region + parts = ["No suitable endpoint could be found in the service catalog"] + if service: + parts.append(f"for service '{service}'") + if region: + parts.append(f"in region '{region}'") + super().__init__(" ".join(parts)) diff --git a/src/sdk/core/exceptions/lookup.py b/src/sdk/core/exceptions/lookup.py new file mode 100644 index 0000000..21b9562 --- /dev/null +++ b/src/sdk/core/exceptions/lookup.py @@ -0,0 +1,46 @@ +"""Resource lookup exceptions. +""" + +# Corresponds to Go SDK's ``ErrResourceNotFound`` and +# ``ErrMultipleResourcesFound``. + +from __future__ import annotations + +from .base import SDKError + + +class ResourceNotFoundError(SDKError): + """Resource not found during lookup by name. + + Raised when a find-by-name operation returns no results. + + Args: + resource_type: Type of resource (e.g. ``"VPC"``, ``"Subnet"``). + name: Name that was searched for. 
+ """ + # Corresponds to Go SDK's ``ErrResourceNotFound``. + def __init__(self, resource_type: str, name: str) -> None: + self.resource_type = resource_type + self.name = name + super().__init__(f"Unable to find {resource_type} with name {name}") + + +class MultipleResourcesFoundError(SDKError): + """Multiple resources found during lookup by name. + + Raised when a find-by-name operation returns more than + one result and a single match was expected. + + Args: + resource_type: Type of resource (e.g. ``"VPC"``, ``"Subnet"``). + name: Name that was searched for. + count: Number of matching resources found. + """ + # Corresponds to Go SDK's ``ErrMultipleResourcesFound``. + def __init__(self, resource_type: str, name: str, count: int) -> None: + self.resource_type = resource_type + self.name = name + self.count = count + super().__init__( + f"Found {count} {resource_type}s matching {name}" + ) diff --git a/src/sdk/core/exceptions/response.py b/src/sdk/core/exceptions/response.py new file mode 100644 index 0000000..5bb00a5 --- /dev/null +++ b/src/sdk/core/exceptions/response.py @@ -0,0 +1,237 @@ +"""HTTP response exceptions. +""" + +# Corresponds to Go SDK's ``ErrUnexpectedResponseCode`` and all +# ``ErrDefaultNNN`` types. + +from __future__ import annotations + +from typing import Any + +from .base import SDKError + + +class HttpError(SDKError): + """HTTP response error. + + Stores full request context for debuggability. + + Args: + method: HTTP method (GET, POST, etc.). + url: Request URL. + body: Response body text. + expected: List of expected HTTP status codes. + headers: Response headers. + status_code: HTTP status code. Overrides the class-level + default when constructing a generic ``HttpError``. + + Attributes: + status_code: HTTP status code. Set as a class variable + on subclasses (e.g. ``BadRequestError.status_code == 400``). + request_id: Value of the ``X-Request-Id`` response header, + extracted automatically for OTC request tracing. 
+ """ + # Corresponds to Go SDK's ``ErrUnexpectedResponseCode``. + status_code: int = 0 + + def __init__( + self, + *, + method: str, + url: str, + body: str, + expected: list[int] | None = None, + headers: dict[str, Any] | None = None, + status_code: int | None = None, + ) -> None: + self.method = method + self.url = url + self.body = body + self.expected = expected or [] + self.headers = headers or {} + self.request_id: str = self.headers.get("x-request-id", "") + if status_code is not None: + self.status_code = status_code + super().__init__(self._format_message()) + + def _format_message(self) -> str: + msg = ( + f"Expected HTTP response code {self.expected} when accessing " + f"[{self.method} {self.url}], but got {self.status_code} instead" + ) + if self.body: + msg += f"\n{self.body}" + return msg + + +# --- Status-code specific errors --- + + +class BadRequestError(HttpError): + """400 Bad Request. + """ + # Corresponds to Go SDK's ``ErrDefault400``. + status_code = 400 + + def _format_message(self) -> str: + return ( + f"Bad request with: [{self.method} {self.url}], " + f"error message: {self.body}" + ) + + +class UnauthorizedError(HttpError): + """401 Unauthorized. + """ + # Corresponds to Go SDK's ``ErrDefault401``. + status_code = 401 + + def _format_message(self) -> str: + return f"Authentication failed, error message: {self.body}" + + +class ForbiddenError(HttpError): + """403 Forbidden. + """ + # Corresponds to Go SDK's ``ErrDefault403``. + + status_code = 403 + + def _format_message(self) -> str: + return f"Action forbidden, error message: {self.body}" + + +class NotFoundError(HttpError): + """404 Not Found. + """ + # Corresponds to Go SDK's ``ErrDefault404``. + status_code = 404 + + def _format_message(self) -> str: + return ( + f"Resource not found: [{self.method} {self.url}], " + f"error message: {self.body}" + ) + + +class MethodNotAllowedError(HttpError): + """405 Method Not Allowed. + """ + # Corresponds to Go SDK's ``ErrDefault405``. 
+ status_code = 405 + + def _format_message(self) -> str: + return "Method not allowed" + + +class RequestTimeoutError(HttpError): + """408 Request Timeout. + """ + # Corresponds to Go SDK's ``ErrDefault408``. + status_code = 408 + + def _format_message(self) -> str: + return "The server timed out waiting for the request" + + +class ConflictError(HttpError): + """409 Conflict. + """ + # Corresponds to Go SDK's ``ErrDefault409``. + status_code = 409 + + +class TooManyRequestsError(HttpError): + """429 Too Many Requests. + Checks ``Retry-After`` response header when present. + """ + # Corresponds to Go SDK's ``ErrDefault429``. + status_code = 429 + + def _format_message(self) -> str: + retry_after = self.headers.get("Retry-After", "") + msg = ( + "Too many requests have been sent in a given amount of time." + ) + if retry_after: + msg += f" Retry after {retry_after}s." + else: + msg += " Pause requests, wait up to one minute, and try again." + return msg + + +class InternalServerError(HttpError): + """500 Internal Server Error. + """ + # Corresponds to Go SDK's ``ErrDefault500``. + status_code = 500 + + def _format_message(self) -> str: + return "Internal Server Error" + + +class ServiceUnavailableError(HttpError): + """503 Service Unavailable. + """ + # Corresponds to Go SDK's ``ErrDefault503``. + status_code = 503 + + def _format_message(self) -> str: + return ( + "The service is currently unable to handle the request due to " + "a temporary overloading or maintenance. This is a temporary " + "condition. Try again later." 
+ ) + + +# --- Mapping from status code to exception class --- + +HTTP_ERROR_MAP: dict[int, type[HttpError]] = { + 400: BadRequestError, + 401: UnauthorizedError, + 403: ForbiddenError, + 404: NotFoundError, + 405: MethodNotAllowedError, + 408: RequestTimeoutError, + 409: ConflictError, + 429: TooManyRequestsError, + 500: InternalServerError, + 503: ServiceUnavailableError, +} + + +def raise_for_status( + status_code: int, + *, + method: str, + url: str, + body: str, + expected: list[int] | None = None, + headers: dict[str, Any] | None = None, +) -> None: + """Raise the appropriate ``HttpError`` for a non-2xx status code. + + Looks up the status code in ``HTTP_ERROR_MAP`` and raises the + matching exception. Falls back to a generic ``HttpError`` for + unmapped codes. + + Args: + status_code: HTTP response status code. + method: HTTP method (GET, POST, etc.). + url: Request URL. + body: Response body text. + expected: List of expected HTTP status codes. + headers: Response headers. + + Raises: + HttpError: Always raised (specific subclass when possible). + """ + exc_class = HTTP_ERROR_MAP.get(status_code, HttpError) + raise exc_class( + method=method, + url=url, + body=body, + expected=expected, + headers=headers, + status_code=status_code, + ) diff --git a/src/sdk/core/exceptions/timeout.py b/src/sdk/core/exceptions/timeout.py new file mode 100644 index 0000000..477a853 --- /dev/null +++ b/src/sdk/core/exceptions/timeout.py @@ -0,0 +1,18 @@ +"""Client-side timeout exception. + +Corresponds to Go SDK's ``ErrTimeOut``. +""" + +from __future__ import annotations + +from .base import SDKError + + +class SDKTimeoutError(SDKError): + """Client-side operation timeout. + + Distinct from ``RequestTimeoutError`` (HTTP 408) which is a + server response. This error is raised when the SDK's own + timeout is exceeded, e.g. waiting for a resource to become active. + """ + # Corresponds to Go SDK's ``ErrTimeOut``. 
diff --git a/src/sdk/core/opts.py b/src/sdk/core/opts.py new file mode 100644 index 0000000..017b9a3 --- /dev/null +++ b/src/sdk/core/opts.py @@ -0,0 +1,61 @@ +"""Base classes for request options.""" + +from __future__ import annotations + +from typing import Any, ClassVar + +from pydantic import BaseModel + + +class BaseOpts(BaseModel): + """Base class for request body options. + + Subclasses set ``_wrapper_key`` to wrap the payload in a named object + (e.g. ``{"vpc": {...}}``). Without it the body is returned flat. + + Field-omission rules (``exclude_none=True, exclude_defaults=True``): + + * ``name: str = ""`` -> omitted when caller does not set it. + * ``name: str | None = None`` -> ``None`` is omitted, but any other + value (including ``""``) is sent, allowing callers to clear + server-side fields. + """ + + _wrapper_key: ClassVar[str | None] = None + + def to_request_body(self) -> dict[str, Any]: + body = self.model_dump( + exclude_none=True, + exclude_defaults=True, + by_alias=True, + ) + if self._wrapper_key is not None: + return {self._wrapper_key: body} + return body + + +class BaseQueryOpts(BaseModel): + """Base class for query string options. + + Output is a flat ``dict[str, str]`` ready for an HTTP client's + ``params=`` argument. ``None`` and empty strings are dropped; numeric + zero and ``False`` are preserved. Nested models or collections raise + :class:`TypeError`. + """ + + def to_query_params(self) -> dict[str, str]: + raw = self.model_dump(exclude_none=True, by_alias=True) + params: dict[str, str] = {} + for key, value in raw.items(): + if value == "": + continue + if isinstance(value, (dict, list)): + raise TypeError( + f"Query param {key!r} has non-scalar value {value!r}; " + "query strings only support scalar values." 
+ ) + if isinstance(value, bool): + params[key] = "true" if value else "false" + else: + params[key] = str(value) + return params diff --git a/src/sdk/core/pagination.py b/src/sdk/core/pagination.py new file mode 100644 index 0000000..c65026e --- /dev/null +++ b/src/sdk/core/pagination.py @@ -0,0 +1,316 @@ +"""Pagination strategies for list operations. + +Each strategy is a generator function that yields items one by one, +automatically fetching the next page when needed. + +Three strategies are supported: + +- **Marker**: Next page determined by ``marker`` query param set to + the last item's ID (e.g. CCE clusters, ECS servers). +- **Offset**: Next page determined by ``offset`` + ``limit`` query + params (e.g. SMN topics). +- **Linked**: Next page URL extracted from response body + (e.g. ``links.next`` field — Keystone-style pagination). + +Additionally, ``single_page`` is a trivial helper for non-paginated +list endpoints that return all items at once. + +Example:: + + from sdk.core.pagination import marker_paginate + + # yields individual cluster dicts, fetching pages automatically + for cluster in marker_paginate( + client=service_client, + path="clusters", + items_key="items", + ): + print(cluster["metadata"]["name"]) +""" + +from __future__ import annotations +from pydantic import BaseModel +from typing import TypeVar +from collections.abc import Generator +from typing import Any +from urllib.parse import parse_qs, urlencode, urlparse, urlunparse, urljoin + +from sdk.core.service_client import ServiceClient + +T = TypeVar("T", bound=BaseModel) +PaginatedItem = T | dict[str, Any] + +def marker_paginate( + client: ServiceClient, + path: str, + *, + items_key: str, + model: type[T] | None = None, + marker_key: str = "id", + limit: int = 0, + params: dict[str, str] | None = None, +) -> Generator[PaginatedItem, None, None]: + """Paginate using marker-based strategy. + + Fetches pages by setting ``marker`` query param to the last + item's ``marker_key`` value. 
Stops when a page returns + fewer items than ``limit`` or an empty list. + + Args: + client: Service client to send requests through. + path: Relative resource path (e.g. ``"servers/detail"``). + items_key: JSON key containing the items list + (e.g. ``"servers"``, ``"items"``). + marker_key: Field name on each item used as the marker. + Default: ``"id"``. + model: Optional Pydantic model class. If provided, raw JSON + items will be validated and parsed into instances of this + class. If omitted, raw dicts are returned. + limit: Page size. If 0, the server default is used. + params: Additional query parameters. + + Yields: + Parsed Pydantic model instances (if ``model`` is provided), + otherwise raw resource dicts. + """ + query: dict[str, str] = dict(params) if params else {} + if limit: + query["limit"] = str(limit) + + while True: + url = _build_url(path, query) + _, items = _fetch_page(client, url, items_key) + if not items: + return + + for item in items: + yield model.model_validate(item) if model else item + + if limit and len(items) < limit: + return + last = items[-1] + raw_marker = last.get(marker_key) + + if raw_marker is None or raw_marker == "": + return + + marker_str = str(raw_marker) + if query.get("marker") == marker_str: + return + + query["marker"] = marker_str + +def offset_paginate( + client: ServiceClient, + path: str, + *, + items_key: str, + model: type[T] | None = None, + limit: int, + start_offset: int = 0, + params: dict[str, str] | None = None, +) -> Generator[PaginatedItem, None, None]: + """Paginate using offset-based strategy. + + Increments ``offset`` by ``limit`` on each page. Stops when + a page returns an empty list or fewer items than ``limit``. + + This mirrors Go SDK's ``OffsetPageBase`` behavior. + + Args: + client: Service client to send requests through. + path: Relative resource path. + items_key: JSON key containing the items list. + model: Optional Pydantic model class. 
If provided, raw JSON + items will be validated and parsed into instances of this + class. If omitted, raw dicts are returned. + limit: Page size (required for offset pagination). + start_offset: Starting offset. Default: 0. + params: Additional query parameters. + + Yields: + Parsed Pydantic model instances (if ``model`` is provided), + otherwise raw resource dicts. + """ + if limit <= 0: + raise ValueError("Limit must be strictly positive for offset pagination.") + query: dict[str, str] = dict(params) if params else {} + query["limit"] = str(limit) + offset = start_offset + + while True: + query["offset"] = str(offset) + url = _build_url(path, query) + _, items = _fetch_page(client, url, items_key) + + if not items: + return + + for item in items: + yield model.model_validate(item) if model else item + + if len(items) < limit: + return + + offset += limit + + +def linked_paginate( + client: ServiceClient, + path: str, + *, + items_key: str, + model: type[T] | None = None, + link_path: list[str] | None = None, + params: dict[str, str] | None = None, +) -> Generator[PaginatedItem, None, None]: + """Paginate using linked (next URL) strategy. + + Follows a ``next`` link embedded in the response body. + The link path defaults to ``["links", "next"]`` (Keystone + convention) but can be customized. + + This mirrors Go SDK's ``LinkedPageBase`` behavior. + + Args: + client: Service client to send requests through. + path: Relative resource path for the first page. + items_key: JSON key containing the items list. + model: Optional Pydantic model class. If provided, raw JSON + items will be validated and parsed into instances of this + class. If omitted, raw dicts are returned. + link_path: List of keys to traverse in the response + to find the next page URL. Default: ``["links", "next"]``. + params: Additional query parameters for the first request. + + Yields: + Parsed Pydantic model instances (if ``model`` is provided), + otherwise raw resource dicts. 
+ """ + if link_path is None: + link_path = ["links", "next"] + + url = _build_url(path, params) if params else path + seen_urls: set[str] = set() + + while url: + if url in seen_urls: + break + seen_urls.add(url) + + data, items = _fetch_page(client, url, items_key) + + if not items: + return + + for item in items: + yield model.model_validate(item) if model else item + + next_url = _extract_link(data, link_path) + if not next_url: + return + url = urljoin(url, next_url) + + +def single_page( + client: ServiceClient, + path: str, + *, + items_key: str, + model: type[T] | None = None, + params: dict[str, str] | None = None, +) -> list[PaginatedItem]: + """Fetch a single (non-paginated) list response. + + Convenience wrapper for endpoints that return all items + at once. Returns a plain list instead of a generator. + + This mirrors Go SDK's ``SinglePageBase``. + + Args: + client: Service client to send requests through. + path: Relative resource path. + items_key: JSON key containing the items list. + model: Optional Pydantic model class. If provided, raw JSON + items will be validated and parsed into instances of this + class. If omitted, raw dicts are returned. + params: Additional query parameters. + + Returns: + Parsed Pydantic model instances (if ``model`` is provided), + otherwise raw resource dicts. + """ + url = _build_url(path, params) if params else path + _, items = _fetch_page(client, url, items_key) + + if model: + return [model.model_validate(item) for item in items] + return items + + +# ====================================================================== +# Internal helpers +# ====================================================================== + + +def _build_url(path: str, params: dict[str, str] | None) -> str: + """Append query parameters to a path. + + If the path already contains query params, they are merged + (new params override existing ones). + + Args: + path: Base path, possibly with existing query string. 
+ params: Query parameters to add. + + Returns: + Path with query string. + """ + if not params: + return path + + parsed = urlparse(path) + existing = parse_qs(parsed.query, keep_blank_values=True) + merged = {k: v[0] if len(v) == 1 else v for k, v in existing.items()} + merged.update(params) + + new_query = urlencode(merged, doseq=True) + return urlunparse(parsed._replace(query=new_query)) + + +def _extract_link(data: dict[str, Any], path: list[str]) -> str: + """Traverse nested dict to extract a link URL. + + Args: + data: Response body dict. + path: Key path to traverse (e.g. ``["links", "next"]``). + + Returns: + URL string, or empty string if not found. + """ + if not path: + return "" + + current: Any = data + for key in path: + if not isinstance(current, dict): + return "" + current = current.get(key) + if current is None: + return "" + return str(current) if current else "" + + +def _fetch_page( + client: ServiceClient, + url: str, + items_key: str +) -> tuple[dict[str, Any], list[dict[str, Any]]]: + """Fetch a page, parse JSON, and strictly validate the items key.""" + resp = client.get(url) + data = resp.json() + + if items_key not in data: + raise ValueError(f"Expected key '{items_key}' not found in API response") + + return data, data[items_key] diff --git a/src/sdk/core/provider.py b/src/sdk/core/provider.py new file mode 100644 index 0000000..13ea265 --- /dev/null +++ b/src/sdk/core/provider.py @@ -0,0 +1,810 @@ +"""Provider client — central HTTP client for the SDK. + +Combines HTTP transport (via ``httpx``), credential management, +and retry logic into a single client. + +The ``authenticate()`` method dispatches to the correct auth flow +based on ``AuthConfig.auth_mode`` and presence of agency fields: + +.. code-block:: text + + AuthConfig.auth_mode + ├── PASSWORD / TOKEN + │ ├── agency_name? → _v3_auth_with_agency() + │ └── else → _v3_auth() + └── AKSK + ├── agency_name? 
→ _aksk_auth_with_agency() + └── else → _aksk_auth() + +Example:: + + from sdk.core.auth import AuthConfig + from sdk.core.provider import ProviderClient + + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="secret", + domain_name="my_domain", + tenant_name="eu-de", + ) + client = ProviderClient(cfg) + client.authenticate() + # client.token_id is now set, endpoint_locator is ready +""" + +from __future__ import annotations + +import logging +import re +import time +from typing import Any + +import httpx + +from sdk.core.auth import AuthConfig, AuthMode +from sdk.core.endpoint import (EndpointLocator, + build_endpoint_locator, + CatalogEntry) +from sdk.core.exceptions import ( + ReauthError, + UnauthorizedError, + raise_for_status +) +from sdk.core.signer import SignOptions, sign_request + +logger = logging.getLogger(__name__) + +USER_AGENT = "python-t-cloud/0.1.0" +"""Default User-Agent header value.""" + +_DEFAULT_OK_CODES: dict[str, list[int]] = { + "GET": [200], + "POST": [200, 201, 202], + "PUT": [200, 201, 202], + "PATCH": [200, 204], + "DELETE": [200, 202, 204], + "HEAD": [204, 206], +} + +_DEFAULT_MAX_BACKOFF_RETRIES = 20 +"""Maximum number of retries on 429 (Too Many Requests).""" + +_DEFAULT_BACKOFF_TIMEOUT = 60.0 +"""Seconds to wait before retrying on 429.""" + +_DEFAULT_RETRY_COUNT = 1 +"""Number of retries on gateway errors (502, 504).""" + +_DEFAULT_RETRY_TIMEOUT = 0.5 +"""Seconds to wait before retrying on gateway errors.""" + +_VERSION_SUFFIX = re.compile(r"/v\d+(\.\d+)?$") + + +class ProviderClient: + """Central HTTP client for OTC API interaction. + + Holds authentication state (token, AK/SK credentials), + project/domain context, and an endpoint locator built from + the IAM service catalog. All service clients reference a single + ``ProviderClient`` instance. + + .. note:: + + This implementation is not thread-safe. 
If thread safety is needed, + add external synchronization around ``authenticate()`` and + ``request()``. + + Args: + auth_config: Authentication configuration. + http_client: Optional pre-configured httpx client. + Created automatically if not provided. + max_backoff_retries: Max retries on 429 responses. + backoff_timeout: Wait time (seconds) per 429 retry. + + Attributes: + token_id: Current Keystone token. + project_id: Scoped project ID from auth response. + user_id: Authenticated user ID. + domain_id: Domain ID from auth response. + region_id: Region derived from auth config. + endpoint_locator: Callable to resolve service endpoints. + """ + + def __init__( + self, + auth_config: AuthConfig, + *, + http_client: httpx.Client | None = None, + max_backoff_retries: int = _DEFAULT_MAX_BACKOFF_RETRIES, + backoff_timeout: float = _DEFAULT_BACKOFF_TIMEOUT, + ) -> None: + self.auth_config = auth_config + self._owns_http_client = http_client is None + self._http = http_client or httpx.Client( + headers={"User-Agent": USER_AGENT}, + timeout=httpx.Timeout(30.0), + ) + + self.token_id: str = "" + self.project_id: str = "" + self.user_id: str = "" + self.domain_id: str = "" + self.region_id: str = auth_config.region or "" + self.endpoint_locator: EndpointLocator | None = None + + self.max_backoff_retries = max_backoff_retries + self.backoff_timeout = backoff_timeout + + # ------------------------------------------------------------------ + # Public API + # ------------------------------------------------------------------ + + @property + def identity_base(self) -> str: + """IAM base URL (without version path). + + Strips ``/v3``, ``/v3.0``, etc. from the identity endpoint. 
+ """ + endpoint = self.auth_config.identity_endpoint.rstrip("/") + endpoint = _VERSION_SUFFIX.sub("", endpoint) + return endpoint + "/" + + @property + def identity_v3_endpoint(self) -> str: + """IAM v3 endpoint URL (always ends with ``/``).""" + return self.identity_base + "v3/" + + def authenticate(self) -> None: + """Run the appropriate auth flow based on ``AuthConfig``. + + Dispatches to one of four internal methods depending on + ``auth_mode`` and presence of ``agency_name``. + + Raises: + UnauthorizedError: If the IAM request fails. + MissingCredentialsError: If auth mode cannot be determined. + """ + mode = self.auth_config.auth_mode + has_agency = bool( + self.auth_config.agency_name + and self.auth_config.agency_domain_name + ) + + if mode in (AuthMode.PASSWORD, AuthMode.TOKEN): + if has_agency: + self._v3_auth_with_agency() + else: + self._v3_auth() + else: + if has_agency: + self._aksk_auth_with_agency() + else: + self._aksk_auth() + if self.endpoint_locator is None: + raise RuntimeError( + "Endpoint locator not initialized after authentication" + ) + + def request( + self, + method: str, + url: str, + *, + service_name: str = "", + json: Any | None = None, + content: bytes | None = None, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + retry_count: int | None = None, + retry_timeout: float | None = None, + ) -> httpx.Response: + """Execute an authenticated HTTP request with retry logic. + + Handles: + - Auth header injection (token or AK/SK signing) + - 401 → re-authenticate and retry once + - 429 → backoff retry (up to ``max_backoff_retries``) + - 502/504 → gateway retry (up to ``retry_count``) + + Args: + method: HTTP method (GET, POST, etc.). + url: Full request URL. + service_name: Service name. + json: JSON-serializable body. + content: Raw bytes body (mutually exclusive with ``json``). + headers: Additional request headers. + ok_codes: Acceptable status codes. Defaults per HTTP method. 
+ retry_count: Gateway error retries. Default: 1. + retry_timeout: Gateway retry wait (seconds). Default: 0.5. + + Returns: + httpx.Response on success. + + Raises: + HttpError: On non-OK status codes after exhausting retries. + """ + if ok_codes is None: + ok_codes = _DEFAULT_OK_CODES.get(method.upper(), [200]) + if retry_count is None: + retry_count = _DEFAULT_RETRY_COUNT + if retry_timeout is None: + retry_timeout = _DEFAULT_RETRY_TIMEOUT + + return self._do_request( + method=method, + url=url, + service_name=service_name, + json=json, + content=content, + headers=headers, + ok_codes=ok_codes, + retry_count=retry_count, + retry_timeout=retry_timeout, + backoff_remaining=self.max_backoff_retries, + _is_retry=False, + ) + + def close(self) -> None: + """Close the underlying HTTP client.""" + if self._owns_http_client: + self._http.close() + + def __enter__(self) -> ProviderClient: + return self + + def __exit__(self, *_: object) -> None: + self.close() + + # ------------------------------------------------------------------ + # Internal: HTTP request engine + # ------------------------------------------------------------------ + + def _do_request( + self, + *, + method: str, + url: str, + service_name: str, + json: Any | None, + content: bytes | None, + headers: dict[str, str] | None, + ok_codes: list[int], + retry_count: int, + retry_timeout: float, + backoff_remaining: int, + _is_retry: bool = False, + ) -> httpx.Response: + """Core request logic with retry/reauth handling.""" + reauthed = _is_retry + + while True: + req = self._build_request( + method=method, + url=url, + json=json, + content=content, + headers=headers, + ) + + self._apply_auth(req, service_name=service_name) + + t0 = time.monotonic() + resp = self._http.send(req) + duration_ms = (time.monotonic() - t0) * 1000 + + _log_response( + logger, method, url, resp.status_code, + duration_ms, resp.headers.get("x-request-id", ""), + ) + + if resp.status_code in ok_codes: + return resp + + body = 
resp.text + + if (resp.status_code == 401 + and self.auth_config.allow_reauth + and not reauthed): + logger.debug("Got 401, attempting re-authentication") + try: + self.authenticate() + except Exception as exc: + raise ReauthError(original=exc) from exc + reauthed = True + continue + + if resp.status_code == 429 and backoff_remaining > 0: + logger.warning( + "Rate limited (429), waiting %.1fs (%d retries left)", + self.backoff_timeout, + backoff_remaining, + ) + time.sleep(self.backoff_timeout) + backoff_remaining -= 1 + continue + + if resp.status_code in (502, 504) and retry_count > 0: + logger.warning( + "Gateway error (%d), retrying in %.1fs (%d left)", + resp.status_code, + retry_timeout, + retry_count, + ) + time.sleep(retry_timeout) + retry_count -= 1 + continue + + raise_for_status( + resp.status_code, + method=method, + url=url, + body=body, + expected=ok_codes, + headers=dict(resp.headers), + ) + + def _build_request( + self, + *, + method: str, + url: str, + json: Any | None, + content: bytes | None, + headers: dict[str, str] | None, + ) -> httpx.Request: + """Build a httpx.Request with correct content type.""" + req_headers = dict(self._http.headers) + req_headers["Accept"] = "application/json" + if headers: + req_headers.update(headers) + req_headers.setdefault("User-Agent", USER_AGENT) + + if json is not None: + req_headers.setdefault("Content-Type", "application/json") + return self._http.build_request( + method, url, json=json, headers=req_headers, + ) + if content is not None: + return self._http.build_request( + method, url, content=content, headers=req_headers, + ) + return self._http.build_request( + method, url, headers=req_headers, + ) + + def _apply_auth(self, request: httpx.Request, + service_name: str) -> None: + """Apply auth headers to a request. + + Returns the pre-request token for reauth comparison + (mirrors Go SDK's ``prereqtok`` pattern). 
+ """ + if self.auth_config.auth_mode == AuthMode.AKSK and self.auth_config.access_key: + sign_request( + request, + SignOptions( + access_key=self.auth_config.access_key, + secret_key=_secret_value(self.auth_config.secret_key), + region_name=self.region_id, + service_name=service_name, + ), + ) + # Set project/domain scope headers + if self.project_id and not self.domain_id: + request.headers["x-project-id"] = self.project_id + if self.domain_id: + request.headers["x-domain-id"] = self.domain_id + if self.auth_config.security_token: + request.headers["x-security-token"] = ( + self.auth_config.security_token + ) + elif self.token_id: + request.headers["x-auth-token"] = self.token_id + + # ------------------------------------------------------------------ + # Internal: Auth flows + # ------------------------------------------------------------------ + + def _v3_auth(self) -> None: + """Keystone V3 password/token authentication. + + POST /v3/auth/tokens → extracts token, project, user, catalog. + Sets ``_reauth_func`` for automatic token refresh on 401. + """ + cfg = self.auth_config + + if cfg.token_id: + self.token_id = _secret_value(cfg.token_id) + resp = self._iam_request( + "GET", + self.identity_v3_endpoint + "auth/tokens", + headers={"x-subject-token": self.token_id}, + ) + else: + body = _build_v3_auth_body(cfg) + resp = self._iam_request( + "POST", + self.identity_v3_endpoint + "auth/tokens", + json=body, + ) + self.token_id = resp.headers.get("x-subject-token", "") + + self._extract_auth_result(resp.json()) + + def _v3_auth_with_agency(self) -> None: + """Keystone V3 auth + agency assume_role. + + First authenticates normally (password/token), then issues + a second POST with ``assume_role`` identity method to get + a delegated token. 
+ """ + cfg = self.auth_config + + if not cfg.token_id: + self._v3_auth() + else: + self.token_id = _secret_value(cfg.token_id) + + body = _build_agency_auth_body(cfg) + resp = self._iam_request( + "POST", + self.identity_v3_endpoint + "auth/tokens", + json=body, + ) + self.token_id = resp.headers.get("x-subject-token", "") + + self._extract_auth_result(resp.json()) + + def _aksk_auth(self) -> None: + """AK/SK authentication. + + Does not create a token. Instead, stores AK/SK credentials + for signing future requests and fetches the service catalog + via ``GET /v3/auth/catalog``. + """ + cfg = self.auth_config + self.project_id = self._resolve_project_id( + cfg.project_name) if not cfg.project_id and cfg.project_name \ + else cfg.project_id or "" + self.domain_id = self._resolve_domain_id( + cfg.domain_name) if not cfg.domain_id and cfg.domain_name \ + else cfg.domain_id or "" + self.region_id = cfg.region or "" + + catalog = self._fetch_catalog() + self.endpoint_locator = build_endpoint_locator(catalog, self.region_id) + + def _aksk_auth_with_agency(self) -> None: + """AK/SK auth + agency assume_role. + + First sets up AK/SK signing, then issues a token request + with ``assume_role`` to get a delegated token. After this, + subsequent requests use the token (not AK/SK). 
+ """ + cfg = self.auth_config + self._aksk_auth() + + if not self.domain_id: + raise UnauthorizedError( + method="POST", + url=self.identity_v3_endpoint + "auth/tokens", + body="Agency auth requires domain_id or domain_name", + ) + + body = _build_agency_auth_body(cfg) + resp = self._iam_request( + "POST", + self.identity_v3_endpoint + "auth/tokens", + json=body, + ) + self.token_id = resp.headers.get("x-subject-token", "") + self._extract_auth_result(resp.json()) + + # ------------------------------------------------------------------ + # Internal: IAM helpers + # ------------------------------------------------------------------ + + def _iam_request( + self, + method: str, + url: str, + *, + json: Any | None = None, + headers: dict[str, str] | None = None, + ) -> httpx.Response: + """Send a request to the IAM service. + + AK/SK-signed if in AKSK mode, otherwise uses current token. + Raises on non-2xx. + + Args: + method: HTTP method. + url: Full IAM URL. + json: JSON body. + headers: Extra headers. + + Returns: + httpx.Response. + + Raises: + HttpError: On non-2xx response. + """ + req = self._build_request( + method=method, + url=url, + json=json, + content=None, + headers=headers, + ) + self._apply_auth(req, service_name="iam") + + t0 = time.monotonic() + resp = self._http.send(req) + duration_ms = (time.monotonic() - t0) * 1000 + + _log_response(logger, method, url, resp.status_code, + duration_ms, resp.headers.get("x-request-id", "")) + + if resp.status_code >= 400: + raise_for_status( + resp.status_code, + method=method, + url=url, + body=resp.text, + headers=dict(resp.headers), + ) + return resp + + def _extract_auth_result(self, data: dict[str, Any]) -> None: + """Extract project, user, domain, and catalog from auth response. + + Populates ``project_id``, ``user_id``, ``domain_id``, + ``region_id``, and ``endpoint_locator``. + + Args: + data: Parsed JSON from IAM auth response. 
+ """ + token_data = data.get("token", {}) + + project = token_data.get("project") + if project: + self.project_id = project.get("id", "") + domain = project.get("domain", {}) + if domain: + self.domain_id = domain.get("id", "") + + user = token_data.get("user") + if user: + self.user_id = user.get("id", "") + if not self.domain_id: + domain = user.get("domain", {}) + if domain: + self.domain_id = domain.get("id", "") + + if not self.region_id: + cfg = self.auth_config + self.region_id = cfg.region or cfg.tenant_name or "" + + catalog = token_data.get("catalog", []) + if catalog: + parsed_catalog = [CatalogEntry.model_validate(c) for c in catalog] + self.endpoint_locator = build_endpoint_locator(parsed_catalog, + self.region_id) + + def _fetch_catalog(self) -> list[CatalogEntry]: + """Fetch the service catalog via ``GET /v3/auth/catalog``. + + Used by AK/SK auth where the catalog is not embedded in + a token response. + + Returns: + List of catalog entries. + """ + resp = self._iam_request("GET", + self.identity_v3_endpoint + "auth/catalog") + raw_catalog = resp.json().get("catalog", []) + return [CatalogEntry.model_validate(entry) for entry in raw_catalog] + + def _resolve_project_id(self, name: str) -> str: + """Look up project ID by name via IAM API. + + Args: + name: Project name. + + Returns: + Project ID string. + + Raises: + EndpointNotFoundError: If no project is found. + """ + resp = self._iam_request( + "GET", + self.identity_v3_endpoint + f"projects?name={name}", + ) + data = resp.json() + projects = data.get("projects", []) + if not projects: + raise ValueError(f"Project with name '{name}' not found") + return projects[0]["id"] + + def _resolve_domain_id(self, name: str) -> str: + """Look up domain ID by name via IAM API. + + Args: + name: Domain name. + + Returns: + Domain ID string, or empty string if not found. 
+ """ + resp = self._iam_request("GET", self.identity_v3_endpoint + f"auth/domains?name={name}") + domains = resp.json().get("domains", []) + if not domains: + raise ValueError(f"Domain with name '{name}' not found") + return domains[0]["id"] + +# ====================================================================== +# Module-level helpers +# ====================================================================== + + +def _log_response( + log: logging.Logger, + method: str, + url: str, + status_code: int, + duration_ms: float, + request_id: str, +) -> None: + """Log an HTTP response at the appropriate level. + + - 2xx → DEBUG + - 4xx → WARNING + - 5xx → ERROR + """ + rid = f" [{request_id}]" if request_id else "" + msg = f"{method} {url} → {status_code} ({duration_ms:.0f}ms){rid}" + + if status_code >= 500: + log.error(msg) + elif status_code >= 400: + log.warning(msg) + else: + log.debug(msg) + + +def _secret_value(value: Any) -> str: + """Extract the plain string from a value that may be ``SecretStr``. + + Works transparently with both ``str`` and ``pydantic.SecretStr``, + so callers don't need to know which type ``AuthConfig`` uses + for sensitive fields. + + Args: + value: A ``str`` or ``SecretStr`` instance. + + Returns: + Plain string. + """ + if value is None: + return "" + if hasattr(value, "get_secret_value"): + return value.get_secret_value() + return str(value) + + +def _build_v3_auth_body(cfg: AuthConfig) -> dict[str, Any]: + """Build the JSON body for ``POST /v3/auth/tokens``. + + Constructs the identity and scope sections based on + available credentials (password or token). + + Args: + cfg: Auth configuration. + + Returns: + JSON-serializable dict for the request body. 
+ """ + auth: dict[str, Any] = {"identity": {}} + + if cfg.password: + user: dict[str, Any] = {"password": _secret_value(cfg.password)} + if cfg.user_id: + user["id"] = cfg.user_id + else: + user["name"] = cfg.username + domain: dict[str, str] = {} + if cfg.domain_id: + domain["id"] = cfg.domain_id + elif cfg.domain_name: + domain["name"] = cfg.domain_name + user["domain"] = domain + + auth["identity"]["methods"] = ["password"] + auth["identity"]["password"] = {"user": user} + + if cfg.passcode: + auth["identity"]["methods"].append("totp") + totp_user: dict[str, str] = { + "passcode": _secret_value(cfg.passcode), + } + if cfg.user_id: + totp_user["id"] = cfg.user_id + if cfg.username: + totp_user["name"] = cfg.username + auth["identity"]["totp"] = {"user": totp_user} + + elif cfg.token_id: + auth["identity"]["methods"] = ["token"] + auth["identity"]["token"] = {"id": _secret_value(cfg.token_id)} + + scope = _build_scope(cfg) + if scope: + auth["scope"] = scope + + return {"auth": auth} + + +def _build_agency_auth_body(cfg: AuthConfig) -> dict[str, Any]: + """Build the JSON body for agency ``assume_role`` auth. + + Args: + cfg: Auth configuration with agency fields populated. + + Returns: + JSON-serializable dict for the request body. + """ + auth: dict[str, Any] = { + "identity": { + "methods": ["assume_role"], + "assume_role": { + "domain_name": cfg.agency_domain_name, + "xrole_name": cfg.agency_name, + }, + }, + } + + if cfg.delegated_project and cfg.agency_domain_name: + auth["scope"] = { + "project": { + "name": cfg.delegated_project, + "domain": {"name": cfg.agency_domain_name}, + }, + } + + return {"auth": auth} + + +def _build_scope(cfg: AuthConfig) -> dict[str, Any] | None: + """Build the ``scope`` section of a V3 auth request. + + Args: + cfg: Auth configuration. + + Returns: + Scope dict or None if no scoping fields are set. 
+ """ + project_id = cfg.tenant_id or cfg.project_id + project_name = cfg.tenant_name or cfg.project_name + + if project_id: + return {"project": {"id": project_id}} + + if project_name: + domain: dict[str, str] = {} + if cfg.domain_id: + domain["id"] = cfg.domain_id + elif cfg.domain_name: + domain["name"] = cfg.domain_name + scope: dict[str, Any] = {"project": {"name": project_name}} + if domain: + scope["project"]["domain"] = domain + return scope + + if cfg.domain_id: + return {"domain": {"id": cfg.domain_id}} + if cfg.domain_name: + return {"domain": {"name": cfg.domain_name}} + + return None diff --git a/src/sdk/core/service_client.py b/src/sdk/core/service_client.py new file mode 100644 index 0000000..8967f13 --- /dev/null +++ b/src/sdk/core/service_client.py @@ -0,0 +1,342 @@ +"""Service client — per-service wrapper over ``ProviderClient``. + +Mirrors the Go SDK's ``ServiceClient`` struct. Each OTC service +(compute, DNS, CCE, etc.) gets its own ``ServiceClient`` with +a resolved endpoint and convenience HTTP methods. + +A ``ServiceClient`` delegates all HTTP work to the underlying +``ProviderClient``, adding service-level headers and URL +construction via ``service_url()``. 
+ +Example:: + + from sdk.core.provider import ProviderClient + from sdk.core.service_client import ServiceClient + + provider = ProviderClient(auth_config) + provider.authenticate() + + # Resolve endpoint from catalog + compute = ServiceClient( + provider, + service_type="compute", + region="eu-de", + ) + resp = compute.get("servers/detail") + + # With project-scoped resource base + cce = ServiceClient( + provider, + service_type="ccev2.0", + resource_base=endpoint + "api/v1/projects/" + project_id + "/", + ) + url = cce.service_url("clusters") +""" + +from __future__ import annotations + +from typing import Any + +import httpx + +from sdk.core.endpoint import EndpointOpts +from sdk.core.provider import ProviderClient + + +class ServiceClient: + """Client for a specific OTC service API. + + Wraps a ``ProviderClient`` and holds the resolved service + endpoint. Provides ``service_url()`` for building resource URLs + and convenience HTTP methods that mirror the Go SDK's + ``Get``, ``Post``, ``Put``, ``Patch``, ``Delete``, ``Head``. + + Args: + provider: Authenticated ``ProviderClient``. + service_type: Catalog service type (e.g. ``compute``, ``dns``). + region: Region override. Falls back to ``provider.region_id``. + endpoint_override: Bypass catalog lookup and use this URL + directly. + resource_base: Custom base URL for resource paths. Some + services need a project-scoped base that differs from the + catalog endpoint (see CCE example above). If not set, + ``endpoint`` is used. + extra_headers: Headers merged into every request from this + service client (Go SDK ``MoreHeaders``). + + Attributes: + provider: Reference to the parent ``ProviderClient``. + endpoint: Resolved service endpoint URL (always ends with ``/``). + resource_base: Base URL for ``service_url()`` path building. + service_type: Service type string. + extra_headers: Service-wide headers. 
+ """ + + def __init__( + self, + provider: ProviderClient, + service_type: str = "", + *, + region: str = "", + endpoint_override: str = "", + resource_base: str = "", + extra_headers: dict[str, str] | None = None, + microversion: str = "", + ) -> None: + self.provider = provider + self.service_type = service_type + self.extra_headers: dict[str, str] = extra_headers or {} + self.microversion = microversion + # Resolve endpoint + if endpoint_override: + self.endpoint = _ensure_trailing_slash(endpoint_override) + elif provider.endpoint_locator and service_type: + opts = EndpointOpts(service_type=service_type, region=region) + self.endpoint = provider.endpoint_locator(opts) + else: + self.endpoint = "" + + # Resource base — defaults to endpoint + self.resource_base = ( + _ensure_trailing_slash(resource_base) + if resource_base + else self.endpoint + ) + + # ------------------------------------------------------------------ + # URL building + # ------------------------------------------------------------------ + + def service_url(self, *parts: str) -> str: + """Build a full URL from the resource base and path segments. + + Joins ``resource_base`` with the given path parts using ``/``. + + Example:: + + client.service_url("servers", server_id, "action") + # → "https://ecs.eu-de.../v2.1/servers/{id}/action" + + Args: + *parts: URL path segments to join. + + Returns: + Full URL string. + """ + return self.resource_base + "/".join(parts) + + # ------------------------------------------------------------------ + # HTTP convenience methods + # ------------------------------------------------------------------ + + def get( + self, + path: str, + *, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """GET request. Default ok: 200. + + Args: + path: Relative path appended to ``resource_base``. + headers: Extra request headers. + ok_codes: Acceptable status codes. + + Returns: + httpx.Response. 
+ """ + return self._request("GET", path, headers=headers, ok_codes=ok_codes) + + def post( + self, + path: str, + *, + json: Any | None = None, + content: bytes | None = None, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """POST request. Default ok: 200, 201, 202. + + Args: + path: Relative path. + json: JSON-serializable body. + content: Raw bytes body. + headers: Extra request headers. + ok_codes: Acceptable status codes. + + Returns: + httpx.Response. + """ + return self._request( + "POST", path, json=json, content=content, + headers=headers, ok_codes=ok_codes, + ) + + def put( + self, + path: str, + *, + json: Any | None = None, + content: bytes | None = None, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """PUT request. Default ok: 200, 201, 202. + + Args: + path: Relative path. + json: JSON-serializable body. + content: Raw bytes body. + headers: Extra request headers. + ok_codes: Acceptable status codes. + + Returns: + httpx.Response. + """ + return self._request( + "PUT", path, json=json, content=content, + headers=headers, ok_codes=ok_codes, + ) + + def patch( + self, + path: str, + *, + json: Any | None = None, + content: bytes | None = None, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """PATCH request. Default ok: 200, 204. + + Args: + path: Relative path. + json: JSON-serializable body. + content: Raw bytes body. + headers: Extra request headers. + ok_codes: Acceptable status codes. + + Returns: + httpx.Response. + """ + return self._request( + "PATCH", path, json=json, content=content, + headers=headers, ok_codes=ok_codes, + ) + + def delete( + self, + path: str, + *, + json: Any | None = None, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """DELETE request. Default ok: 200, 202, 204. 
+ + Supports optional JSON body for APIs that require + delete-with-body (Go SDK ``DeleteWithBody``). + + Args: + path: Relative path. + json: Optional JSON body. + headers: Extra request headers. + ok_codes: Acceptable status codes. + + Returns: + httpx.Response. + """ + return self._request( + "DELETE", path, json=json, + headers=headers, ok_codes=ok_codes, + ) + + def head( + self, + path: str, + *, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """HEAD request. Default ok: 204, 206. + + Args: + path: Relative path. + headers: Extra request headers. + ok_codes: Acceptable status codes. + + Returns: + httpx.Response. + """ + return self._request("HEAD", path, headers=headers, ok_codes=ok_codes) + + # ------------------------------------------------------------------ + # Internal + # ------------------------------------------------------------------ + + def _set_microversion_header(self, headers: dict[str, str]) -> None: + """Set microversion headers based on service type. + + Corresponds to Go SDK's ``setMicroversionHeader``. + """ + if not self.microversion: + return + + mv_header_map = { + "compute": "X-OpenStack-Nova-API-Version", + "sharev2": "X-OpenStack-Manila-API-Version", + "volume": "X-OpenStack-Volume-API-Version", + } + + specific = mv_header_map.get(self.service_type) + if specific: + headers[specific] = self.microversion + + if self.service_type: + headers["OpenStack-API-Version"] = ( + f"{self.service_type} {self.microversion}" + ) + + def _request( + self, + method: str, + path: str, + *, + json: Any | None = None, + content: bytes | None = None, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """Build full URL, merge service headers, delegate to provider. + + Args: + method: HTTP method. + path: Relative resource path. + json: JSON body. + content: Raw body. + headers: Per-request headers. + ok_codes: Acceptable status codes. 
+ + Returns: + httpx.Response. + """ + url = self.service_url(path) + merged: dict[str, str] = {**self.extra_headers} + if headers: + merged.update(headers) + self._set_microversion_header(merged) + return self.provider.request( + method, + url, + json=json, + content=content, + headers=merged or None, + ok_codes=ok_codes, + ) + + +def _ensure_trailing_slash(url: str) -> str: + """Ensure a URL ends with ``/``.""" + return url if url.endswith("/") else url + "/" diff --git a/src/sdk/core/signer.py b/src/sdk/core/signer.py new file mode 100644 index 0000000..cdb9780 --- /dev/null +++ b/src/sdk/core/signer.py @@ -0,0 +1,470 @@ +"""AK/SK request signing. + +Signs HTTP requests using the AK/SK authentication scheme compatible +with services. + +The signing process follows these steps: + +1. Build a **canonical request** from method, path, query, headers, body. +2. Build a **string to sign** from algorithm, timestamp, scope, and + the hash of the canonical request. +3. **Derive a signing key** from the secret key, date, region, and service. +4. **Compute the signature** and set the ``Authorization`` header + on the request. 
+ +Example:: + + import httpx + from t_cloud.core.signer import sign_request, SignOptions + + opts = SignOptions( + access_key="AK...", + secret_key="SK...", + region_name="eu-de", + service_name="dns", + ) + request = httpx.Request("GET", "https://dns.eu-de.otc.t-systems.com/v2/zones") + sign_request(request, opts) + # request now has Authorization and X-Sdk-Date headers +""" + +from __future__ import annotations + +import hashlib +import hmac +import re +import logging +import threading +from collections import OrderedDict +from datetime import UTC, datetime, timedelta +from urllib.parse import quote + +import httpx +from pydantic import BaseModel, ConfigDict, computed_field, SecretStr + +logger = logging.getLogger(__name__) + +SIGN_ALGORITHM_HMAC_SHA256 = "SDK-HMAC-SHA256" + +_SUPPORTED_ALGORITHMS = frozenset({ + SIGN_ALGORITHM_HMAC_SHA256, +}) + +_CONTENT_SHA256_HEADER = "x-sdk-content-sha256" + +_SPACE_RE = re.compile(r"\s+") + +_SIGNED_HEADERS_WHITELIST = frozenset({ + "host", + "content-type", + "x-sdk-date", + "x-sdk-content-sha256", +}) + +# === 1. TYPES & CACHE === + + +class _SignKeyCache: + """Thread-safe LRU-like cache for derived signing keys. + + Evicts the oldest entry when ``max_count`` is reached. + + Args: + max_count: Maximum number of cached entries. + """ + + def __init__(self, max_count: int = 300) -> None: + self._max_count = max_count + self._lock = threading.Lock() + self._store: OrderedDict[str, _SignKeyCacheEntry] = OrderedDict() + + def get(self, key: str) -> _SignKeyCacheEntry | None: + with self._lock: + return self._store.get(key) + + def put(self, key: str, entry: _SignKeyCacheEntry) -> None: + with self._lock: + if len(self._store) >= self._max_count and self._store: + self._store.popitem(last=False) + self._store[key] = entry + + +class _SignKeyCacheEntry(BaseModel): + """Cached signing key with its day-of-epoch stamp. 
+ """ + model_config = ConfigDict(frozen=True) + key: bytes + days_since_epoch: int + + +class _SignParams(BaseModel): + """Resolved signing parameters. + """ + + access_key: str + secret_key: SecretStr + region_name: str + service_name: str + sign_algorithm: str + enable_cache_sign_key: bool + signing_time: datetime + + @computed_field + @property + def formatted_date(self) -> str: + return _format_date(self.signing_time) + + @computed_field + @property + def formatted_datetime(self) -> str: + return _format_datetime(self.signing_time) + + @property + def scope(self) -> str: + return ( + f"{self.formatted_date}/" + f"{self.region_name}/" + f"{self.service_name}/" + f"sdk_request" + ) + + @property + def days_since_epoch(self) -> int: + """Number of days since Unix epoch for the signing time.""" + ts = int(self.signing_time.timestamp()) + return ts // 86400 + + +class SignOptions(BaseModel): + """Options for signing a request. + + Args: + access_key: AK/SK access key. + secret_key: AK/SK secret key. + region_name: Target region (e.g. ``eu-de``). + service_name: Service identifier (e.g. ``dns``, ``cce``). + sign_algorithm: Signing algorithm. Defaults to + ``SDK-HMAC-SHA256``. Must be a value from + ``_SUPPORTED_ALGORITHMS``. + enable_cache_sign_key: Cache the derived signing key for + one day. Disabled by default (matches Go SDK default). + time_offset_seconds: Offset in seconds to adjust the + signing timestamp. Useful when the client clock is + out of sync with the server. + """ + model_config = ConfigDict(frozen=True) + + access_key: str + secret_key: SecretStr + region_name: str = "" + service_name: str = "" + sign_algorithm: str = SIGN_ALGORITHM_HMAC_SHA256 + enable_cache_sign_key: bool = False + time_offset_seconds: int = 0 + + +_cache = _SignKeyCache() + + +# === 2. 
# === 2. PUBLIC API ===


def sign_request(
    request: httpx.Request,
    opts: SignOptions,
    *,
    timestamp: datetime | None = None,
) -> None:
    """Sign an httpx request in place with AK/SK credentials.

    Adds ``X-Sdk-Date``, ``Host``, and ``Authorization`` headers.

    Args:
        request: The httpx request to sign (modified in place).
        opts: Signing credentials and scope.
        timestamp: Override the signing time (for testing).
            Defaults to ``datetime.now(UTC)``.
    """
    params = _build_sign_params(opts, timestamp)

    # The Host header carries the port only when it is non-default.
    url = request.url
    host_value = url.host or ""
    if url.port and url.port not in (80, 443):
        host_value = f"{host_value}:{url.port}"
    request.headers["host"] = host_value
    request.headers["x-sdk-date"] = params.formatted_datetime

    _sign_with_params(request, params)


def re_sign_request(
    request: httpx.Request,
    opts: SignOptions,
    *,
    timestamp: datetime | None = None,
) -> None:
    """Re-sign a request for redirection.

    Corresponds to Go SDK's ``ReSign``.
    Overwrites ``X-Sdk-Date`` and removes the old ``Authorization``
    header before re-signing.

    Args:
        request: The httpx request to re-sign (modified in place).
        opts: Signing credentials and scope.
        timestamp: Override the signing time (for testing).
    """
    params = _build_sign_params(opts, timestamp)

    # Refresh the date and drop the stale signature before signing again.
    request.headers["x-sdk-date"] = params.formatted_datetime
    request.headers.pop("authorization", None)

    _sign_with_params(request, params)


# === 3. CORE SIGNING FLOW ===


def _build_sign_params(
    opts: SignOptions,
    timestamp: datetime | None,
) -> _SignParams:
    """Resolve ``SignOptions`` into concrete signing parameters.

    Strips whitespace from the credential keys (matches Go SDK
    behavior) and applies the configured time offset.
    """
    algorithm = opts.sign_algorithm or SIGN_ALGORITHM_HMAC_SHA256
    if algorithm not in _SUPPORTED_ALGORITHMS:
        raise ValueError(
            f"Unsupported signing algorithm '{algorithm}', "
            f"supported: {sorted(_SUPPORTED_ALGORITHMS)}"
        )

    now = datetime.now(UTC) if timestamp is None else timestamp
    return _SignParams(
        access_key=opts.access_key.strip(),
        secret_key=SecretStr(opts.secret_key.get_secret_value().strip()),
        region_name=opts.region_name,
        service_name=opts.service_name,
        sign_algorithm=algorithm,
        enable_cache_sign_key=opts.enable_cache_sign_key,
        signing_time=now - timedelta(seconds=opts.time_offset_seconds),
    )


def _sign_with_params(
    request: httpx.Request,
    params: _SignParams,
) -> None:
    """Signing core shared by ``sign_request`` and ``re_sign_request``."""
    body_hash = request.headers.get(
        _CONTENT_SHA256_HEADER,
        _hash_sha256(_read_body(request)),
    )
    canonical = _canonical_request(request, body_hash)
    logger.debug("Canonical Request:\n%s", canonical)

    string_to_sign = (
        f"{params.sign_algorithm}\n"
        f"{params.formatted_datetime}\n"
        f"{params.scope}\n"
        f"{_hash_sha256(canonical.encode())}"
    )

    signature = _compute_signature(
        string_to_sign,
        _derive_signing_key(params),
        params.sign_algorithm,
    ).hex()

    auth_fields = ", ".join((
        f"Credential={params.access_key}/{params.scope}",
        f"SignedHeaders={_signed_headers_string(request)}",
        f"Signature={signature}",
    ))
    request.headers["authorization"] = (
        f"{params.sign_algorithm} {auth_fields}"
    )


def _derive_signing_key(params: _SignParams) -> bytes:
    """Derive the signing key, optionally via the day-scoped cache.

    Corresponds to Go SDK's ``deriveSigningKey``. With caching
    enabled, keys are stored per (secret, region, service) and stay
    valid for one day.
    """
    if not params.enable_cache_sign_key:
        return _build_sign_key(params)

    secret_digest = _hash_sha256(
        params.secret_key.get_secret_value().encode()
    )
    cache_key = f"{secret_digest}-{params.region_name}-{params.service_name}"

    entry = _cache.get(cache_key)
    if entry is not None and entry.days_since_epoch == params.days_since_epoch:
        return entry.key

    fresh_key = _build_sign_key(params)
    _cache.put(
        cache_key,
        _SignKeyCacheEntry(
            key=fresh_key,
            days_since_epoch=params.days_since_epoch,
        ),
    )
    return fresh_key


def _build_sign_key(params: _SignParams) -> bytes:
    """Build the signing key by chained HMACs over the scope parts.

    Corresponds to Go SDK's ``buildSignKey``::

        kDate = HMAC("SDK" + secret, date)
        kRegion = HMAC(kDate, region)
        kService = HMAC(kRegion, service)
        kSigning = HMAC(kService, "sdk_request")
    """
    algorithm = params.sign_algorithm
    key = f"SDK{params.secret_key.get_secret_value()}".encode()
    for part in (
        params.formatted_date,
        params.region_name,
        params.service_name,
        "sdk_request",
    ):
        key = _compute_signature(part, key, algorithm)
    return key


# === 4. CANONICALIZATION ===


def _canonical_path(request: httpx.Request) -> str:
    """Build the canonical URI path.

    Works from the decoded path and re-encodes it once, so already
    percent-encoded input is not double-encoded.
    """
    raw_path = request.url.path
    if not raw_path.startswith("/"):
        raw_path = f"/{raw_path}"
    return _url_encode(raw_path, is_path=True) or "/"


def _canonical_query(request: httpx.Request) -> str:
    """Build the canonical query string.

    Parameters are sorted by encoded key (case-insensitive);
    duplicate keys are preserved.
    """
    if _use_payload_for_query(request):
        return ""

    items = request.url.params.multi_items()
    if not items:
        return ""

    ordered = sorted(
        ((_url_encode(k), _url_encode(v)) for k, v in items),
        key=lambda pair: (pair[0].lower(), pair[1]),
    )
    return "&".join(f"{key}={value}" for key, value in ordered)


def _get_signable_headers(request: httpx.Request) -> dict[str, str]:
    """Collect the lowercased headers that take part in the signature."""
    return {
        lower: value
        for name, value in request.headers.items()
        if (lower := name.lower()) in _SIGNED_HEADERS_WHITELIST
        or lower.startswith("x-sdk-")
    }


def _canonical_headers(request: httpx.Request) -> str:
    """Render signable headers as sorted ``name:value`` lines.

    Names and values are lowercased and whitespace-collapsed; each
    line ends with a newline.
    """
    signable = _get_signable_headers(request)
    return "".join(
        f"{_SPACE_RE.sub(' ', name.strip())}:"
        f"{_SPACE_RE.sub(' ', signable[name].strip())}\n"
        for name in sorted(signable)
    )


def _signed_headers_string(request: httpx.Request) -> str:
    """Build the semicolon-separated, sorted signed-header list."""
    return ";".join(sorted(_get_signable_headers(request)))


def _canonical_request(request: httpx.Request, content_sha256: str) -> str:
    """Assemble the full canonical request string.

    Format::

        METHOD
        CanonicalURI
        CanonicalQueryString
        CanonicalHeaders
        SignedHeaders
        ContentHash
    """
    components = (
        request.method,
        _canonical_path(request),
        _canonical_query(request),
        _canonical_headers(request),
        _signed_headers_string(request),
        content_sha256,
    )
    return "\n".join(components)


# === 5.
UTILS === + +def _hash_sha256(data: bytes) -> str: + """Hex-encoded SHA-256 hash.""" + return hashlib.sha256(data).hexdigest() + +def _hmac_sha256(data: str, key: bytes) -> bytes: + """HMAC-SHA256 of string data with byte key.""" + return hmac.new(key, data.encode(), hashlib.sha256).digest() + +def _compute_signature(data: str, key: bytes, algorithm: str) -> bytes: + """Compute signature with the specified algorithm. + + Corresponds to Go SDK's ``computeSignature``. + + Raises: + ValueError: If the algorithm is not supported. + """ + if algorithm == SIGN_ALGORITHM_HMAC_SHA256: + return _hmac_sha256(data, key) + raise ValueError( + f"Unsupported algorithm '{algorithm}', " + f"supported: {sorted(_SUPPORTED_ALGORITHMS)}" + ) + +def _format_datetime(dt: datetime) -> str: + """Format timestamp as ``20060102T150405Z``.""" + return dt.astimezone(UTC).strftime("%Y%m%dT%H%M%SZ") + +def _format_date(dt: datetime) -> str: + """Format date as ``20060102``.""" + return dt.astimezone(UTC).strftime("%Y%m%d") + +def _read_body(request: httpx.Request) -> bytes: + """Read the request body as bytes. + + For POST with no body, uses the query string as content + (matches Go SDK's ``calculateContentHash``). + """ + if _use_payload_for_query(request): + return str(request.url.params).encode() + + try: + return request.content or b"" + except httpx.RequestNotRead as e: + raise RuntimeError( + "Streaming bodies are not supported for AK/SK signing. " + "The request content must be fully loaded in memory." + ) from e + +def _use_payload_for_query(request: httpx.Request) -> bool: + """Check if query string should be used as payload. + """ + if request.method.upper() != "POST": + return False + body = request.content + return body is None or body == b"" + +def _url_encode(value: str, *, is_path: bool = False) -> str: + """URL-encode a value, preserving ``/`` in paths. 
+ """ + safe = "/-_.~" if is_path else "-_.~" + return quote(value, safe=safe) diff --git a/src/sdk/core/waiter.py b/src/sdk/core/waiter.py new file mode 100644 index 0000000..ba7c9ff --- /dev/null +++ b/src/sdk/core/waiter.py @@ -0,0 +1,66 @@ +"""Waiters for eventual consistency and long-running operations.""" + +from __future__ import annotations + +import logging +import time +from collections.abc import Callable +from typing import TypeVar, Any + +from sdk.core.exceptions import HttpError + +logger = logging.getLogger(__name__) + +T = TypeVar("T") + + +def wait_for( + func: Callable[[], T], + condition: Callable[[T], bool], + timeout: int = 60, + interval: float = 2.0, + label: str = "resource", +) -> T: + """Generic waiter that polls 'func' until 'condition' is True.""" + start_time = time.monotonic() + + while True: + try: + result = func() + if condition(result): + return result + except Exception as exc: + logger.debug("Waiter [%s] caught temporary error: %s", + label, exc) + + if time.monotonic() - start_time > timeout: + raise TimeoutError( + f"Timed out waiting for {label} after {timeout}s" + ) + + time.sleep(interval) + + +def wait_for_delete( + get_func: Callable[[], Any], + timeout: int = 60, + interval: float = 2.0, + label: str = "resource", +) -> None: + """Specialized waiter that polls until the resource returns 404.""" + start_time = time.monotonic() + + while True: + try: + get_func() + except HttpError as exc: + if exc.status_code == 404: + return + raise + + if time.monotonic() - start_time > timeout: + raise TimeoutError( + f"Timed out waiting for {label} deletion after {timeout}s" + ) + + time.sleep(interval) diff --git a/src/sdk/services/__init__.py b/src/sdk/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/sdk/services/vpc/__init__.py b/src/sdk/services/vpc/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/sdk/services/vpc/v1/__init__.py b/src/sdk/services/vpc/v1/__init__.py new file 
mode 100644 index 0000000..abc1e90 --- /dev/null +++ b/src/sdk/services/vpc/v1/__init__.py @@ -0,0 +1,18 @@ +"""VPC service, API v1. + +Resources are exposed as submodules. Usage:: + + from sdk.services.vpc.v1 import vpcs + + new_vpc = vpcs.create(client, vpcs.CreateVpcOpts(name="my-vpc")) + for v in vpcs.list(client): + print(v.id, v.name) + +Future resources (subnets, peerings, ...) are added as sibling submodules. +""" + +from __future__ import annotations + +from . import vpcs + +__all__ = ["vpcs"] diff --git a/src/sdk/services/vpc/v1/vpcs/__init__.py b/src/sdk/services/vpc/v1/vpcs/__init__.py new file mode 100644 index 0000000..bd9a439 --- /dev/null +++ b/src/sdk/services/vpc/v1/vpcs/__init__.py @@ -0,0 +1,23 @@ +"""VPCs resource (VPC service, v1).""" + +from __future__ import annotations + +from .common import Route, Vpc +from .create import CreateVpcOpts, create +from .delete import delete +from .get import get +from .list import ListVpcsOpts, list +from .update import UpdateVpcOpts, update + +__all__ = [ + "CreateVpcOpts", + "ListVpcsOpts", + "Route", + "UpdateVpcOpts", + "Vpc", + "create", + "delete", + "get", + "list", + "update", +] diff --git a/src/sdk/services/vpc/v1/vpcs/common.py b/src/sdk/services/vpc/v1/vpcs/common.py new file mode 100644 index 0000000..c138a46 --- /dev/null +++ b/src/sdk/services/vpc/v1/vpcs/common.py @@ -0,0 +1,35 @@ +"""Shared VPC v1 models and constants.""" + +from __future__ import annotations + +from pydantic import BaseModel, Field + +_BASE_PATH = "vpcs" + + +class Route(BaseModel): + """VPC route entry.""" + + destination: str | None = None + nexthop: str | None = None + + +class Vpc(BaseModel): + """VPC resource returned by the API. + + Only ``id`` is guaranteed to be present and non-null. Every other + field can be missing or ``null`` depending on how the VPC was + created and which API version returned it. 
+ """ + + id: str + name: str | None = None + description: str | None = None + cidr: str | None = None + status: str | None = None + enterprise_project_id: str | None = None + routes: list[Route] = Field(default_factory=list) + enable_shared_snat: bool | None = None + tenant_id: str | None = None + created_at: str | None = None + updated_at: str | None = None diff --git a/src/sdk/services/vpc/v1/vpcs/create.py b/src/sdk/services/vpc/v1/vpcs/create.py new file mode 100644 index 0000000..4ca157e --- /dev/null +++ b/src/sdk/services/vpc/v1/vpcs/create.py @@ -0,0 +1,33 @@ +"""Create a VPC.""" + +from __future__ import annotations + +from typing import ClassVar + +from sdk.core.opts import BaseOpts +from sdk.core.service_client import ServiceClient + +from .common import _BASE_PATH, Vpc + + +class CreateVpcOpts(BaseOpts): + """Options for creating a VPC. + + All fields are optional per the API spec. + """ + + _wrapper_key: ClassVar[str | None] = "vpc" + + name: str | None = None + description: str | None = None + cidr: str | None = None + enterprise_project_id: str | None = None + + +def create(client: ServiceClient, opts: CreateVpcOpts) -> Vpc: + """Create a VPC. + + ``POST /v1/{project_id}/vpcs`` + """ + resp = client.post(_BASE_PATH, json=opts.to_request_body()) + return Vpc.model_validate(resp.json()["vpc"]) diff --git a/src/sdk/services/vpc/v1/vpcs/delete.py b/src/sdk/services/vpc/v1/vpcs/delete.py new file mode 100644 index 0000000..98b7d5b --- /dev/null +++ b/src/sdk/services/vpc/v1/vpcs/delete.py @@ -0,0 +1,15 @@ +"""Delete a VPC.""" + +from __future__ import annotations + +from sdk.core.service_client import ServiceClient + +from .common import _BASE_PATH + + +def delete(client: ServiceClient, vpc_id: str) -> None: + """Delete a VPC. 
+ + ``DELETE /v1/{project_id}/vpcs/{vpc_id}`` + """ + client.delete(f"{_BASE_PATH}/{vpc_id}") diff --git a/src/sdk/services/vpc/v1/vpcs/get.py b/src/sdk/services/vpc/v1/vpcs/get.py new file mode 100644 index 0000000..4419c16 --- /dev/null +++ b/src/sdk/services/vpc/v1/vpcs/get.py @@ -0,0 +1,16 @@ +"""Get a VPC by ID.""" + +from __future__ import annotations + +from sdk.core.service_client import ServiceClient + +from .common import _BASE_PATH, Vpc + + +def get(client: ServiceClient, vpc_id: str) -> Vpc: + """Get VPC details. + + ``GET /v1/{project_id}/vpcs/{vpc_id}`` + """ + resp = client.get(f"{_BASE_PATH}/{vpc_id}") + return Vpc.model_validate(resp.json()["vpc"]) diff --git a/src/sdk/services/vpc/v1/vpcs/list.py b/src/sdk/services/vpc/v1/vpcs/list.py new file mode 100644 index 0000000..e358225 --- /dev/null +++ b/src/sdk/services/vpc/v1/vpcs/list.py @@ -0,0 +1,45 @@ +"""List VPCs with auto-pagination.""" + +from __future__ import annotations + +from collections.abc import Iterator + +from sdk.core.opts import BaseQueryOpts +from sdk.core.pagination import marker_paginate +from sdk.core.service_client import ServiceClient + +from .common import _BASE_PATH, Vpc + + +class ListVpcsOpts(BaseQueryOpts): + """Query parameters for listing VPCs.""" + + id: str | None = None + limit: int | None = None + marker: str | None = None + enterprise_project_id: str | None = None + + +def list( # noqa: A001 - shadows builtin intentionally; matches Go SDK style + client: ServiceClient, + opts: ListVpcsOpts | None = None, +) -> Iterator[Vpc]: + """List VPCs with auto-pagination. + + ``GET /v1/{project_id}/vpcs`` + + Uses marker-based pagination. Yields VPC objects one by one, + fetching next pages automatically. 
+ """ + params = opts.to_query_params() if opts else None + limit = opts.limit if (opts and opts.limit and opts.limit > 0) else 0 + + return marker_paginate( + client=client, + path=_BASE_PATH, + items_key="vpcs", + model=Vpc, + marker_key="id", + limit=limit, + params=params, + ) diff --git a/src/sdk/services/vpc/v1/vpcs/update.py b/src/sdk/services/vpc/v1/vpcs/update.py new file mode 100644 index 0000000..a1a67c4 --- /dev/null +++ b/src/sdk/services/vpc/v1/vpcs/update.py @@ -0,0 +1,38 @@ +"""Update a VPC.""" + +from __future__ import annotations + +from typing import ClassVar + +from sdk.core.opts import BaseOpts +from sdk.core.service_client import ServiceClient + +from .common import _BASE_PATH, Route, Vpc + + +class UpdateVpcOpts(BaseOpts): + """Options for updating a VPC. + + All fields are optional. ``None`` means "do not touch", an explicit + empty string clears the field on the server. + """ + + _wrapper_key: ClassVar[str | None] = "vpc" + + name: str | None = None + description: str | None = None + cidr: str | None = None + routes: list[Route] | None = None + + +def update( + client: ServiceClient, + vpc_id: str, + opts: UpdateVpcOpts, +) -> Vpc: + """Update a VPC. + + ``PUT /v1/{project_id}/vpcs/{vpc_id}`` + """ + resp = client.put(f"{_BASE_PATH}/{vpc_id}", json=opts.to_request_body()) + return Vpc.model_validate(resp.json()["vpc"]) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/acceptance/clients.py b/tests/acceptance/clients.py new file mode 100644 index 0000000..bc1e3db --- /dev/null +++ b/tests/acceptance/clients.py @@ -0,0 +1,43 @@ +"""Service client factories for acceptance tests. + +Centralised here so that adding support for a new service in tests is +just one new fixture line rather than copying provider/auth setup. + +To add a client for a new service: + +1. Add a fixture in your test's ``conftest.py`` (typically alongside the + test files, e.g. 
``tests/acceptance/services//conftest.py``):: + + from tests.acceptance.clients import make_service_client + + @pytest.fixture + def dns_client(provider): + return make_service_client(provider, "dns") + +2. Use it in tests like any other fixture:: + + def test_something(dns_client): + ... + +If a service needs a custom region, microversion, or extra headers, +pass them through to ``make_service_client`` - it forwards keyword +arguments to ``ServiceClient``. +""" + +from __future__ import annotations + +from sdk.core.provider import ProviderClient +from sdk.core.service_client import ServiceClient + + +def make_service_client( + provider: ProviderClient, + service_type: str, + **kwargs, +) -> ServiceClient: + """Build a :class:`ServiceClient` for the given service type. + + Extra keyword arguments are forwarded to :class:`ServiceClient` + (``region``, ``microversion``, ``endpoint_override``, ...). + """ + return ServiceClient(provider, service_type=service_type, **kwargs) diff --git a/tests/acceptance/conftest.py b/tests/acceptance/conftest.py new file mode 100644 index 0000000..b3db2e4 --- /dev/null +++ b/tests/acceptance/conftest.py @@ -0,0 +1,75 @@ +"""Shared fixtures for functional tests against a real OTC tenant. + +Tests in this tree skip automatically when ``clouds.yaml`` does not +contain credentials for the configured cloud (``OS_CLOUD``, default +``otc``). + +Tests should never refer to existing resources by hardcoded ID; +everything is created during the test, registered with the ``cleanup`` +fixture, and deleted on teardown in reverse order. +""" + +from __future__ import annotations + +import os +import uuid +from collections.abc import Callable, Generator +from contextlib import ExitStack + +import pytest + +from sdk.core.config import load_from_yaml +from sdk.core.provider import ProviderClient +from sdk.core.service_client import ServiceClient + + +# Cloud name in clouds.yaml. Override to point tests at a different +# tenant without editing the file. 
+DEFAULT_CLOUD = os.environ.get("OS_CLOUD", "otc") + + +@pytest.fixture(scope="session") +def provider() -> ProviderClient: + """Authenticated ``ProviderClient`` shared across the test session. + + Skips the session when no usable clouds.yaml is found or the named + cloud is missing. + """ + try: + auth_config = load_from_yaml(DEFAULT_CLOUD) + except (FileNotFoundError, ValueError) as exc: + pytest.skip(f"clouds.yaml not usable for cloud '{DEFAULT_CLOUD}': {exc}") + + p = ProviderClient(auth_config) + p.authenticate() + return p + + +@pytest.fixture +def cleanup() -> Generator[Callable[..., None], None, None]: + """LIFO cleanup registry backed by :class:`contextlib.ExitStack`. + + Tests register teardown callables; they run in reverse registration + order at fixture teardown. Failures in one callback do not prevent + later callbacks from running (standard ``ExitStack`` behaviour). + + Usage:: + + def test_x(vpc_client, cleanup): + vpc = vpcs.create(vpc_client, ...) + cleanup(vpcs.delete, vpc_client, vpc.id) + + subnet = subnets.create(vpc_client, vpc_id=vpc.id, ...) + cleanup(subnets.delete, vpc_client, subnet.id) + """ + with ExitStack() as stack: + yield stack.callback + + +def unique_name(prefix: str = "sdk-test") -> str: + """Generate a unique resource name with the SDK test prefix. + + The prefix lets humans (and a future sweep script) recognise leftover + resources from test runs. 
+ """ + return f"{prefix}-{uuid.uuid4().hex[:8]}" \ No newline at end of file diff --git a/tests/acceptance/services/vpc/v1/vpcs/__init__.py b/tests/acceptance/services/vpc/v1/vpcs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/acceptance/services/vpc/v1/vpcs/conftest.py b/tests/acceptance/services/vpc/v1/vpcs/conftest.py new file mode 100644 index 0000000..d756304 --- /dev/null +++ b/tests/acceptance/services/vpc/v1/vpcs/conftest.py @@ -0,0 +1,66 @@ +"""Shared fixtures for VPC functional tests.""" + +from __future__ import annotations + +from collections.abc import Callable + +import pytest + +from sdk.core.exceptions import HttpError +from sdk.core.service_client import ServiceClient +from sdk.services.vpc.v1.vpcs import CreateVpcOpts, Vpc, create, delete +from tests.acceptance.clients import make_service_client + +from tests.acceptance.conftest import unique_name + + +@pytest.fixture +def vpc_client(provider) -> ServiceClient: + """``ServiceClient`` for the VPC service. + + Thin wrapper over :func:`tests.acceptance.clients.make_service_client`. + """ + return make_service_client(provider, "vpc") + + +def _safe_delete(client, vpc_id: str) -> None: + """Idempotent delete used by cleanup hooks. + + A test may delete the VPC explicitly as part of its assertions; the + cleanup hook fires afterwards, so a 404 here is expected and ignored. + Any other error surfaces as a real cleanup failure. + """ + try: + delete(client, vpc_id) + except HttpError as exc: + if exc.status_code != 404: + raise + + +@pytest.fixture +def created_vpc(vpc_client, cleanup) -> Callable[..., Vpc]: + """Factory fixture: create a test VPC and auto-register its cleanup. + + Returns a callable. Each call creates a new VPC, registers its + deletion with the session ``cleanup`` hook, and returns the created + resource. Override any field of :class:`CreateVpcOpts` via kwargs. 
+ + Usage:: + + def test_x(created_vpc): + vpc = created_vpc() # default name and CIDR + big = created_vpc(cidr="10.0.0.0/8") # second VPC + # No cleanup code in the test - factory took care of it. + """ + + def _factory(**overrides) -> Vpc: + defaults = { + "name": unique_name("sdk-test-vpc"), + "cidr": "192.168.0.0/16", + } + opts = CreateVpcOpts(**{**defaults, **overrides}) + vpc = create(vpc_client, opts) + cleanup(_safe_delete, vpc_client, vpc.id) + return vpc + + return _factory diff --git a/tests/acceptance/services/vpc/v1/vpcs/test_lifecycle.py b/tests/acceptance/services/vpc/v1/vpcs/test_lifecycle.py new file mode 100644 index 0000000..7e153b1 --- /dev/null +++ b/tests/acceptance/services/vpc/v1/vpcs/test_lifecycle.py @@ -0,0 +1,73 @@ +"""Acceptance test: VPC v1 full lifecycle against a real OTC tenant. + +Covers create -> get -> list -> update -> delete -> get-after-delete. +Skipped automatically when ``clouds.yaml`` does not provide credentials +for the configured cloud (``OS_CLOUD``, default ``otc``). +""" + +from __future__ import annotations + +import pytest + +from sdk.core.exceptions import HttpError +from sdk.services.vpc.v1.vpcs import ( + UpdateVpcOpts, + delete, + get, + list as list_vpcs, + update, +) + +from tests.acceptance.conftest import unique_name + + +def test_full_lifecycle(vpc_client, created_vpc): + initial_cidr = "192.168.0.0/16" + vpc = created_vpc(description="initial", cidr=initial_cidr) + + # --- Create result --- + assert vpc.id, "server must return an id" + assert vpc.cidr == initial_cidr + assert vpc.description == "initial" + # Status right after create is typically CREATING or OK depending + # on backend; both are valid, just verify it was set. 
+ assert vpc.status, "status must be populated" + + # --- Get --- + fetched = get(vpc_client, vpc.id) + assert fetched.id == vpc.id + assert fetched.name == vpc.name + assert fetched.cidr == initial_cidr + assert fetched.description == "initial" + + # --- List: the new VPC must appear --- + found = next( + (v for v in list_vpcs(vpc_client) if v.id == vpc.id), + None, + ) + assert found is not None, "newly created VPC missing from list" + assert found.name == vpc.name + + # --- Update: rename and change description --- + new_name = unique_name("sdk-test-vpc-renamed") + updated = update( + vpc_client, + vpc.id, + UpdateVpcOpts(name=new_name, description="updated"), + ) + assert updated.id == vpc.id + assert updated.name == new_name + assert updated.description == "updated" + + # Re-fetch to confirm the change was persisted, not just echoed back. + refetched = get(vpc_client, vpc.id) + assert refetched.name == new_name + assert refetched.description == "updated" + + # --- Delete --- + delete(vpc_client, vpc.id) + + # --- Get-after-delete: must 404 --- + with pytest.raises(HttpError) as exc_info: + get(vpc_client, vpc.id) + assert exc_info.value.status_code == 404 diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py new file mode 100644 index 0000000..53ddeb1 --- /dev/null +++ b/tests/unit/conftest.py @@ -0,0 +1,32 @@ +"""Shared fixtures and helpers for unit tests across all services.""" + +from __future__ import annotations + +from unittest.mock import MagicMock + +import pytest + + +@pytest.fixture +def mock_client(): + """``ServiceClient`` mock with ``get/post/put/delete`` recorded. + + Tests typically configure ``return_value`` per call:: + + mock_client.post.return_value = make_response({"vpc": {...}}) + + Then assert against ``mock_client..assert_called_once_with(...)``. 
+ """ + return MagicMock() + + +def make_response(payload: dict) -> MagicMock: + """Build a mock ``httpx.Response`` whose ``.json()`` returns ``payload``. + + Imported explicitly by test modules; not a fixture because tests + typically build several response objects per test (different payloads + for different calls), and a fixture would force one-per-test. + """ + resp = MagicMock() + resp.json.return_value = payload + return resp diff --git a/tests/unit/core/__init__.py b/tests/unit/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/core/test_auth.py b/tests/unit/core/test_auth.py new file mode 100644 index 0000000..10a8fe9 --- /dev/null +++ b/tests/unit/core/test_auth.py @@ -0,0 +1,126 @@ +"""Tests for sdk.core.auth.""" + +import pytest + +from sdk.core.auth import AuthConfig, AuthMode +from sdk.core.exceptions import MissingCredentialsError + + +class TestAuthModeDetection: + """AuthConfig should auto-detect strategy from provided fields.""" + + def test_password_mode(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + domain_name="my_domain", + ) + assert cfg.auth_mode == AuthMode.PASSWORD + + def test_aksk_mode(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK_TEST", + secret_key="SK_TEST", + project_id="project123", + ) + assert cfg.auth_mode == AuthMode.AKSK + + def test_token_mode(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + token_id="gAAAA_test_token", + ) + assert cfg.auth_mode == AuthMode.TOKEN + + def test_aksk_takes_priority_over_password(self): + """If both AK/SK and password are provided, AK/SK wins.""" + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK", + secret_key="SK", + password="pass", + username="user", + domain_name="domain", + ) + assert cfg.auth_mode == AuthMode.AKSK + + +class 
TestAuthConfigValidation: + """AuthConfig should reject invalid credential combinations.""" + + def test_no_credentials_raises(self): + with pytest.raises(MissingCredentialsError): + AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + ) + + def test_password_without_username_raises(self): + with pytest.raises(MissingCredentialsError, match="username or user_id"): + AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + password="pass", + ) + + def test_username_without_domain_raises(self): + with pytest.raises(MissingCredentialsError, match="domain_id or domain_name"): + AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + ) + + def test_password_with_user_id_is_valid(self): + """user_id doesn't require domain.""" + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + user_id="user123", + password="pass", + ) + assert cfg.auth_mode == AuthMode.PASSWORD + + def test_password_with_domain_id_is_valid(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + domain_id="domain123", + ) + assert cfg.auth_mode == AuthMode.PASSWORD + + +class TestAuthConfigOptionalFields: + """Optional fields should be preserved.""" + + def test_agency_fields(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK", + secret_key="SK", + agency_name="my_agency", + agency_domain_name="agency_domain", + delegated_project="delegated", + ) + assert cfg.agency_name == "my_agency" + assert cfg.agency_domain_name == "agency_domain" + assert cfg.delegated_project == "delegated" + + def test_temporary_aksk(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK", + secret_key="SK", + security_token="temp_token", + ) + assert cfg.security_token == "temp_token" + + def test_mfa_passcode(self): + cfg = 
AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + domain_name="domain", + passcode="123456", + ) + assert cfg.passcode == "123456" diff --git a/tests/unit/core/test_endpoint.py b/tests/unit/core/test_endpoint.py new file mode 100644 index 0000000..df2761e --- /dev/null +++ b/tests/unit/core/test_endpoint.py @@ -0,0 +1,287 @@ +"""Tests for ``sdk.core.endpoint``.""" + +from __future__ import annotations +from pydantic import ValidationError + +from typing import Any + +import pytest + +from sdk.core.endpoint import ( + Availability, + EndpointOpts, + build_endpoint_locator, + find_endpoint, + _normalize_url, + CatalogEntry +) +from sdk.core.exceptions import EndpointNotFoundError, ServiceNotFoundError + + +# ====================================================================== +# Test data +# ====================================================================== + + +def _sample_catalog() -> list[CatalogEntry]: + raw_catalog = [ + { + "type": "compute", + "name": "nova", + "endpoints": [ + {"interface": "public", "region_id": "eu-de", "url": "https://ecs.eu-de.otc.t-systems.com/v2.1"}, + {"interface": "internal", "region_id": "eu-de", + "url": "https://ecs-internal.eu-de.otc.t-systems.com/v2.1"}, + {"interface": "public", "region_id": "eu-nl", "url": "https://ecs.eu-nl.otc.t-systems.com/v2.1"} + ] + }, + { + "type": "dns", + "name": "dns", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://dns.eu-de.otc.t-systems.com/v2", + }, + ], + }, + { + "type": "identity", + "name": "keystone", + "endpoints": [ + {"interface": "public", "region_id": "eu-de", "url": "https://iam.eu-de.otc.t-systems.com/v3"}, + {"interface": "admin", "region_id": "*", "url": "https://iam-admin.otc.t-systems.com/v3"} + ] + }, + ] + return [CatalogEntry.model_validate(entry) for entry in raw_catalog] + + +# ====================================================================== +# _normalize_url +# 
====================================================================== + + +class TestNormalizeUrl: + def test_adds_slash(self) -> None: + assert _normalize_url("https://example.com") == "https://example.com/" + + def test_keeps_slash(self) -> None: + assert _normalize_url("https://example.com/") == "https://example.com/" + + def test_with_path(self) -> None: + assert _normalize_url("https://example.com/v2.1") == "https://example.com/v2.1/" + + +# ====================================================================== +# EndpointOpts +# ====================================================================== + + +class TestEndpointOpts: + def test_defaults(self) -> None: + opts = EndpointOpts(service_type="compute") + assert opts.service_type == "compute" + assert opts.name == "" + assert opts.region == "" + assert opts.availability == Availability.PUBLIC + + def test_frozen(self) -> None: + opts = EndpointOpts(service_type="compute") + with pytest.raises(ValidationError): + opts.region = "eu-de" # type: ignore[misc] + + def test_all_fields(self) -> None: + opts = EndpointOpts( + service_type="compute", + name="nova", + region="eu-de", + availability=Availability.INTERNAL, + ) + assert opts.name == "nova" + assert opts.region == "eu-de" + assert opts.availability == Availability.INTERNAL + + +# ====================================================================== +# find_endpoint +# ====================================================================== + + +class TestFindEndpoint: + """Test direct catalog search.""" + + def test_finds_public_compute_eu_de(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts(service_type="compute", region="eu-de") + url = find_endpoint(catalog, opts) + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_finds_public_compute_eu_nl(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts(service_type="compute", region="eu-nl") + url = find_endpoint(catalog, opts) + assert url == 
"https://ecs.eu-nl.otc.t-systems.com/v2.1/" + + def test_finds_internal_endpoint(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts( + service_type="compute", + region="eu-de", + availability=Availability.INTERNAL, + ) + url = find_endpoint(catalog, opts) + assert url == "https://ecs-internal.eu-de.otc.t-systems.com/v2.1/" + + def test_finds_dns(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts(service_type="dns", region="eu-de") + url = find_endpoint(catalog, opts) + assert url == "https://dns.eu-de.otc.t-systems.com/v2/" + + def test_wildcard_region_fallback(self) -> None: + """Wildcard ``*`` region used when no exact match.""" + catalog = _sample_catalog() + opts = EndpointOpts(service_type="identity", region="eu-de") + url = find_endpoint(catalog, opts) + assert url == "https://iam.eu-de.otc.t-systems.com/v3/" + + def test_wildcard_admin(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts( + service_type="identity", + availability=Availability.ADMIN, + ) + url = find_endpoint(catalog, opts) + assert url == "https://iam-admin.otc.t-systems.com/v3/" + + def test_no_region_matches_first(self) -> None: + """Empty region returns the first matching endpoint.""" + catalog = _sample_catalog() + opts = EndpointOpts(service_type="compute") + url = find_endpoint(catalog, opts) + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_name_filter(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts( + service_type="compute", name="nova", region="eu-de", + ) + url = find_endpoint(catalog, opts) + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_name_filter_no_match(self) -> None: + """Wrong name on correct type → ServiceNotFoundError.""" + catalog = _sample_catalog() + opts = EndpointOpts( + service_type="compute", name="wrong_name", region="eu-de", + ) + with pytest.raises(ServiceNotFoundError): + find_endpoint(catalog, opts) + + def test_service_not_found(self) -> None: + 
catalog = _sample_catalog() + opts = EndpointOpts(service_type="nonexistent") + with pytest.raises(ServiceNotFoundError): + find_endpoint(catalog, opts) + + def test_endpoint_not_found_wrong_region(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts(service_type="dns", region="us-west-1") + with pytest.raises(EndpointNotFoundError): + find_endpoint(catalog, opts) + + def test_endpoint_not_found_wrong_availability(self) -> None: + """DNS has only public, asking for admin → EndpointNotFoundError.""" + catalog = _sample_catalog() + opts = EndpointOpts( + service_type="dns", + region="eu-de", + availability=Availability.ADMIN, + ) + with pytest.raises(EndpointNotFoundError): + find_endpoint(catalog, opts) + + def test_empty_catalog(self) -> None: + opts = EndpointOpts(service_type="compute") + with pytest.raises(ServiceNotFoundError): + find_endpoint([], opts) + + def test_uses_region_field_fallback(self) -> None: + """Some catalogs use ``region`` instead of ``region_id``.""" + catalog = [ + { + "type": "object-store", + "endpoints": [ + { + "interface": "public", + "region": "eu-de", + "url": "https://obs.eu-de.example.com", + }, + ], + }, + ] + cat = [CatalogEntry.model_validate(catalog) for catalog in catalog] + opts = EndpointOpts(service_type="object-store", region="eu-de") + url = find_endpoint(cat, opts) + assert url == "https://obs.eu-de.example.com/" + + +# ====================================================================== +# build_endpoint_locator +# ====================================================================== + + +class TestBuildEndpointLocator: + """Test the locator closure factory.""" + + def test_locator_uses_default_region(self) -> None: + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, default_region="eu-de") + + opts = EndpointOpts(service_type="compute") + url = locator(opts) + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_locator_region_in_opts_overrides_default(self) -> 
None: + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, default_region="eu-de") + + opts = EndpointOpts(service_type="compute", region="eu-nl") + url = locator(opts) + assert url == "https://ecs.eu-nl.otc.t-systems.com/v2.1/" + + def test_locator_propagates_service_not_found(self) -> None: + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, default_region="eu-de") + + opts = EndpointOpts(service_type="nope") + with pytest.raises(ServiceNotFoundError): + locator(opts) + + def test_locator_propagates_endpoint_not_found(self) -> None: + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, default_region="eu-de") + + opts = EndpointOpts(service_type="dns", region="us-east-1") + with pytest.raises(EndpointNotFoundError): + locator(opts) + + def test_locator_no_default_region(self) -> None: + """Without default region, opts.region is used as-is.""" + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog) + + opts = EndpointOpts(service_type="compute", region="eu-nl") + url = locator(opts) + assert url == "https://ecs.eu-nl.otc.t-systems.com/v2.1/" + + def test_locator_no_region_at_all_returns_first(self) -> None: + """No default, no opts.region → returns first public endpoint.""" + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog) + + opts = EndpointOpts(service_type="compute") + url = locator(opts) + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" diff --git a/tests/unit/core/test_exceptions.py b/tests/unit/core/test_exceptions.py new file mode 100644 index 0000000..d0b603d --- /dev/null +++ b/tests/unit/core/test_exceptions.py @@ -0,0 +1,143 @@ +"""Tests for sdk.core.exceptions.""" + +import pytest + +from sdk.core.exceptions import ( + UnauthorizedError, + BadRequestError, + HttpError, + NotFoundError, + SDKError, + SDKTimeoutError, + ServiceUnavailableError, + TooManyRequestsError, + raise_for_status, +) + + +class TestHttpErrorHierarchy: + def 
test_all_http_errors_inherit_from_sdk_error(self): + err = BadRequestError(method="POST", url="/test", body="bad") + assert isinstance(err, SDKError) + assert isinstance(err, HttpError) + + def test_status_code_on_class(self): + assert BadRequestError.status_code == 400 + assert UnauthorizedError.status_code == 401 + assert NotFoundError.status_code == 404 + + def test_default_format(self): + err = BadRequestError(method="POST", url="/v1/zones", body="invalid json") + assert "Bad request" in str(err) + assert "POST" in str(err) + assert "/v1/zones" in str(err) + assert "invalid json" in str(err) + + def test_custom_format_not_found(self): + err = NotFoundError(method="GET", url="/v1/zones/123", body="zone not found") + assert "Resource not found" in str(err) + assert "zone not found" in str(err) + + def test_custom_format_too_many_requests_without_header(self): + err = TooManyRequestsError(method="GET", url="/test", body="") + assert "too many requests" in str(err).lower() + assert "wait up to one minute" in str(err).lower() + + def test_custom_format_too_many_requests_with_retry_after(self): + err = TooManyRequestsError( + method="GET", url="/test", body="", + headers={"Retry-After": "30"}, + ) + assert "30" in str(err) + assert "Retry after" in str(err) + + def test_custom_format_service_unavailable(self): + err = ServiceUnavailableError(method="GET", url="/test", body="") + assert "try again later" in str(err).lower() + + +class TestHttpErrorHeaders: + def test_headers_stored(self): + err = BadRequestError( + method="POST", url="/test", body="bad", + headers={"X-Request-Id": "abc123"}, + ) + assert err.headers["X-Request-Id"] == "abc123" + + def test_headers_default_to_empty_dict(self): + err = BadRequestError(method="POST", url="/test", body="bad") + assert err.headers == {} + + def test_request_id_extracted(self): + err = BadRequestError( + method="POST", url="/test", body="bad", + headers={"x-request-id": "req-abc123"}, + ) + assert err.request_id == 
"req-abc123" + + def test_request_id_empty_when_missing(self): + err = BadRequestError(method="POST", url="/test", body="bad") + assert err.request_id == "" + + +class TestHttpErrorExpectedCodes: + def test_expected_codes_stored(self): + err = HttpError( + method="GET", url="/test", body="error", + expected=[200, 201], status_code=409, + ) + assert err.expected == [200, 201] + assert "[200, 201]" in str(err) + + def test_expected_codes_default_to_empty(self): + err = HttpError( + method="GET", url="/test", body="error", + status_code=500, + ) + assert err.expected == [] + + +class TestRaiseForStatus: + def test_raises_known_status(self): + with pytest.raises(NotFoundError) as exc_info: + raise_for_status(404, method="GET", url="/test", body="gone") + assert exc_info.value.status_code == 404 + + def test_raises_generic_for_unknown_status(self): + with pytest.raises(HttpError) as exc_info: + raise_for_status(418, method="GET", url="/teapot", body="short and stout") + assert exc_info.value.status_code == 418 + + def test_authentication_error_message(self): + with pytest.raises(UnauthorizedError) as exc_info: + raise_for_status(401, method="POST", url="/v1/auth/tokens", body="invalid token") + assert "Authentication failed" in str(exc_info.value) + + def test_passes_headers_through(self): + with pytest.raises(TooManyRequestsError) as exc_info: + raise_for_status( + 429, method="GET", url="/test", body="", + headers={"Retry-After": "60"}, + ) + assert exc_info.value.headers["Retry-After"] == "60" + + def test_passes_expected_codes_through(self): + with pytest.raises(NotFoundError) as exc_info: + raise_for_status( + 404, method="GET", url="/test", body="gone", + expected=[200], + ) + assert exc_info.value.expected == [200] + + +class TestSDKTimeoutError: + def test_not_builtin_timeout(self): + """SDKTimeoutError should not be confused with builtin TimeoutError.""" + err = SDKTimeoutError("operation timed out") + assert isinstance(err, SDKError) + assert not 
isinstance(err, builtins_timeout_error()) + + +def builtins_timeout_error(): + """Return the builtin TimeoutError for isinstance check.""" + return TimeoutError diff --git a/tests/unit/core/test_opts.py b/tests/unit/core/test_opts.py new file mode 100644 index 0000000..9063e29 --- /dev/null +++ b/tests/unit/core/test_opts.py @@ -0,0 +1,167 @@ +"""Tests for ``sdk.core.opts``.""" + +from __future__ import annotations + +from typing import ClassVar + +import pytest +from pydantic import BaseModel, Field + +from sdk.core.opts import BaseOpts, BaseQueryOpts + + +class _Route(BaseModel): + destination: str = "" + nexthop: str = "" + + +class _CreateOpts(BaseOpts): + _wrapper_key: ClassVar[str | None] = "vpc" + + name: str = "" + description: str = "" + cidr: str = "" + + +class _UpdateOpts(BaseOpts): + _wrapper_key: ClassVar[str | None] = "vpc" + + name: str | None = None + description: str | None = None + cidr: str | None = None + routes: list[_Route] | None = None + + +class _UnwrappedOpts(BaseOpts): + name: str | None = None + value: int | None = None + + +class _ListOpts(BaseQueryOpts): + id: str | None = None + limit: int | None = None + marker: str | None = None + enterprise_project_id: str | None = None + + +# --- BaseOpts --- + + +def test_empty_opts_returns_empty_wrapped_body(): + assert _CreateOpts().to_request_body() == {"vpc": {}} + + +def test_partially_filled_opts_includes_only_set_fields(): + opts = _CreateOpts(name="my-vpc", cidr="192.168.0.0/16") + assert opts.to_request_body() == { + "vpc": {"name": "my-vpc", "cidr": "192.168.0.0/16"} + } + + +def test_nested_models_serialized_recursively(): + opts = _UpdateOpts( + name="my-vpc", + routes=[ + _Route(destination="10.0.0.0/8", nexthop="192.168.1.1"), + _Route(destination="172.16.0.0/12", nexthop="192.168.1.2"), + ], + ) + assert opts.to_request_body() == { + "vpc": { + "name": "my-vpc", + "routes": [ + {"destination": "10.0.0.0/8", "nexthop": "192.168.1.1"}, + {"destination": "172.16.0.0/12", "nexthop": 
"192.168.1.2"}, + ], + } + } + + +def test_none_fields_excluded_from_body(): + opts = _UpdateOpts(name="my-vpc") + assert opts.to_request_body() == {"vpc": {"name": "my-vpc"}} + + +def test_explicit_empty_string_is_preserved(): + opts = _UpdateOpts(name="my-vpc", description="") + assert opts.to_request_body() == { + "vpc": {"name": "my-vpc", "description": ""} + } + + +def test_wrapper_key_omitted_returns_flat_body(): + opts = _UnwrappedOpts(name="x", value=42) + assert opts.to_request_body() == {"name": "x", "value": 42} + + +def test_wrapper_key_with_empty_unwrapped_body(): + assert _UnwrappedOpts().to_request_body() == {} + + +def test_nested_model_with_partial_fields(): + opts = _UpdateOpts(routes=[_Route(destination="10.0.0.0/8")]) + assert opts.to_request_body() == { + "vpc": {"routes": [{"destination": "10.0.0.0/8"}]} + } + + +def test_empty_routes_list_is_preserved(): + opts = _UpdateOpts(routes=[]) + assert opts.to_request_body() == {"vpc": {"routes": []}} + + +def test_alias_is_respected_in_body(): + class _Aliased(BaseOpts): + snake_case: str | None = Field(default=None, alias="camelCase") + + assert _Aliased(camelCase="value").to_request_body() == {"camelCase": "value"} + + +# --- BaseQueryOpts --- + + +def test_empty_query_opts_returns_empty_dict(): + assert _ListOpts().to_query_params() == {} + + +def test_query_params_converts_int_to_string(): + opts = _ListOpts(limit=20, marker="abc") + assert opts.to_query_params() == {"limit": "20", "marker": "abc"} + + +def test_query_params_preserves_zero(): + assert _ListOpts(limit=0).to_query_params() == {"limit": "0"} + + +def test_query_params_skips_empty_string(): + opts = _ListOpts(id="", marker="real") + assert opts.to_query_params() == {"marker": "real"} + + +def test_query_params_skips_none(): + opts = _ListOpts(id="real-id", limit=None) + assert opts.to_query_params() == {"id": "real-id"} + + +def test_query_params_serializes_bool_lowercase(): + class _BoolQuery(BaseQueryOpts): + active: bool | None = 
None + + assert _BoolQuery(active=True).to_query_params() == {"active": "true"} + assert _BoolQuery(active=False).to_query_params() == {"active": "false"} + + +def test_query_params_rejects_list_value(): + class _BadQuery(BaseQueryOpts): + tags: list[str] | None = None + + with pytest.raises(TypeError, match="non-scalar"): + _BadQuery(tags=["a", "b"]).to_query_params() + + +def test_query_params_rejects_dict_value(): + class _BadQuery(BaseQueryOpts): + meta: dict[str, str] | None = None + + with pytest.raises(TypeError, match="non-scalar"): + _BadQuery(meta={"k": "v"}).to_query_params() \ No newline at end of file diff --git a/tests/unit/core/test_pagination.py b/tests/unit/core/test_pagination.py new file mode 100644 index 0000000..b9839f1 --- /dev/null +++ b/tests/unit/core/test_pagination.py @@ -0,0 +1,452 @@ +"""Tests for ``sdk.core.pagination``.""" + +from __future__ import annotations + +import json +from typing import Any + +import httpx +import pytest + +from sdk.core.auth import AuthConfig +from sdk.core.pagination import ( + _build_url, + _extract_link, + linked_paginate, + marker_paginate, + offset_paginate, + single_page, +) +from sdk.core.provider import ProviderClient +from sdk.core.service_client import ServiceClient + + +# ====================================================================== +# Fixtures +# ====================================================================== + + +def _make_service_client(handler: Any) -> ServiceClient: + """Build a ServiceClient with mocked transport.""" + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + domain_name="dom", + ) + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + provider = ProviderClient(cfg, http_client=http_client) + provider.token_id = "test-token" + return ServiceClient( + provider, endpoint_override="https://api.example.com/v1", + ) + + +# 
====================================================================== +# _build_url +# ====================================================================== + + +class TestBuildUrl: + def test_no_params(self) -> None: + assert _build_url("servers", None) == "servers" + + def test_adds_params(self) -> None: + url = _build_url("servers", {"limit": "10"}) + assert "limit=10" in url + assert url.startswith("servers?") + + def test_merges_existing_params(self) -> None: + url = _build_url("servers?status=ACTIVE", {"limit": "5"}) + assert "status=ACTIVE" in url + assert "limit=5" in url + + def test_overrides_existing(self) -> None: + url = _build_url("servers?limit=20", {"limit": "5"}) + assert "limit=5" in url + assert "limit=20" not in url + + +# ====================================================================== +# _extract_link +# ====================================================================== + + +class TestExtractLink: + def test_simple_path(self) -> None: + data = {"links": {"next": "https://example.com/page2"}} + assert _extract_link(data, ["links", "next"]) == "https://example.com/page2" + + def test_missing_key(self) -> None: + data = {"links": {"prev": "..."}} + assert _extract_link(data, ["links", "next"]) == "" + + def test_null_value(self) -> None: + data = {"links": {"next": None}} + assert _extract_link(data, ["links", "next"]) == "" + + def test_deep_path(self) -> None: + data = {"a": {"b": {"c": "url"}}} + assert _extract_link(data, ["a", "b", "c"]) == "url" + + def test_not_a_dict(self) -> None: + data = {"links": "not-a-dict"} + assert _extract_link(data, ["links", "next"]) == "" + + def test_empty_path(self) -> None: + data = {"links": {"next": "url"}} + assert _extract_link(data, []) == "" + + +# ====================================================================== +# marker_paginate +# ====================================================================== + + +class TestMarkerPaginate: + def test_single_page(self) -> None: + """Single 
page with fewer items than limit.""" + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={ + "servers": [ + {"id": "s1", "name": "a"}, + {"id": "s2", "name": "b"}, + ], + }) + + sc = _make_service_client(handler) + items = list(marker_paginate(sc, "servers", items_key="servers", limit=10)) + + assert len(items) == 2 + assert items[0]["id"] == "s1" + + def test_multi_page(self) -> None: + """Two pages, second page is shorter → stops.""" + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + url = str(req.url) + if "marker=" not in url: + return httpx.Response(200, json={ + "items": [{"id": "1"}, {"id": "2"}], + }) + else: + return httpx.Response(200, json={ + "items": [{"id": "3"}], + }) + + sc = _make_service_client(handler) + items = list(marker_paginate(sc, "items", items_key="items", limit=2)) + + assert len(items) == 3 + assert call_count == 2 + assert items[-1]["id"] == "3" + + def test_empty_first_page(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + items = list(marker_paginate(sc, "items", items_key="items")) + + assert items == [] + + def test_marker_param_passed(self) -> None: + """Second request should include marker from last item.""" + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + url = str(req.url) + if "marker=" not in url: + return httpx.Response(200, json={ + "items": [{"id": "abc"}, {"id": "def"}], + }) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + list(marker_paginate(sc, "items", items_key="items", limit=2)) + + assert len(captured) == 2 + assert "marker=def" in str(captured[1].url) + + def test_custom_marker_key(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + 
captured.append(req) + url = str(req.url) + if "marker=" not in url: + return httpx.Response(200, json={ + "items": [{"uid": "x1"}], + }) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + list(marker_paginate( + sc, "items", items_key="items", marker_key="uid", limit=1, + )) + + assert "marker=x1" in str(captured[1].url) + + def test_no_limit_stops_on_empty(self) -> None: + """Without limit, pagination stops when page is empty.""" + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + if call_count == 1: + return httpx.Response(200, json={ + "items": [{"id": "a"}], + }) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + items = list(marker_paginate(sc, "items", items_key="items")) + + assert len(items) == 1 + assert call_count == 2 + + def test_extra_params_forwarded(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + list(marker_paginate( + sc, "items", items_key="items", + params={"status": "ACTIVE"}, + )) + + assert "status=ACTIVE" in str(captured[0].url) + + +# ====================================================================== +# offset_paginate +# ====================================================================== + + +class TestOffsetPaginate: + def test_single_page(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={ + "topics": [{"id": "t1"}, {"id": "t2"}], + }) + + sc = _make_service_client(handler) + items = list(offset_paginate( + sc, "topics", items_key="topics", limit=10, + )) + + assert len(items) == 2 + + def test_multi_page(self) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + url = str(req.url) + if "offset=0" in url 
or "offset" not in url: + return httpx.Response(200, json={ + "items": [{"id": "1"}, {"id": "2"}], + }) + elif "offset=2" in url: + return httpx.Response(200, json={ + "items": [{"id": "3"}], + }) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + items = list(offset_paginate( + sc, "items", items_key="items", limit=2, + )) + + assert len(items) == 3 + assert call_count == 2 + + def test_offset_increments(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + url = str(req.url) + if "offset=0" in url: + return httpx.Response(200, json={ + "items": [{"id": "a"}, {"id": "b"}, {"id": "c"}], + }) + if "offset=3" in url: + return httpx.Response(200, json={ + "items": [{"id": "d"}], + }) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + items = list(offset_paginate( + sc, "items", items_key="items", limit=3, + )) + + assert len(items) == 4 + assert "offset=0" in str(captured[0].url) + assert "offset=3" in str(captured[1].url) + + def test_start_offset(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + list(offset_paginate( + sc, "items", items_key="items", limit=5, start_offset=10, + )) + + assert "offset=10" in str(captured[0].url) + + def test_empty_first_page(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + items = list(offset_paginate( + sc, "items", items_key="items", limit=10, + )) + + assert items == [] + + +# ====================================================================== +# linked_paginate +# ====================================================================== + + +class TestLinkedPaginate: + def test_single_page_no_next(self) 
-> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={ + "items": [{"id": "1"}], + "links": {"next": None}, + }) + + sc = _make_service_client(handler) + items = list(linked_paginate(sc, "items", items_key="items")) + + assert len(items) == 1 + + def test_follows_next_link(self) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + url = str(req.url) + if "page2" not in url: + return httpx.Response(200, json={ + "items": [{"id": "1"}], + "links": {"next": "items?page=page2"}, + }) + return httpx.Response(200, json={ + "items": [{"id": "2"}], + "links": {"next": None}, + }) + + sc = _make_service_client(handler) + items = list(linked_paginate(sc, "items", items_key="items")) + + assert len(items) == 2 + assert call_count == 2 + + def test_custom_link_path(self) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + if call_count == 1: + return httpx.Response(200, json={ + "data": [{"id": "1"}], + "pagination": {"next_url": "data?cursor=abc"}, + }) + return httpx.Response(200, json={ + "data": [{"id": "2"}], + "pagination": {"next_url": None}, + }) + + sc = _make_service_client(handler) + items = list(linked_paginate( + sc, "data", items_key="data", + link_path=["pagination", "next_url"], + )) + + assert len(items) == 2 + + def test_empty_first_page(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={ + "items": [], + "links": {"next": "irrelevant"}, + }) + + sc = _make_service_client(handler) + items = list(linked_paginate(sc, "items", items_key="items")) + + assert items == [] + + def test_missing_links_key(self) -> None: + """No 'links' in response → stops after first page.""" + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={ + "items": [{"id": "1"}], + }) + + sc = _make_service_client(handler) + 
items = list(linked_paginate(sc, "items", items_key="items")) + + assert len(items) == 1 + + +# ====================================================================== +# single_page +# ====================================================================== + + +class TestSinglePage: + def test_returns_list(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={ + "servers": [{"id": "s1"}, {"id": "s2"}], + }) + + sc = _make_service_client(handler) + items = single_page(sc, "servers", items_key="servers") + + assert isinstance(items, list) + assert len(items) == 2 + + def test_empty_list(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={"servers": []}) + + sc = _make_service_client(handler) + items = single_page(sc, "servers", items_key="servers") + + assert items == [] + + def test_with_params(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + single_page(sc, "items", items_key="items", params={"foo": "bar"}) + + assert "foo=bar" in str(captured[0].url) diff --git a/tests/unit/core/test_provider.py b/tests/unit/core/test_provider.py new file mode 100644 index 0000000..378e3ae --- /dev/null +++ b/tests/unit/core/test_provider.py @@ -0,0 +1,933 @@ +"""Tests for ``sdk.core.provider``.""" + +from __future__ import annotations + +import json +from typing import Any +from unittest.mock import patch +from pydantic import BaseModel +import httpx +import pytest + +from sdk.core.auth import AuthConfig, AuthMode +from sdk.core.exceptions import ( + UnauthorizedError, + BadRequestError, + EndpointNotFoundError, + InternalServerError, + NotFoundError, + ReauthError, + ServiceNotFoundError, + TooManyRequestsError, +) +from sdk.core.provider import ( + ProviderClient, + _build_agency_auth_body, + _build_scope, + 
_build_v3_auth_body, +) +from sdk.core.endpoint import build_endpoint_locator, CatalogEntry + + +# ====================================================================== +# Fixtures +# ====================================================================== + + +def _password_config(**overrides: Any) -> AuthConfig: + """Create a password AuthConfig with defaults.""" + defaults = { + "identity_endpoint": "https://iam.eu-de.otc.t-systems.com/v3", + "username": "testuser", + "password": "secret", + "domain_name": "testdomain", + "tenant_name": "eu-de", + "allow_reauth": True, + } + defaults.update(overrides) + return AuthConfig(**defaults) + + +def _aksk_config(**overrides: Any) -> AuthConfig: + """Create an AK/SK AuthConfig with defaults.""" + defaults = { + "identity_endpoint": "https://iam.eu-de.otc.t-systems.com/v3", + "access_key": "MYACCESSKEY", + "secret_key": "MYSECRETKEY", + "region": "eu-de", + "project_id": "project-123", + } + defaults.update(overrides) + return AuthConfig(**defaults) + + +def _token_response( + *, + token_id: str = "tok-abc-123", + project_id: str = "proj-123", + project_name: str = "eu-de", + domain_id: str = "dom-456", + user_id: str = "user-789", + catalog: list[dict[str, Any]] | None = None, +) -> httpx.Response: + """Build a mock IAM token response.""" + body: dict[str, Any] = { + "token": { + "project": { + "id": project_id, + "name": project_name, + "domain": {"id": domain_id}, + }, + "user": { + "id": user_id, + "domain": {"id": domain_id}, + }, + }, + } + raw_catalog = catalog if catalog is not None else _sample_catalog() + serialized_catalog = [] + for entry in raw_catalog: + if isinstance(entry, BaseModel): + serialized_catalog.append(entry.model_dump(by_alias=True)) + else: + serialized_catalog.append(entry) + + body["token"]["catalog"] = serialized_catalog + + resp = httpx.Response( + 201, + json=body, + headers={"x-subject-token": token_id}, + ) + return resp + + +def _sample_catalog() -> list[CatalogEntry]: + raw = [ + { + 
"type": "compute", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://ecs.eu-de.otc.t-systems.com/v2.1", + }, + { + "interface": "internal", + "region_id": "eu-de", + "url": "https://ecs-internal.eu-de.otc.t-systems.com/v2.1", + }, + ], + }, + { + "type": "dns", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://dns.eu-de.otc.t-systems.com/v2", + }, + ], + }, + ] + return [CatalogEntry.model_validate(raw) for raw in raw] + + +def _catalog_response() -> httpx.Response: + serialized_catalog = [ + entry.model_dump(by_alias=True) + for entry in _sample_catalog() + ] + return httpx.Response(200, json={"catalog": serialized_catalog}) + + +# ====================================================================== +# ProviderClient: basic properties +# ====================================================================== + + +class TestProviderClientProperties: + """Test basic ProviderClient attributes and properties.""" + + def test_identity_base_strips_v3(self) -> None: + cfg = _password_config() + client = ProviderClient(cfg) + assert client.identity_base == "https://iam.eu-de.otc.t-systems.com/" + + def test_identity_base_strips_v3_slash(self) -> None: + cfg = _password_config( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3/" + ) + client = ProviderClient(cfg) + assert client.identity_base == "https://iam.eu-de.otc.t-systems.com/" + + def test_identity_base_no_version(self) -> None: + cfg = _password_config( + identity_endpoint="https://iam.eu-de.otc.t-systems.com" + ) + client = ProviderClient(cfg) + assert client.identity_base == "https://iam.eu-de.otc.t-systems.com/" + + def test_identity_v3_endpoint(self) -> None: + cfg = _password_config() + client = ProviderClient(cfg) + assert client.identity_v3_endpoint == "https://iam.eu-de.otc.t-systems.com/v3/" + + def test_initial_state_empty(self) -> None: + cfg = _password_config() + client = ProviderClient(cfg) + assert client.token_id 
== "" + assert client.project_id == "" + assert client.user_id == "" + assert client.domain_id == "" + # region_id is "" because _password_config has no region field + assert client.region_id == "" + + def test_region_from_config(self) -> None: + cfg = _password_config(region="eu-nl") + client = ProviderClient(cfg) + assert client.region_id == "eu-nl" + + def test_context_manager(self) -> None: + cfg = _password_config() + with ProviderClient(cfg) as client: + assert isinstance(client, ProviderClient) + + +# ====================================================================== +# Auth body builders +# ====================================================================== + + +class TestBuildV3AuthBody: + """Test ``_build_v3_auth_body`` for password and token modes.""" + + def test_password_with_username_domain_name(self) -> None: + cfg = _password_config() + body = _build_v3_auth_body(cfg) + + auth = body["auth"] + assert auth["identity"]["methods"] == ["password"] + user = auth["identity"]["password"]["user"] + assert user["name"] == "testuser" + assert user["password"] == "secret" + assert user["domain"] == {"name": "testdomain"} + + def test_password_with_user_id(self) -> None: + cfg = _password_config(username=None, user_id="uid-123") + body = _build_v3_auth_body(cfg) + + user = body["auth"]["identity"]["password"]["user"] + assert user["id"] == "uid-123" + assert "name" not in user + + def test_password_with_domain_id(self) -> None: + cfg = _password_config(domain_name=None, domain_id="did-456") + body = _build_v3_auth_body(cfg) + + user = body["auth"]["identity"]["password"]["user"] + assert user["domain"] == {"id": "did-456"} + + def test_password_with_totp(self) -> None: + cfg = _password_config(passcode="123456") + body = _build_v3_auth_body(cfg) + + identity = body["auth"]["identity"] + assert "totp" in identity["methods"] + assert identity["totp"]["user"]["passcode"] == "123456" + + def test_token_auth(self) -> None: + cfg = AuthConfig( + 
identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + token_id="existing-token", + ) + body = _build_v3_auth_body(cfg) + + identity = body["auth"]["identity"] + assert identity["methods"] == ["token"] + assert identity["token"]["id"] == "existing-token" + + def test_scope_project_by_name(self) -> None: + cfg = _password_config(tenant_name="eu-de") + body = _build_v3_auth_body(cfg) + + scope = body["auth"]["scope"] + assert scope["project"]["name"] == "eu-de" + assert scope["project"]["domain"]["name"] == "testdomain" + + def test_scope_project_by_id(self) -> None: + cfg = _password_config(tenant_id="proj-xyz", tenant_name=None) + body = _build_v3_auth_body(cfg) + + scope = body["auth"]["scope"] + assert scope["project"]["id"] == "proj-xyz" + + def test_no_scope(self) -> None: + cfg = _password_config( + username=None, user_id="uid-1", + tenant_name=None, domain_name=None, domain_id=None, + ) + body = _build_v3_auth_body(cfg) + assert "scope" not in body["auth"] + + +class TestBuildAgencyAuthBody: + """Test ``_build_agency_auth_body``.""" + + def test_agency_body(self) -> None: + cfg = _password_config( + agency_name="my_agency", + agency_domain_name="agency_domain", + delegated_project="delegated_proj", + ) + body = _build_agency_auth_body(cfg) + + identity = body["auth"]["identity"] + assert identity["methods"] == ["assume_role"] + assert identity["assume_role"]["xrole_name"] == "my_agency" + assert identity["assume_role"]["domain_name"] == "agency_domain" + + scope = body["auth"]["scope"] + assert scope["project"]["name"] == "delegated_proj" + + def test_agency_no_delegated_project(self) -> None: + cfg = _password_config( + agency_name="my_agency", + agency_domain_name="agency_domain", + ) + body = _build_agency_auth_body(cfg) + assert "scope" not in body["auth"] + + +class TestBuildScope: + """Test ``_build_scope``.""" + + def test_project_id(self) -> None: + cfg = _password_config(tenant_id="proj-1", tenant_name=None) + scope = _build_scope(cfg) + 
assert scope == {"project": {"id": "proj-1"}} + + def test_project_name_with_domain(self) -> None: + cfg = _password_config(tenant_name="eu-de") + scope = _build_scope(cfg) + assert scope == { + "project": { + "name": "eu-de", + "domain": {"name": "testdomain"}, + }, + } + + def test_domain_only(self) -> None: + cfg = _password_config(tenant_name=None, domain_name=None, domain_id="did-1") + scope = _build_scope(cfg) + assert scope == {"domain": {"id": "did-1"}} + + def test_none_when_empty(self) -> None: + cfg = _password_config( + username=None, user_id="uid-1", + tenant_name=None, domain_name=None, domain_id=None, + ) + assert _build_scope(cfg) is None + + +# ====================================================================== +# Endpoint locator +# ====================================================================== + + +class TestEndpointLocator: + """Test ``build_endpoint_locator`` (acceptance with provider).""" + + def test_finds_public_endpoint(self) -> None: + from sdk.core.endpoint import EndpointOpts + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, "eu-de") + + url = locator(EndpointOpts(service_type="compute")) + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_finds_dns_endpoint(self) -> None: + from sdk.core.endpoint import EndpointOpts + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, "eu-de") + + url = locator(EndpointOpts(service_type="dns")) + assert url == "https://dns.eu-de.otc.t-systems.com/v2/" + + def test_region_override(self) -> None: + from sdk.core.endpoint import EndpointOpts + raw = [ + { + "type": "compute", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-nl", + "url": "https://ecs.eu-nl.example.com/v2.1", + }, + ], + }, + ] + catalog = [CatalogEntry.model_validate(entry) for entry in raw] + locator = build_endpoint_locator(catalog, "eu-de") + + url = locator(EndpointOpts(service_type="compute", region="eu-nl")) + assert url == 
"https://ecs.eu-nl.example.com/v2.1/" + + def test_service_not_found(self) -> None: + from sdk.core.endpoint import EndpointOpts + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, "eu-de") + + with pytest.raises(ServiceNotFoundError): + locator(EndpointOpts(service_type="nonexistent")) + + def test_endpoint_not_found_wrong_region(self) -> None: + from sdk.core.endpoint import EndpointOpts + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, "eu-de") + + with pytest.raises(EndpointNotFoundError): + locator(EndpointOpts(service_type="compute", region="us-west-1")) + + +# ====================================================================== +# ProviderClient: authenticate (v3_auth) +# ====================================================================== + + +class TestV3Auth: + """Test password/token auth flows using mocked HTTP.""" + + def test_password_auth_sets_state(self) -> None: + """Password auth should set token, project, user, domain.""" + cfg = _password_config() + token_resp = _token_response() + + transport = httpx.MockTransport(lambda req: token_resp) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + client.authenticate() + + assert client.token_id == "tok-abc-123" + assert client.project_id == "proj-123" + assert client.user_id == "user-789" + assert client.domain_id == "dom-456" + assert client.endpoint_locator is not None + + def test_password_auth_sends_correct_body(self) -> None: + """Verify the auth request body is correct.""" + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return _token_response() + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + client.authenticate() + + assert len(captured) == 1 + body = json.loads(captured[0].content) + assert 
body["auth"]["identity"]["methods"] == ["password"] + assert body["auth"]["identity"]["password"]["user"]["name"] == "testuser" + + def test_token_reuse_auth(self) -> None: + """Token auth should GET and set token from config.""" + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + token_id="existing-tok", + ) + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + serialized_catalog = [ + entry.model_dump(by_alias=True) + for entry in _sample_catalog() + ] + return httpx.Response( + 200, + json={ + "token": { + "user": {"id": "u-1", "domain": {"id": "d-1"}}, + "catalog": serialized_catalog, + }, + }, + headers={"x-subject-token": "existing-tok"}, + ) + + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + client.authenticate() + + assert len(captured) == 1 + assert captured[0].method == "GET" + assert client.token_id == "existing-tok" + + + def test_auth_failure_raises(self) -> None: + cfg = _password_config() + resp_401 = httpx.Response(401, text="Unauthorized") + transport = httpx.MockTransport(lambda req: resp_401) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + with pytest.raises(UnauthorizedError): + client.authenticate() + + +# ====================================================================== +# ProviderClient: authenticate (v3_auth_with_agency) +# ====================================================================== + + +class TestV3AuthWithAgency: + """Test password + agency auth flow.""" + + def test_agency_auth_two_requests(self) -> None: + """Agency auth should issue two requests: normal + assume_role.""" + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + return _token_response( + token_id=f"tok-{call_count}", + ) + + cfg = 
_password_config( + agency_name="ag1", + agency_domain_name="ag_domain", + delegated_project="proj_deleg", + ) + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + client.authenticate() + + assert call_count == 2 # first: v3auth, second: assume_role + assert client.token_id == "tok-2" + + +# ====================================================================== +# ProviderClient: authenticate (aksk) +# ====================================================================== + + +class TestAKSKAuth: + """Test AK/SK auth flow.""" + + def test_aksk_auth_fetches_catalog(self) -> None: + """AK/SK auth should fetch catalog and set endpoint_locator.""" + cfg = _aksk_config() + + def handler(req: httpx.Request) -> httpx.Response: + # Should be a signed catalog request + assert "authorization" in req.headers + return _catalog_response() + + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + client.authenticate() + + assert client.token_id == "" # No token in AK/SK mode + assert client.project_id == "project-123" + assert client.endpoint_locator is not None + + def test_aksk_auth_resolves_project_name(self) -> None: + """When project_name given but no project_id, resolve via API.""" + cfg = _aksk_config(project_id=None, project_name="eu-de") + call_idx = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_idx + call_idx += 1 + url = str(req.url) + if "projects" in url: + return httpx.Response( + 200, + json={"projects": [{"id": "resolved-proj-id"}]}, + ) + if "catalog" in url: + return _catalog_response() + return httpx.Response(404, text="Not found") + + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + client.authenticate() + + assert client.project_id == 
"resolved-proj-id" + + +# ====================================================================== +# ProviderClient: request() with retry logic +# ====================================================================== + + +class TestRequest: + """Test the ``request()`` method with mocked transport.""" + + def _authenticated_client( + self, handler: Any, + ) -> ProviderClient: + """Create an already-authenticated client with a mock transport.""" + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "test-token" + return client + + def test_successful_get(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={"result": "ok"}) + + client = self._authenticated_client(handler) + resp = client.request("GET", "https://api.example.com/resource") + + assert resp.status_code == 200 + assert resp.json() == {"result": "ok"} + + def test_auth_header_set(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + client = self._authenticated_client(handler) + client.request("GET", "https://api.example.com/resource") + + assert captured[0].headers["x-auth-token"] == "test-token" + + def test_post_with_json(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(201, json={"id": "new-1"}) + + client = self._authenticated_client(handler) + resp = client.request( + "POST", + "https://api.example.com/resource", + json={"name": "test"}, + ) + + assert resp.status_code == 201 + body = json.loads(captured[0].content) + assert body["name"] == "test" + + def test_400_raises_bad_request(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(400, text="Bad 
request body") + + client = self._authenticated_client(handler) + with pytest.raises(BadRequestError) as exc_info: + client.request("GET", "https://api.example.com/resource") + assert "Bad request body" in str(exc_info.value) + + def test_404_raises_not_found(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(404, text="Not found") + + client = self._authenticated_client(handler) + with pytest.raises(NotFoundError): + client.request("GET", "https://api.example.com/missing") + + def test_500_raises_internal_server_error(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(500, text="Server error") + + client = self._authenticated_client(handler) + with pytest.raises(InternalServerError): + client.request("GET", "https://api.example.com/broken") + + +class TestRequestRetry: + """Test retry logic on 401, 429, 502, 504.""" + + def test_401_triggers_reauth_and_retry(self) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + if call_count == 1: + return httpx.Response(401, text="Unauthorized") + return httpx.Response(200, json={"ok": True}) + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "old-token" + + def fake_authenticate() -> None: + client.token_id = "new-token" + + client.authenticate = fake_authenticate + + resp = client.request("GET", "https://api.example.com/resource") + + assert resp.status_code == 200 + assert call_count == 2 + assert client.token_id == "new-token" + + def test_401_without_reauth_raises(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(401, text="Unauthorized") + + cfg = _password_config() + cfg = cfg.model_copy(update={"allow_reauth": False}) + transport = httpx.MockTransport(handler) + http_client = 
httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + with pytest.raises(UnauthorizedError): + client.request("GET", "https://api.example.com/resource") + + def test_401_reauth_failure_raises_reauth_error(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(401, text="Unauthorized") + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + def bad_reauth() -> None: + raise RuntimeError("reauth failed") + + client.authenticate = bad_reauth + + with pytest.raises(ReauthError): + client.request("GET", "https://api.example.com/resource") + + @patch("sdk.core.provider.time.sleep", return_value=None) + def test_429_backoff_retry(self, mock_sleep: Any) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + if call_count <= 2: + return httpx.Response(429, text="Rate limited") + return httpx.Response(200, json={"ok": True}) + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + resp = client.request("GET", "https://api.example.com/resource") + + assert resp.status_code == 200 + assert call_count == 3 + assert mock_sleep.call_count == 2 + + @patch("sdk.core.provider.time.sleep", return_value=None) + def test_429_exhausts_retries(self, mock_sleep: Any) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(429, text="Rate limited") + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient( + cfg, http_client=http_client, max_backoff_retries=2, + ) + client.token_id = "tok" + + with 
pytest.raises(TooManyRequestsError): + client.request("GET", "https://api.example.com/resource") + + @patch("sdk.core.provider.time.sleep", return_value=None) + def test_502_gateway_retry(self, mock_sleep: Any) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + if call_count == 1: + return httpx.Response(502, text="Bad Gateway") + return httpx.Response(200, json={"ok": True}) + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + resp = client.request("GET", "https://api.example.com/resource") + + assert resp.status_code == 200 + assert call_count == 2 + + @patch("sdk.core.provider.time.sleep", return_value=None) + def test_504_gateway_retry(self, mock_sleep: Any) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + if call_count == 1: + return httpx.Response(504, text="Gateway Timeout") + return httpx.Response(200, json={"ok": True}) + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + resp = client.request("GET", "https://api.example.com/resource") + + assert resp.status_code == 200 + assert call_count == 2 + + +class TestRequestAKSKSigning: + """Test that AK/SK requests are signed.""" + + def test_aksk_request_has_authorization(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + cfg = _aksk_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.project_id = "proj-123" + + client.request("GET", 
"https://ecs.eu-de.otc.t-systems.com/v2.1/servers") + + req = captured[0] + assert "authorization" in req.headers + assert "SDK-HMAC-SHA256" in req.headers["authorization"] + assert "x-project-id" in req.headers + + def test_aksk_request_with_domain_id(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + cfg = _aksk_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.domain_id = "dom-id" + + client.request("GET", "https://ecs.eu-de.otc.t-systems.com/v2.1/servers") + + req = captured[0] + assert req.headers.get("x-domain-id") == "dom-id" + assert "x-project-id" not in req.headers # domain_id present → no project_id + + def test_aksk_request_with_security_token(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + cfg = _aksk_config(security_token="temp-sec-tok") + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.project_id = "proj-123" + + client.request("GET", "https://ecs.eu-de.otc.t-systems.com/v2.1/servers") + + req = captured[0] + assert req.headers.get("x-security-token") == "temp-sec-tok" + + +# ====================================================================== +# ProviderClient: custom ok_codes +# ====================================================================== + + +class TestCustomOkCodes: + """Test custom ok_codes parameter.""" + + def test_custom_ok_codes_accepted(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(204, text="") + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + 
client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + resp = client.request( + "POST", "https://api.example.com/action", + ok_codes=[204], + ) + assert resp.status_code == 204 + + def test_default_post_ok_codes(self) -> None: + """POST default ok_codes include 201.""" + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(201, json={"id": "1"}) + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + resp = client.request("POST", "https://api.example.com/resource") + assert resp.status_code == 201 + + +# ====================================================================== +# SecretStr compatibility +# ====================================================================== + + +class TestSecretStrCompatibility: + """Verify provider works when AuthConfig uses SecretStr fields.""" + + def test_secret_value_with_str(self) -> None: + from sdk.core.provider import _secret_value + assert _secret_value("plain") == "plain" + + def test_secret_value_with_secret_str(self) -> None: + from pydantic import SecretStr + from sdk.core.provider import _secret_value + assert _secret_value(SecretStr("hidden")) == "hidden" + + def test_build_body_with_secret_password(self) -> None: + """If password is SecretStr, body should contain plain string.""" + from pydantic import SecretStr + + cfg = _password_config() + # Simulate SecretStr by monkey-patching + object.__setattr__(cfg, "password", SecretStr("secret")) + + body = _build_v3_auth_body(cfg) + user = body["auth"]["identity"]["password"]["user"] + assert user["password"] == "secret" + assert isinstance(user["password"], str) diff --git a/tests/unit/core/test_service_client.py b/tests/unit/core/test_service_client.py new file mode 100644 index 0000000..410bec0 --- /dev/null +++ b/tests/unit/core/test_service_client.py @@ -0,0 +1,429 @@ 
+"""Tests for ``sdk.core.service_client``.""" + +from __future__ import annotations + +import json +from typing import Any +import httpx +import pytest + +from sdk.core.auth import AuthConfig +from sdk.core.exceptions import ( + EndpointNotFoundError, + NotFoundError, + ServiceNotFoundError, +) +from sdk.core.endpoint import build_endpoint_locator, CatalogEntry +from sdk.core.provider import ProviderClient +from sdk.core.service_client import ServiceClient, _ensure_trailing_slash + + +# ====================================================================== +# Fixtures +# ====================================================================== + + +def _sample_catalog() -> list[CatalogEntry]: + raw = [ + { + "type": "compute", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://ecs.eu-de.otc.t-systems.com/v2.1", + }, + ], + }, + { + "type": "dns", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://dns.eu-de.otc.t-systems.com/v2", + }, + ], + }, + { + "type": "network", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://vpc.eu-de.otc.t-systems.com", + }, + { + "interface": "public", + "region_id": "eu-nl", + "url": "https://vpc.eu-nl.otc.t-systems.com", + }, + ], + }, + ] + return [CatalogEntry.model_validate(entry) for entry in raw] + + +def _make_provider( + handler: Any = None, + *, + catalog: list[CatalogEntry] | None = None, +) -> ProviderClient: + """Create an authenticated ProviderClient with mock transport.""" + if handler is None: + handler = lambda req: httpx.Response(200, json={}) + + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + domain_name="dom", + ) + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + provider = ProviderClient(cfg, http_client=http_client) + provider.token_id = "test-token" + provider.region_id = "eu-de" + + cat = 
catalog if catalog is not None else _sample_catalog() + provider.endpoint_locator = build_endpoint_locator(cat, "eu-de") + + return provider + + +# ====================================================================== +# _ensure_trailing_slash +# ====================================================================== + + +class TestEnsureTrailingSlash: + def test_adds_slash(self) -> None: + assert _ensure_trailing_slash("https://example.com") == "https://example.com/" + + def test_keeps_existing_slash(self) -> None: + assert _ensure_trailing_slash("https://example.com/") == "https://example.com/" + + def test_with_path(self) -> None: + assert _ensure_trailing_slash("https://example.com/v2.1") == "https://example.com/v2.1/" + + +# ====================================================================== +# Construction & endpoint resolution +# ====================================================================== + + +class TestServiceClientConstruction: + """Test endpoint resolution and attribute setup.""" + + def test_resolves_compute_from_catalog(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "compute") + assert sc.endpoint == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_resolves_dns_from_catalog(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "dns") + assert sc.endpoint == "https://dns.eu-de.otc.t-systems.com/v2/" + + def test_endpoint_override_bypasses_catalog(self) -> None: + provider = _make_provider() + sc = ServiceClient( + provider, "compute", + endpoint_override="https://custom.example.com/v2", + ) + assert sc.endpoint == "https://custom.example.com/v2/" + assert sc.resource_base == "https://custom.example.com/v2/" + + def test_resource_base_override(self) -> None: + provider = _make_provider() + sc = ServiceClient( + provider, "compute", + resource_base="https://ecs.eu-de.otc.t-systems.com/v2.1/proj-123", + ) + assert sc.resource_base == "https://ecs.eu-de.otc.t-systems.com/v2.1/proj-123/" 
+ assert sc.endpoint == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_resource_base_defaults_to_endpoint(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "compute") + assert sc.resource_base == sc.endpoint + + def test_service_not_found_raises(self) -> None: + provider = _make_provider() + with pytest.raises(ServiceNotFoundError): + ServiceClient(provider, "nonexistent_service") + + def test_endpoint_not_found_wrong_region(self) -> None: + raw = [ + { + "type": "compute", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://ecs.eu-de.example.com/v2.1", + }, + ], + }, + ] + catalog = [CatalogEntry.model_validate(entry) for entry in raw] + provider = _make_provider(catalog=catalog) + with pytest.raises(EndpointNotFoundError): + ServiceClient(provider, "compute", region="us-east-1") + + def test_region_override(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "network", region="eu-nl") + assert "eu-nl" in sc.endpoint + + def test_extra_headers_stored(self) -> None: + provider = _make_provider() + sc = ServiceClient( + provider, "compute", + extra_headers={"X-Custom": "value"}, + ) + assert sc.extra_headers == {"X-Custom": "value"} + + def test_no_endpoint_locator_gives_empty(self) -> None: + """If provider has no locator, endpoint is empty string.""" + provider = _make_provider() + provider.endpoint_locator = None + sc = ServiceClient(provider, "compute") + assert sc.endpoint == "" + + def test_no_service_type_gives_empty(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider) + assert sc.endpoint == "" + + +# ====================================================================== +# service_url +# ====================================================================== + + +class TestServiceUrl: + """Test URL construction.""" + + def test_single_part(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "compute") + url = 
sc.service_url("servers") + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/servers" + + def test_multiple_parts(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "compute") + url = sc.service_url("servers", "abc-123", "action") + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/servers/abc-123/action" + + def test_no_parts(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "compute") + url = sc.service_url() + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_with_custom_resource_base(self) -> None: + provider = _make_provider() + sc = ServiceClient( + provider, "compute", + resource_base="https://ecs.eu-de.otc.t-systems.com/v2.1/proj-123", + ) + url = sc.service_url("servers") + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/proj-123/servers" + + +# ====================================================================== +# HTTP methods +# ====================================================================== + + +class TestHttpMethods: + """Test that convenience methods delegate correctly to provider.""" + + def _service_client(self, handler: Any) -> ServiceClient: + """Build a ServiceClient with mocked transport.""" + provider = _make_provider(handler) + return ServiceClient( + provider, endpoint_override="https://api.example.com/v1", + ) + + def test_get(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={"items": []}) + + sc = self._service_client(handler) + resp = sc.get("resources") + + assert resp.status_code == 200 + assert len(captured) == 1 + assert captured[0].method == "GET" + assert str(captured[0].url) == "https://api.example.com/v1/resources" + + def test_post_with_json(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(201, json={"id": 
"new-1"}) + + sc = self._service_client(handler) + resp = sc.post("resources", json={"name": "test"}) + + assert resp.status_code == 201 + body = json.loads(captured[0].content) + assert body["name"] == "test" + + def test_put(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={"updated": True}) + + sc = self._service_client(handler) + resp = sc.put("resources/123", json={"name": "updated"}) + + assert captured[0].method == "PUT" + assert "resources/123" in str(captured[0].url) + + def test_patch(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + sc = self._service_client(handler) + sc.patch("resources/123", json={"field": "val"}) + + assert captured[0].method == "PATCH" + + def test_delete(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(204, text="") + + sc = self._service_client(handler) + resp = sc.delete("resources/123") + + assert resp.status_code == 204 + assert captured[0].method == "DELETE" + + def test_delete_with_body(self) -> None: + """Go SDK has DeleteWithBody — our delete() supports json param.""" + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + sc = self._service_client(handler) + sc.delete("resources/batch", json={"ids": ["a", "b"]}) + + body = json.loads(captured[0].content) + assert body["ids"] == ["a", "b"] + + def test_head(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(204, text="") + + sc = self._service_client(handler) + sc.head("resources/123", ok_codes=[204]) + + assert captured[0].method == 
"HEAD" + + def test_error_propagates(self) -> None: + """Non-ok status from provider should raise HttpError.""" + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(404, text="Not found") + + sc = self._service_client(handler) + with pytest.raises(NotFoundError): + sc.get("missing") + + +# ====================================================================== +# Headers +# ====================================================================== + + +class TestHeaders: + """Test header merging between service and request levels.""" + + def test_extra_headers_sent(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + provider = _make_provider(handler) + sc = ServiceClient( + provider, + endpoint_override="https://api.example.com/v1", + extra_headers={"X-Service-Level": "important"}, + ) + sc.get("stuff") + + assert captured[0].headers["x-service-level"] == "important" + + def test_per_request_headers_override(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + provider = _make_provider(handler) + sc = ServiceClient( + provider, + endpoint_override="https://api.example.com/v1", + extra_headers={"X-Level": "service"}, + ) + sc.get("stuff", headers={"X-Level": "request"}) + + # Per-request header wins + assert captured[0].headers["x-level"] == "request" + + def test_auth_header_present(self) -> None: + """Token auth header should be set by provider.""" + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + provider = _make_provider(handler) + sc = ServiceClient( + provider, endpoint_override="https://api.example.com/v1", + ) + sc.get("stuff") + + assert captured[0].headers["x-auth-token"] == "test-token" + + +# 
====================================================================== +# Custom ok_codes +# ====================================================================== + + +class TestCustomOkCodes: + def test_custom_ok_codes(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(204, text="") + + provider = _make_provider(handler) + sc = ServiceClient( + provider, endpoint_override="https://api.example.com/v1", + ) + resp = sc.post("action", ok_codes=[204]) + assert resp.status_code == 204 diff --git a/tests/unit/core/test_signer.py b/tests/unit/core/test_signer.py new file mode 100644 index 0000000..fea6d33 --- /dev/null +++ b/tests/unit/core/test_signer.py @@ -0,0 +1,409 @@ +"""Tests for sdk.core.signer.""" + +from datetime import datetime, timezone + +import httpx +import pytest + +from sdk.core.signer import ( + SIGN_ALGORITHM_HMAC_SHA256, + SignOptions, + _build_sign_key, + _build_sign_params, + _canonical_path, + _canonical_query, + _derive_signing_key, + _format_date, + _format_datetime, + _hash_sha256, + _hmac_sha256, + _signed_headers_string, + _use_payload_for_query, + re_sign_request, + sign_request, +) + +# Fixed timestamp for reproducible tests +FIXED_TIME = datetime(2024, 4, 15, 10, 30, 0, tzinfo=timezone.utc) +FIXED_DT_STR = "20240415T103000Z" +FIXED_DATE_STR = "20240415" + +OPTS = SignOptions( + access_key="TESTAKXXXXXXXX", + secret_key="TESTSKXXXXXXXXXXXXXXXXXXXXXXXX", + region_name="eu-de", + service_name="dns", +) + + +class TestFormatting: + def test_format_datetime(self): + assert _format_datetime(FIXED_TIME) == FIXED_DT_STR + + def test_format_date(self): + assert _format_date(FIXED_TIME) == FIXED_DATE_STR + + +class TestCryptoPrimitives: + def test_hash_sha256_empty(self): + result = _hash_sha256(b"") + assert result == ( + "e3b0c44298fc1c149afbf4c8996fb924" + "27ae41e4649b934ca495991b7852b855" + ) + + def test_hash_sha256_data(self): + result = _hash_sha256(b"hello") + assert result == ( + 
"2cf24dba5fb0a30e26e83b2ac5b9e29e" + "1b161e5c1fa7425e73043362938b9824" + ) + + def test_hmac_sha256(self): + result = _hmac_sha256("data", b"secret") + assert len(result) == 32 # SHA-256 always 32 bytes + + def test_build_sign_key_deterministic(self): + params1 = _build_sign_params(OPTS, FIXED_TIME) + params2 = _build_sign_params(OPTS, FIXED_TIME) + key1 = _build_sign_key(params1) + key2 = _build_sign_key(params2) + assert key1 == key2 + assert len(key1) == 32 + + def test_build_sign_key_different_date(self): + ts1 = datetime(2024, 4, 15, 10, 0, 0, tzinfo=timezone.utc) + ts2 = datetime(2024, 4, 16, 10, 0, 0, tzinfo=timezone.utc) + params1 = _build_sign_params(OPTS, ts1) + params2 = _build_sign_params(OPTS, ts2) + key1 = _build_sign_key(params1) + key2 = _build_sign_key(params2) + assert key1 != key2 + + +class TestCanonicalPath: + def test_simple_path(self): + req = httpx.Request("GET", "https://example.com/v2/zones") + path = _canonical_path(req) + assert path == "/v2/zones" + + def test_root_path(self): + req = httpx.Request("GET", "https://example.com/") + path = _canonical_path(req) + assert path == "/" + + def test_trailing_slash_preserved(self): + req = httpx.Request("GET", "https://example.com/v2/zones/") + path = _canonical_path(req) + assert path == "/v2/zones/" + + def test_no_double_encoding(self): + req = httpx.Request("GET", "https://example.com/v1/vpcs/some%20path") + path = _canonical_path(req) + assert "%2520" not in path + assert "%20" in path + + +class TestCanonicalQuery: + def test_no_params(self): + req = httpx.Request("GET", "https://example.com/v2/zones") + assert _canonical_query(req) == "" + + def test_sorted_params(self): + req = httpx.Request( + "GET", "https://example.com/v2/zones?name=test&limit=10", + ) + qs = _canonical_query(req) + assert "limit" in qs + assert "name" in qs + # 'limit' should come before 'name' alphabetically + assert qs.index("limit") < qs.index("name") + + def test_special_chars_encoded(self): + req = 
httpx.Request( + "GET", "https://example.com/test?key=hello world", + ) + qs = _canonical_query(req) + assert "hello%20world" in qs + + def test_duplicate_keys_preserved(self): + req = httpx.Request( + "GET", "https://example.com/v1?tag=b&tag=a&name=test", + ) + qs = _canonical_query(req) + assert "tag=b" in qs + assert "tag=a" in qs + assert "name=test" in qs + + def test_post_no_body_returns_empty(self): + req = httpx.Request( + "POST", "https://example.com/v1?action=start", + ) + assert _canonical_query(req) == "" + + +class TestUsePayloadForQuery: + def test_post_no_body(self): + req = httpx.Request( + "POST", "https://example.com/v1?action=start", + ) + assert _use_payload_for_query(req) is True + + def test_post_with_body(self): + req = httpx.Request( + "POST", "https://example.com/v1", + content=b'{"name": "test"}', + ) + assert _use_payload_for_query(req) is False + + def test_get_never_uses_payload(self): + req = httpx.Request("GET", "https://example.com/v1?x=1") + assert _use_payload_for_query(req) is False + + +class TestSignRequest: + def test_adds_authorization_header(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + assert "authorization" in req.headers + assert req.headers["authorization"].startswith( + SIGN_ALGORITHM_HMAC_SHA256, + ) + + def test_adds_sdk_date_header(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + assert req.headers["x-sdk-date"] == FIXED_DT_STR + + def test_adds_host_header(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + assert req.headers["host"] == "dns.eu-de.otc.t-systems.com" + + def test_authorization_contains_credential(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, 
timestamp=FIXED_TIME) + auth = req.headers["authorization"] + assert f"Credential={OPTS.access_key}/" in auth + + def test_authorization_contains_signed_headers(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + auth = req.headers["authorization"] + assert "SignedHeaders=" in auth + assert "host" in auth + assert "x-sdk-date" in auth + + def test_authorization_contains_signature(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + auth = req.headers["authorization"] + assert "Signature=" in auth + # Signature is hex, 64 chars + sig = auth.split("Signature=")[1] + assert len(sig) == 64 + + def test_deterministic_signature(self): + """Same request + same timestamp = same signature.""" + req1 = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + req2 = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req1, OPTS, timestamp=FIXED_TIME) + sign_request(req2, OPTS, timestamp=FIXED_TIME) + assert req1.headers["authorization"] == req2.headers["authorization"] + + def test_different_timestamp_different_signature(self): + ts1 = datetime(2024, 4, 15, 10, 0, 0, tzinfo=timezone.utc) + ts2 = datetime(2024, 4, 15, 11, 0, 0, tzinfo=timezone.utc) + req1 = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + req2 = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req1, OPTS, timestamp=ts1) + sign_request(req2, OPTS, timestamp=ts2) + assert req1.headers["authorization"] != req2.headers["authorization"] + + def test_post_with_body(self): + req = httpx.Request( + "POST", + "https://dns.eu-de.otc.t-systems.com/v2/zones", + json={"name": "example.com.", "zone_type": "public"}, + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + assert req.headers["authorization"].startswith( + 
SIGN_ALGORITHM_HMAC_SHA256, + ) + + def test_scope_format(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + auth = req.headers["authorization"] + expected_scope = f"{FIXED_DATE_STR}/eu-de/dns/sdk_request" + assert expected_scope in auth + + def test_whitespace_trimmed_from_keys(self): + opts = SignOptions( + access_key=" AK_PADDED ", + secret_key=" SK_PADDED ", + region_name="eu-de", + service_name="dns", + ) + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, opts, timestamp=FIXED_TIME) + auth = req.headers["authorization"] + assert "Credential=AK_PADDED/" in auth + + +class TestReSignRequest: + def test_re_sign_overwrites_date(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + old_date = req.headers["x-sdk-date"] + + ts2 = datetime(2024, 4, 15, 10, 31, 0, tzinfo=timezone.utc) + re_sign_request(req, OPTS, timestamp=ts2) + assert req.headers["x-sdk-date"] != old_date + assert req.headers["x-sdk-date"] == "20240415T103100Z" + + def test_re_sign_produces_new_authorization(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + old_auth = req.headers["authorization"] + + ts2 = datetime(2024, 4, 15, 10, 31, 0, tzinfo=timezone.utc) + re_sign_request(req, OPTS, timestamp=ts2) + assert req.headers["authorization"] != old_auth + + +class TestAlgorithmValidation: + def test_unsupported_algorithm_raises(self): + opts = SignOptions( + access_key="AK", + secret_key="SK", + sign_algorithm="UNSUPPORTED-ALG", + ) + req = httpx.Request("GET", "https://example.com/test") + with pytest.raises(ValueError, match="Unsupported"): + sign_request(req, opts) + + def test_default_algorithm(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) 
+ sign_request(req, OPTS, timestamp=FIXED_TIME) + auth = req.headers["authorization"] + assert auth.startswith(SIGN_ALGORITHM_HMAC_SHA256) + + +class TestSignKeyCache: + def test_cached_key_matches_uncached(self): + opts_cached = SignOptions( + access_key="AK", + secret_key="SK_CACHE_TEST", + region_name="eu-de", + service_name="vpc", + enable_cache_sign_key=True, + ) + opts_uncached = SignOptions( + access_key="AK", + secret_key="SK_CACHE_TEST", + region_name="eu-de", + service_name="vpc", + enable_cache_sign_key=False, + ) + params_cached = _build_sign_params(opts_cached, FIXED_TIME) + params_uncached = _build_sign_params(opts_uncached, FIXED_TIME) + + key_cached = _derive_signing_key(params_cached) + key_uncached = _derive_signing_key(params_uncached) + assert key_cached == key_uncached + + def test_cache_returns_same_key_same_day(self): + opts = SignOptions( + access_key="AK", + secret_key="SK_SAME_DAY", + region_name="eu-de", + service_name="vpc", + enable_cache_sign_key=True, + ) + ts1 = datetime(2024, 6, 15, 8, 0, 0, tzinfo=timezone.utc) + ts2 = datetime(2024, 6, 15, 20, 0, 0, tzinfo=timezone.utc) + + params1 = _build_sign_params(opts, ts1) + params2 = _build_sign_params(opts, ts2) + + key1 = _derive_signing_key(params1) + key2 = _derive_signing_key(params2) + assert key1 == key2 + + def test_cache_invalidates_on_new_day(self): + opts = SignOptions( + access_key="AK", + secret_key="SK_NEW_DAY", + region_name="eu-de", + service_name="vpc", + enable_cache_sign_key=True, + ) + ts1 = datetime(2024, 6, 15, 23, 0, 0, tzinfo=timezone.utc) + ts2 = datetime(2024, 6, 16, 1, 0, 0, tzinfo=timezone.utc) + + params1 = _build_sign_params(opts, ts1) + params2 = _build_sign_params(opts, ts2) + + key1 = _derive_signing_key(params1) + key2 = _derive_signing_key(params2) + assert key1 != key2 + + +class TestTimeOffset: + def test_offset_shifts_signing_time(self): + opts_no_offset = SignOptions( + access_key="AK", + secret_key="SK", + region_name="eu-de", + service_name="vpc", + 
time_offset_seconds=0, + ) + opts_with_offset = SignOptions( + access_key="AK", + secret_key="SK", + region_name="eu-de", + service_name="vpc", + time_offset_seconds=3600, + ) + req1 = httpx.Request( + "GET", "https://vpc.eu-de.otc.t-systems.com/v1/vpcs", + ) + req2 = httpx.Request( + "GET", "https://vpc.eu-de.otc.t-systems.com/v1/vpcs", + ) + sign_request(req1, opts_no_offset, timestamp=FIXED_TIME) + sign_request(req2, opts_with_offset, timestamp=FIXED_TIME) + # Same timestamp but offset makes them different + assert req1.headers["authorization"] != req2.headers["authorization"] + assert req1.headers["x-sdk-date"] != req2.headers["x-sdk-date"] diff --git a/tests/unit/core/test_smoke.py b/tests/unit/core/test_smoke.py new file mode 100644 index 0000000..f4d2307 --- /dev/null +++ b/tests/unit/core/test_smoke.py @@ -0,0 +1,6 @@ +"""Smoke test — verify the package is importable.""" +import sdk + +def test_import(): + + assert sdk.__version__ == "0.1.0" \ No newline at end of file diff --git a/tests/unit/services/__init__.py b/tests/unit/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/services/vpc/__init__.py b/tests/unit/services/vpc/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/services/vpc/v1/vpcs/__init__.py b/tests/unit/services/vpc/v1/vpcs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/services/vpc/v1/vpcs/conftest.py b/tests/unit/services/vpc/v1/vpcs/conftest.py new file mode 100644 index 0000000..2e09cf4 --- /dev/null +++ b/tests/unit/services/vpc/v1/vpcs/conftest.py @@ -0,0 +1,30 @@ +"""VPC-specific fixtures for vpcs unit tests. 
+""" + +from __future__ import annotations + +import pytest + + +@pytest.fixture +def vpc_payload(): + """Minimal valid VPC payload as returned by the T Cloud Public API.""" + return { + "id": "vpc-id-1", + "name": "test-vpc", + "description": "", + "cidr": "192.168.0.0/16", + "status": "OK", + "enterprise_project_id": "0", + "routes": [], + "enable_shared_snat": False, + "tenant_id": "tenant-1", + "created_at": "2024-01-01T00:00:00", + "updated_at": "2024-01-01T00:00:00", + } + + +@pytest.fixture +def vpc_response(vpc_payload): + """Full API response wrapping the VPC payload.""" + return {"vpc": vpc_payload} \ No newline at end of file diff --git a/tests/unit/services/vpc/v1/vpcs/test_create.py b/tests/unit/services/vpc/v1/vpcs/test_create.py new file mode 100644 index 0000000..52e6a07 --- /dev/null +++ b/tests/unit/services/vpc/v1/vpcs/test_create.py @@ -0,0 +1,38 @@ +"""Tests for ``vpcs.create``.""" + +from __future__ import annotations + +from sdk.services.vpc.v1.vpcs import CreateVpcOpts, Vpc, create + +from tests.unit.conftest import make_response + + +def test_create_calls_post_with_wrapped_body(mock_client, vpc_response): + mock_client.post.return_value = make_response(vpc_response) + + opts = CreateVpcOpts(name="my-vpc", cidr="192.168.0.0/16") + create(mock_client, opts) + + mock_client.post.assert_called_once_with( + "vpcs", + json={"vpc": {"name": "my-vpc", "cidr": "192.168.0.0/16"}}, + ) + + +def test_create_returns_parsed_vpc(mock_client, vpc_response): + mock_client.post.return_value = make_response(vpc_response) + + result = create(mock_client, CreateVpcOpts(name="my-vpc")) + + assert isinstance(result, Vpc) + assert result.id == "vpc-id-1" + assert result.name == "test-vpc" + assert result.cidr == "192.168.0.0/16" + + +def test_create_empty_opts_sends_empty_wrapped_body(mock_client, vpc_response): + mock_client.post.return_value = make_response(vpc_response) + + create(mock_client, CreateVpcOpts()) + + 
mock_client.post.assert_called_once_with("vpcs", json={"vpc": {}}) diff --git a/tests/unit/services/vpc/v1/vpcs/test_delete.py b/tests/unit/services/vpc/v1/vpcs/test_delete.py new file mode 100644 index 0000000..0f6e6f9 --- /dev/null +++ b/tests/unit/services/vpc/v1/vpcs/test_delete.py @@ -0,0 +1,20 @@ +"""Tests for ``vpcs.delete``.""" + +from __future__ import annotations + +from sdk.services.vpc.v1.vpcs import delete + + +def test_delete_calls_delete_with_resource_url(mock_client): + delete(mock_client, "vpc-id-1") + mock_client.delete.assert_called_once_with("vpcs/vpc-id-1") + + +def test_delete_returns_none(mock_client): + result = delete(mock_client, "vpc-id-1") + assert result is None + + +def test_delete_passes_id_into_url(mock_client): + delete(mock_client, "abc-123-xyz") + mock_client.delete.assert_called_once_with("vpcs/abc-123-xyz") diff --git a/tests/unit/services/vpc/v1/vpcs/test_get.py b/tests/unit/services/vpc/v1/vpcs/test_get.py new file mode 100644 index 0000000..bb7ba54 --- /dev/null +++ b/tests/unit/services/vpc/v1/vpcs/test_get.py @@ -0,0 +1,33 @@ +"""Tests for ``vpcs.get``.""" + +from __future__ import annotations + +from sdk.services.vpc.v1.vpcs import Vpc, get + +from tests.unit.conftest import make_response + + +def test_get_calls_get_with_resource_url(mock_client, vpc_response): + mock_client.get.return_value = make_response(vpc_response) + + get(mock_client, "vpc-id-1") + + mock_client.get.assert_called_once_with("vpcs/vpc-id-1") + + +def test_get_returns_parsed_vpc(mock_client, vpc_response): + mock_client.get.return_value = make_response(vpc_response) + + result = get(mock_client, "vpc-id-1") + + assert isinstance(result, Vpc) + assert result.id == "vpc-id-1" + assert result.name == "test-vpc" + + +def test_get_passes_id_into_url(mock_client, vpc_response): + mock_client.get.return_value = make_response(vpc_response) + + get(mock_client, "abc-123-xyz") + + mock_client.get.assert_called_once_with("vpcs/abc-123-xyz") diff --git 
a/tests/unit/services/vpc/v1/vpcs/test_list.py b/tests/unit/services/vpc/v1/vpcs/test_list.py new file mode 100644 index 0000000..a1d3f6f --- /dev/null +++ b/tests/unit/services/vpc/v1/vpcs/test_list.py @@ -0,0 +1,98 @@ +"""Tests for ``vpcs.list``. + +``marker_paginate`` is mocked at the module level since pagination +itself is covered by its own unit tests in ``tests/unit/core``. +These tests only verify that ``list()`` wires its arguments correctly. +""" + +from __future__ import annotations + +from sdk.services.vpc.v1.vpcs import ListVpcsOpts, Vpc, list as vpcs_list + + +def test_list_passes_correct_path_and_model(mocker, mock_client): + paginate = mocker.patch( + "sdk.services.vpc.v1.vpcs.list.marker_paginate", + return_value=iter([]), + ) + + vpcs_list(mock_client) + + paginate.assert_called_once() + kwargs = paginate.call_args.kwargs + assert kwargs["client"] is mock_client + assert kwargs["path"] == "vpcs" + assert kwargs["items_key"] == "vpcs" + assert kwargs["model"] is Vpc + assert kwargs["marker_key"] == "id" + + +def test_list_without_opts_passes_no_params_and_zero_limit(mocker, mock_client): + paginate = mocker.patch( + "sdk.services.vpc.v1.vpcs.list.marker_paginate", + return_value=iter([]), + ) + + vpcs_list(mock_client) + + kwargs = paginate.call_args.kwargs + assert kwargs["params"] is None + assert kwargs["limit"] == 0 + + +def test_list_with_opts_passes_query_params(mocker, mock_client): + paginate = mocker.patch( + "sdk.services.vpc.v1.vpcs.list.marker_paginate", + return_value=iter([]), + ) + + opts = ListVpcsOpts(id="vpc-1", marker="m") + vpcs_list(mock_client, opts) + + kwargs = paginate.call_args.kwargs + assert kwargs["params"] == {"id": "vpc-1", "marker": "m"} + + +def test_list_with_positive_limit_forwards_it(mocker, mock_client): + paginate = mocker.patch( + "sdk.services.vpc.v1.vpcs.list.marker_paginate", + return_value=iter([]), + ) + + vpcs_list(mock_client, ListVpcsOpts(limit=50)) + + kwargs = paginate.call_args.kwargs + assert 
kwargs["limit"] == 50 + # And it appears in params too — that's correct per the T Cloud Public API. + assert kwargs["params"] == {"limit": "50"} + + +def test_list_with_none_limit_sends_zero(mocker, mock_client): + """``limit=None`` (default) keeps the explicit-limit guard at zero. + + This is the lesson from the marker_paginate infinite-loop bug: + forwarding a falsy limit blindly is dangerous, so the function + coerces to 0 when the caller did not set one. + """ + paginate = mocker.patch( + "sdk.services.vpc.v1.vpcs.list.marker_paginate", + return_value=iter([]), + ) + + vpcs_list(mock_client, ListVpcsOpts(marker="m")) + + kwargs = paginate.call_args.kwargs + assert kwargs["limit"] == 0 + + +def test_list_returns_paginate_result(mocker, mock_client): + """``list()`` is a thin pass-through: returns whatever paginate yields.""" + sentinel = iter(["one", "two"]) + mocker.patch( + "sdk.services.vpc.v1.vpcs.list.marker_paginate", + return_value=sentinel, + ) + + result = vpcs_list(mock_client) + + assert list(result) == ["one", "two"] diff --git a/tests/unit/services/vpc/v1/vpcs/test_opts.py b/tests/unit/services/vpc/v1/vpcs/test_opts.py new file mode 100644 index 0000000..1ea3664 --- /dev/null +++ b/tests/unit/services/vpc/v1/vpcs/test_opts.py @@ -0,0 +1,108 @@ +"""Tests for VPC v1 Opts. + +The generic ``BaseOpts``/``BaseQueryOpts`` semantics are covered in +``tests/unit/core/test_opts.py``. These tests pin VPC-specific behaviour: +the wrapper key, the field set, and the route nested model. 
+""" + +from __future__ import annotations + +from sdk.services.vpc.v1.vpcs import ( + CreateVpcOpts, + ListVpcsOpts, + Route, + UpdateVpcOpts, +) + + +# --- CreateVpcOpts --- + + +def test_create_opts_empty_returns_wrapped_empty_body(): + assert CreateVpcOpts().to_request_body() == {"vpc": {}} + + +def test_create_opts_full(): + opts = CreateVpcOpts( + name="my-vpc", + description="test", + cidr="192.168.0.0/16", + enterprise_project_id="ep-1", + ) + assert opts.to_request_body() == { + "vpc": { + "name": "my-vpc", + "description": "test", + "cidr": "192.168.0.0/16", + "enterprise_project_id": "ep-1", + } + } + + +def test_create_opts_drops_none_fields(): + opts = CreateVpcOpts(name="my-vpc") + assert opts.to_request_body() == {"vpc": {"name": "my-vpc"}} + + +# --- UpdateVpcOpts --- + + +def test_update_opts_explicit_empty_string_clears_field(): + """``description=""`` reaches the wire so the server clears the field.""" + opts = UpdateVpcOpts(description="") + assert opts.to_request_body() == {"vpc": {"description": ""}} + + +def test_update_opts_with_routes(): + opts = UpdateVpcOpts( + routes=[ + Route(destination="10.0.0.0/8", nexthop="192.168.1.1"), + Route(destination="172.16.0.0/12", nexthop="192.168.1.2"), + ], + ) + assert opts.to_request_body() == { + "vpc": { + "routes": [ + {"destination": "10.0.0.0/8", "nexthop": "192.168.1.1"}, + {"destination": "172.16.0.0/12", "nexthop": "192.168.1.2"}, + ] + } + } + + +def test_update_opts_empty_routes_list(): + """Empty list is meaningful: clear all routes.""" + opts = UpdateVpcOpts(routes=[]) + assert opts.to_request_body() == {"vpc": {"routes": []}} + + +def test_update_opts_none_routes_omitted(): + opts = UpdateVpcOpts(name="x") + assert opts.to_request_body() == {"vpc": {"name": "x"}} + + +# --- ListVpcsOpts --- + + +def test_list_opts_empty(): + assert ListVpcsOpts().to_query_params() == {} + + +def test_list_opts_full(): + opts = ListVpcsOpts( + id="vpc-1", + limit=20, + marker="abc", + 
enterprise_project_id="ep-1", + ) + assert opts.to_query_params() == { + "id": "vpc-1", + "limit": "20", + "marker": "abc", + "enterprise_project_id": "ep-1", + } + + +def test_list_opts_partial(): + opts = ListVpcsOpts(limit=10) + assert opts.to_query_params() == {"limit": "10"} \ No newline at end of file diff --git a/tests/unit/services/vpc/v1/vpcs/test_update.py b/tests/unit/services/vpc/v1/vpcs/test_update.py new file mode 100644 index 0000000..6040df6 --- /dev/null +++ b/tests/unit/services/vpc/v1/vpcs/test_update.py @@ -0,0 +1,73 @@ +"""Tests for ``vpcs.update``.""" + +from __future__ import annotations + +from sdk.services.vpc.v1.vpcs import Route, UpdateVpcOpts, Vpc, update + +from tests.unit.conftest import make_response + + +def test_update_calls_put_with_resource_url(mock_client, vpc_response): + mock_client.put.return_value = make_response(vpc_response) + + update(mock_client, "vpc-id-1", UpdateVpcOpts(name="renamed")) + + mock_client.put.assert_called_once_with( + "vpcs/vpc-id-1", + json={"vpc": {"name": "renamed"}}, + ) + + +def test_update_clear_description_sends_empty_string(mock_client, vpc_response): + """Sending ``description=""`` clears the field on the server.""" + mock_client.put.return_value = make_response(vpc_response) + + update(mock_client, "vpc-id-1", UpdateVpcOpts(description="")) + + mock_client.put.assert_called_once_with( + "vpcs/vpc-id-1", + json={"vpc": {"description": ""}}, + ) + + +def test_update_with_routes_serializes_nested(mock_client, vpc_response): + mock_client.put.return_value = make_response(vpc_response) + + update( + mock_client, + "vpc-id-1", + UpdateVpcOpts( + routes=[Route(destination="10.0.0.0/8", nexthop="192.168.1.1")], + ), + ) + + mock_client.put.assert_called_once_with( + "vpcs/vpc-id-1", + json={ + "vpc": { + "routes": [ + {"destination": "10.0.0.0/8", "nexthop": "192.168.1.1"}, + ] + } + }, + ) + + +def test_update_returns_parsed_vpc(mock_client, vpc_response): + mock_client.put.return_value = 
make_response(vpc_response) + + result = update(mock_client, "vpc-id-1", UpdateVpcOpts(name="renamed")) + + assert isinstance(result, Vpc) + assert result.id == "vpc-id-1" + + +def test_update_empty_opts_sends_empty_wrapped_body(mock_client, vpc_response): + mock_client.put.return_value = make_response(vpc_response) + + update(mock_client, "vpc-id-1", UpdateVpcOpts()) + + mock_client.put.assert_called_once_with( + "vpcs/vpc-id-1", + json={"vpc": {}}, + ) diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..08420b8 --- /dev/null +++ b/uv.lock @@ -0,0 +1,755 @@ +version = 1 +revision = 3 +requires-python = ">=3.13" + +[[package]] +name = "alabaster" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size 
= 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, +] + +[[package]] +name = "babel" +version = "2.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/b2/51899539b6ceeeb420d40ed3cd4b7a40519404f9baf3d4ac99dc413a834b/babel-2.18.0.tar.gz", hash = "sha256:b80b99a14bd085fcacfa15c9165f651fbb3406e66cc603abf11c5750937c992d", size = 9959554, upload-time = "2026-02-01T12:30:56.078Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/f5/21d2de20e8b8b0408f0681956ca2c69f1320a3848ac50e6e7f39c6159675/babel-2.18.0-py3-none-any.whl", hash = "sha256:e2b422b277c2b9a9630c1d7903c2a00d0830c409c59ac8cae9081c92f1aeba35", size = 10196845, upload-time = "2026-02-01T12:30:53.445Z" }, +] + +[[package]] +name = "certifi" +version = "2026.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, 
upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, 
upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "docutils" +version = "0.22.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/b6/03bb70946330e88ffec97aefd3ea75ba575cb2e762061e0e62a213befee8/docutils-0.22.4.tar.gz", hash = "sha256:4db53b1fde9abecbb74d91230d32ab626d94f6badfc575d6db9194a49df29968", size = 2291750, upload-time = "2025-12-18T19:00:26.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/10/5da547df7a391dcde17f59520a231527b8571e6f46fc8efb02ccb370ab12/docutils-0.22.4-py3-none-any.whl", hash = "sha256:d0013f540772d1420576855455d050a2180186c91c15779301ac2ccb3eeb68de", size = 633196, upload-time = "2025-12-18T19:00:18.077Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = 
"sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = 
"2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = 
"sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "librt" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/9c/b4b0c54d84da4a94b37bd44151e46d5e583c9534c7e02250b961b1b6d8a8/librt-0.8.1.tar.gz", hash = "sha256:be46a14693955b3bd96014ccbdb8339ee8c9346fbe11c1b78901b55125f14c73", size = 177471, upload-time = "2026-02-17T16:13:06.101Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/3c/f614c8e4eaac7cbf2bbdf9528790b21d89e277ee20d57dc6e559c626105f/librt-0.8.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7e6bad1cd94f6764e1e21950542f818a09316645337fd5ab9a7acc45d99a8f35", size = 66529, upload-time = "2026-02-17T16:11:57.809Z" }, + { url = "https://files.pythonhosted.org/packages/ab/96/5836544a45100ae411eda07d29e3d99448e5258b6e9c8059deb92945f5c2/librt-0.8.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cf450f498c30af55551ba4f66b9123b7185362ec8b625a773b3d39aa1a717583", size = 68669, upload-time = "2026-02-17T16:11:58.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/53/f0b992b57af6d5531bf4677d75c44f095f2366a1741fb695ee462ae04b05/librt-0.8.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:eca45e982fa074090057132e30585a7e8674e9e885d402eae85633e9f449ce6c", size = 199279, upload-time = "2026-02-17T16:11:59.862Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ad/4848cc16e268d14280d8168aee4f31cea92bbd2b79ce33d3e166f2b4e4fc/librt-0.8.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c3811485fccfda840861905b8c70bba5ec094e02825598bb9d4ca3936857a04", size = 210288, upload-time = "2026-02-17T16:12:00.954Z" }, + { url = "https://files.pythonhosted.org/packages/52/05/27fdc2e95de26273d83b96742d8d3b7345f2ea2bdbd2405cc504644f2096/librt-0.8.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e4af413908f77294605e28cfd98063f54b2c790561383971d2f52d113d9c363", size = 224809, upload-time = "2026-02-17T16:12:02.108Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d0/78200a45ba3240cb042bc597d6f2accba9193a2c57d0356268cbbe2d0925/librt-0.8.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5212a5bd7fae98dae95710032902edcd2ec4dc994e883294f75c857b83f9aba0", size = 218075, upload-time = "2026-02-17T16:12:03.631Z" }, + { url = "https://files.pythonhosted.org/packages/af/72/a210839fa74c90474897124c064ffca07f8d4b347b6574d309686aae7ca6/librt-0.8.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e692aa2d1d604e6ca12d35e51fdc36f4cda6345e28e36374579f7ef3611b3012", size = 225486, upload-time = "2026-02-17T16:12:04.725Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c1/a03cc63722339ddbf087485f253493e2b013039f5b707e8e6016141130fa/librt-0.8.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4be2a5c926b9770c9e08e717f05737a269b9d0ebc5d2f0060f0fe3fe9ce47acb", size = 218219, upload-time = "2026-02-17T16:12:05.828Z" }, 
+ { url = "https://files.pythonhosted.org/packages/58/f5/fff6108af0acf941c6f274a946aea0e484bd10cd2dc37610287ce49388c5/librt-0.8.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fd1a720332ea335ceb544cf0a03f81df92abd4bb887679fd1e460976b0e6214b", size = 218750, upload-time = "2026-02-17T16:12:07.09Z" }, + { url = "https://files.pythonhosted.org/packages/71/67/5a387bfef30ec1e4b4f30562c8586566faf87e47d696768c19feb49e3646/librt-0.8.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2af9e01e0ef80d95ae3c720be101227edae5f2fe7e3dc63d8857fadfc5a1d", size = 241624, upload-time = "2026-02-17T16:12:08.43Z" }, + { url = "https://files.pythonhosted.org/packages/d4/be/24f8502db11d405232ac1162eb98069ca49c3306c1d75c6ccc61d9af8789/librt-0.8.1-cp313-cp313-win32.whl", hash = "sha256:086a32dbb71336627e78cc1d6ee305a68d038ef7d4c39aaff41ae8c9aa46e91a", size = 54969, upload-time = "2026-02-17T16:12:09.633Z" }, + { url = "https://files.pythonhosted.org/packages/5c/73/c9fdf6cb2a529c1a092ce769a12d88c8cca991194dfe641b6af12fa964d2/librt-0.8.1-cp313-cp313-win_amd64.whl", hash = "sha256:e11769a1dbda4da7b00a76cfffa67aa47cfa66921d2724539eee4b9ede780b79", size = 62000, upload-time = "2026-02-17T16:12:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/d3/97/68f80ca3ac4924f250cdfa6e20142a803e5e50fca96ef5148c52ee8c10ea/librt-0.8.1-cp313-cp313-win_arm64.whl", hash = "sha256:924817ab3141aca17893386ee13261f1d100d1ef410d70afe4389f2359fea4f0", size = 52495, upload-time = "2026-02-17T16:12:11.633Z" }, + { url = "https://files.pythonhosted.org/packages/c9/6a/907ef6800f7bca71b525a05f1839b21f708c09043b1c6aa77b6b827b3996/librt-0.8.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6cfa7fe54fd4d1f47130017351a959fe5804bda7a0bc7e07a2cdbc3fdd28d34f", size = 66081, upload-time = "2026-02-17T16:12:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/1b/18/25e991cd5640c9fb0f8d91b18797b29066b792f17bf8493da183bf5caabe/librt-0.8.1-cp314-cp314-macosx_11_0_arm64.whl", hash = 
"sha256:228c2409c079f8c11fb2e5d7b277077f694cb93443eb760e00b3b83cb8b3176c", size = 68309, upload-time = "2026-02-17T16:12:13.756Z" }, + { url = "https://files.pythonhosted.org/packages/a4/36/46820d03f058cfb5a9de5940640ba03165ed8aded69e0733c417bb04df34/librt-0.8.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7aae78ab5e3206181780e56912d1b9bb9f90a7249ce12f0e8bf531d0462dd0fc", size = 196804, upload-time = "2026-02-17T16:12:14.818Z" }, + { url = "https://files.pythonhosted.org/packages/59/18/5dd0d3b87b8ff9c061849fbdb347758d1f724b9a82241aa908e0ec54ccd0/librt-0.8.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:172d57ec04346b047ca6af181e1ea4858086c80bdf455f61994c4aa6fc3f866c", size = 206907, upload-time = "2026-02-17T16:12:16.513Z" }, + { url = "https://files.pythonhosted.org/packages/d1/96/ef04902aad1424fd7299b62d1890e803e6ab4018c3044dca5922319c4b97/librt-0.8.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6b1977c4ea97ce5eb7755a78fae68d87e4102e4aaf54985e8b56806849cc06a3", size = 221217, upload-time = "2026-02-17T16:12:17.906Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ff/7e01f2dda84a8f5d280637a2e5827210a8acca9a567a54507ef1c75b342d/librt-0.8.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:10c42e1f6fd06733ef65ae7bebce2872bcafd8d6e6b0a08fe0a05a23b044fb14", size = 214622, upload-time = "2026-02-17T16:12:19.108Z" }, + { url = "https://files.pythonhosted.org/packages/1e/8c/5b093d08a13946034fed57619742f790faf77058558b14ca36a6e331161e/librt-0.8.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4c8dfa264b9193c4ee19113c985c95f876fae5e51f731494fc4e0cf594990ba7", size = 221987, upload-time = "2026-02-17T16:12:20.331Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/cc/86b0b3b151d40920ad45a94ce0171dec1aebba8a9d72bb3fa00c73ab25dd/librt-0.8.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:01170b6729a438f0dedc4a26ed342e3dc4f02d1000b4b19f980e1877f0c297e6", size = 215132, upload-time = "2026-02-17T16:12:21.54Z" }, + { url = "https://files.pythonhosted.org/packages/fc/be/8588164a46edf1e69858d952654e216a9a91174688eeefb9efbb38a9c799/librt-0.8.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:7b02679a0d783bdae30d443025b94465d8c3dc512f32f5b5031f93f57ac32071", size = 215195, upload-time = "2026-02-17T16:12:23.073Z" }, + { url = "https://files.pythonhosted.org/packages/f5/f2/0b9279bea735c734d69344ecfe056c1ba211694a72df10f568745c899c76/librt-0.8.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:190b109bb69592a3401fe1ffdea41a2e73370ace2ffdc4a0e8e2b39cdea81b78", size = 237946, upload-time = "2026-02-17T16:12:24.275Z" }, + { url = "https://files.pythonhosted.org/packages/e9/cc/5f2a34fbc8aeb35314a3641f9956fa9051a947424652fad9882be7a97949/librt-0.8.1-cp314-cp314-win32.whl", hash = "sha256:e70a57ecf89a0f64c24e37f38d3fe217a58169d2fe6ed6d70554964042474023", size = 50689, upload-time = "2026-02-17T16:12:25.766Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/cd4d010ab2147339ca2b93e959c3686e964edc6de66ddacc935c325883d7/librt-0.8.1-cp314-cp314-win_amd64.whl", hash = "sha256:7e2f3edca35664499fbb36e4770650c4bd4a08abc1f4458eab9df4ec56389730", size = 57875, upload-time = "2026-02-17T16:12:27.465Z" }, + { url = "https://files.pythonhosted.org/packages/84/0f/2143cb3c3ca48bd3379dcd11817163ca50781927c4537345d608b5045998/librt-0.8.1-cp314-cp314-win_arm64.whl", hash = "sha256:0d2f82168e55ddefd27c01c654ce52379c0750ddc31ee86b4b266bcf4d65f2a3", size = 48058, upload-time = "2026-02-17T16:12:28.556Z" }, + { url = "https://files.pythonhosted.org/packages/d2/0e/9b23a87e37baf00311c3efe6b48d6b6c168c29902dfc3f04c338372fd7db/librt-0.8.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:2c74a2da57a094bd48d03fa5d196da83d2815678385d2978657499063709abe1", size = 68313, upload-time = "2026-02-17T16:12:29.659Z" }, + { url = "https://files.pythonhosted.org/packages/db/9a/859c41e5a4f1c84200a7d2b92f586aa27133c8243b6cac9926f6e54d01b9/librt-0.8.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a355d99c4c0d8e5b770313b8b247411ed40949ca44e33e46a4789b9293a907ee", size = 70994, upload-time = "2026-02-17T16:12:31.516Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/10605366ee599ed34223ac2bf66404c6fb59399f47108215d16d5ad751a8/librt-0.8.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2eb345e8b33fb748227409c9f1233d4df354d6e54091f0e8fc53acdb2ffedeb7", size = 220770, upload-time = "2026-02-17T16:12:33.294Z" }, + { url = "https://files.pythonhosted.org/packages/af/8d/16ed8fd452dafae9c48d17a6bc1ee3e818fd40ef718d149a8eff2c9f4ea2/librt-0.8.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9be2f15e53ce4e83cc08adc29b26fb5978db62ef2a366fbdf716c8a6c8901040", size = 235409, upload-time = "2026-02-17T16:12:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/89/1b/7bdf3e49349c134b25db816e4a3db6b94a47ac69d7d46b1e682c2c4949be/librt-0.8.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:785ae29c1f5c6e7c2cde2c7c0e148147f4503da3abc5d44d482068da5322fd9e", size = 246473, upload-time = "2026-02-17T16:12:36.656Z" }, + { url = "https://files.pythonhosted.org/packages/4e/8a/91fab8e4fd2a24930a17188c7af5380eb27b203d72101c9cc000dbdfd95a/librt-0.8.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1d3a7da44baf692f0c6aeb5b2a09c5e6fc7a703bca9ffa337ddd2e2da53f7732", size = 238866, upload-time = "2026-02-17T16:12:37.849Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/e0/c45a098843fc7c07e18a7f8a24ca8496aecbf7bdcd54980c6ca1aaa79a8e/librt-0.8.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5fc48998000cbc39ec0d5311312dda93ecf92b39aaf184c5e817d5d440b29624", size = 250248, upload-time = "2026-02-17T16:12:39.445Z" }, + { url = "https://files.pythonhosted.org/packages/82/30/07627de23036640c952cce0c1fe78972e77d7d2f8fd54fa5ef4554ff4a56/librt-0.8.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e96baa6820280077a78244b2e06e416480ed859bbd8e5d641cf5742919d8beb4", size = 240629, upload-time = "2026-02-17T16:12:40.889Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/55bfe1ee3542eba055616f9098eaf6eddb966efb0ca0f44eaa4aba327307/librt-0.8.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:31362dbfe297b23590530007062c32c6f6176f6099646bb2c95ab1b00a57c382", size = 239615, upload-time = "2026-02-17T16:12:42.446Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/191d3d28abc26c9099b19852e6c99f7f6d400b82fa5a4e80291bd3803e19/librt-0.8.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cc3656283d11540ab0ea01978378e73e10002145117055e03722417aeab30994", size = 263001, upload-time = "2026-02-17T16:12:43.627Z" }, + { url = "https://files.pythonhosted.org/packages/b9/eb/7697f60fbe7042ab4e88f4ee6af496b7f222fffb0a4e3593ef1f29f81652/librt-0.8.1-cp314-cp314t-win32.whl", hash = "sha256:738f08021b3142c2918c03692608baed43bc51144c29e35807682f8070ee2a3a", size = 51328, upload-time = "2026-02-17T16:12:45.148Z" }, + { url = "https://files.pythonhosted.org/packages/7c/72/34bf2eb7a15414a23e5e70ecb9440c1d3179f393d9349338a91e2781c0fb/librt-0.8.1-cp314-cp314t-win_amd64.whl", hash = "sha256:89815a22daf9c51884fb5dbe4f1ef65ee6a146e0b6a8df05f753e2e4a9359bf4", size = 58722, upload-time = "2026-02-17T16:12:46.85Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c8/d148e041732d631fc76036f8b30fae4e77b027a1e95b7a84bb522481a940/librt-0.8.1-cp314-cp314t-win_arm64.whl", hash = 
"sha256:bf512a71a23504ed08103a13c941f763db13fb11177beb3d9244c98c29fb4a61", size = 48755, upload-time = "2026-02-17T16:12:47.943Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { 
url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + +[[package]] +name = "mypy" +version = "1.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" }, + { url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" }, + { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" }, + { url = "https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" }, + { url = "https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" }, + { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = 
"sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" }, + { url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" }, + { url = "https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = 
"sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "pathspec" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = 
"2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = 
"2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-httpx" +version = "0.36.0" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "httpx" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/5574834da9499066fa1a5ea9c336f94dba2eae02298d36dab192fcf95c86/pytest_httpx-0.36.0.tar.gz", hash = "sha256:9edb66a5fd4388ce3c343189bc67e7e1cb50b07c2e3fc83b97d511975e8a831b", size = 56793, upload-time = "2025-12-02T16:34:57.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/d2/1eb1ea9c84f0d2033eb0b49675afdc71aa4ea801b74615f00f3c33b725e3/pytest_httpx-0.36.0-py3-none-any.whl", hash = "sha256:bd4c120bb80e142df856e825ec9f17981effb84d159f9fa29ed97e2357c3a9c8", size = 20229, upload-time = "2025-12-02T16:34:56.45Z" }, +] + +[[package]] +name = "pytest-mock" +version = "3.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036, upload-time = "2025-09-16T16:37:27.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash 
= "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = 
"2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "roman-numerals" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/f9/41dc953bbeb056c17d5f7a519f50fdf010bd0553be2d630bc69d1e022703/roman_numerals-4.1.0.tar.gz", 
hash = "sha256:1af8b147eb1405d5839e78aeb93131690495fe9da5c91856cb33ad55a7f1e5b2", size = 9077, upload-time = "2025-12-17T18:25:34.381Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/54/6f679c435d28e0a568d8e8a7c0a93a09010818634c3c3907fc98d8983770/roman_numerals-4.1.0-py3-none-any.whl", hash = "sha256:647ba99caddc2cc1e55a51e4360689115551bf4476d90e8162cf8c345fe233c7", size = 7676, upload-time = "2025-12-17T18:25:33.098Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/04/eab13a954e763b0606f460443fcbf6bb5a0faf06890ea3754ff16523dce5/ruff-0.15.2.tar.gz", hash = "sha256:14b965afee0969e68bb871eba625343b8673375f457af4abe98553e8bbb98342", size = 4558148, upload-time = "2026-02-19T22:32:20.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/70/3a4dc6d09b13cb3e695f28307e5d889b2e1a66b7af9c5e257e796695b0e6/ruff-0.15.2-py3-none-linux_armv6l.whl", hash = "sha256:120691a6fdae2f16d65435648160f5b81a9625288f75544dc40637436b5d3c0d", size = 10430565, upload-time = "2026-02-19T22:32:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/bb8457b56185ece1305c666dc895832946d24055be90692381c31d57466d/ruff-0.15.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a89056d831256099658b6bba4037ac6dd06f49d194199215befe2bb10457ea5e", size = 10820354, upload-time = "2026-02-19T22:32:07.366Z" }, + { url = "https://files.pythonhosted.org/packages/2d/c1/e0532d7f9c9e0b14c46f61b14afd563298b8b83f337b6789ddd987e46121/ruff-0.15.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e36dee3a64be0ebd23c86ffa3aa3fd3ac9a712ff295e192243f814a830b6bd87", size = 10170767, upload-time = "2026-02-19T22:32:13.188Z" }, + { url = "https://files.pythonhosted.org/packages/47/e8/da1aa341d3af017a21c7a62fb5ec31d4e7ad0a93ab80e3a508316efbcb23/ruff-0.15.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a9fb47b6d9764677f8c0a193c0943ce9a05d6763523f132325af8a858eadc2b9", size = 10529591, upload-time = "2026-02-19T22:32:02.547Z" }, + { url = "https://files.pythonhosted.org/packages/93/74/184fbf38e9f3510231fbc5e437e808f0b48c42d1df9434b208821efcd8d6/ruff-0.15.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f376990f9d0d6442ea9014b19621d8f2aaf2b8e39fdbfc79220b7f0c596c9b80", size = 10260771, upload-time = "2026-02-19T22:32:36.938Z" }, + { url = "https://files.pythonhosted.org/packages/05/ac/605c20b8e059a0bc4b42360414baa4892ff278cec1c91fff4be0dceedefd/ruff-0.15.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dcc987551952d73cbf5c88d9fdee815618d497e4df86cd4c4824cc59d5dd75f", size = 11045791, upload-time = "2026-02-19T22:32:31.642Z" }, + { url = "https://files.pythonhosted.org/packages/fd/52/db6e419908f45a894924d410ac77d64bdd98ff86901d833364251bd08e22/ruff-0.15.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42a47fd785cbe8c01b9ff45031af875d101b040ad8f4de7bbb716487c74c9a77", size = 11879271, upload-time = "2026-02-19T22:32:29.305Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d8/7992b18f2008bdc9231d0f10b16df7dda964dbf639e2b8b4c1b4e91b83af/ruff-0.15.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbe9f49354866e575b4c6943856989f966421870e85cd2ac94dccb0a9dcb2fea", size = 11303707, upload-time = "2026-02-19T22:32:22.492Z" }, + { url = "https://files.pythonhosted.org/packages/d7/02/849b46184bcfdd4b64cde61752cc9a146c54759ed036edd11857e9b8443b/ruff-0.15.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7a672c82b5f9887576087d97be5ce439f04bbaf548ee987b92d3a7dede41d3a", size = 11149151, upload-time = "2026-02-19T22:32:44.234Z" }, + { url = "https://files.pythonhosted.org/packages/70/04/f5284e388bab60d1d3b99614a5a9aeb03e0f333847e2429bebd2aaa1feec/ruff-0.15.2-py3-none-manylinux_2_31_riscv64.whl", hash = 
"sha256:72ecc64f46f7019e2bcc3cdc05d4a7da958b629a5ab7033195e11a438403d956", size = 11091132, upload-time = "2026-02-19T22:32:24.691Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ae/88d844a21110e14d92cf73d57363fab59b727ebeabe78009b9ccb23500af/ruff-0.15.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8dcf243b15b561c655c1ef2f2b0050e5d50db37fe90115507f6ff37d865dc8b4", size = 10504717, upload-time = "2026-02-19T22:32:26.75Z" }, + { url = "https://files.pythonhosted.org/packages/64/27/867076a6ada7f2b9c8292884ab44d08fd2ba71bd2b5364d4136f3cd537e1/ruff-0.15.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dab6941c862c05739774677c6273166d2510d254dac0695c0e3f5efa1b5585de", size = 10263122, upload-time = "2026-02-19T22:32:10.036Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ef/faf9321d550f8ebf0c6373696e70d1758e20ccdc3951ad7af00c0956be7c/ruff-0.15.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b9164f57fc36058e9a6806eb92af185b0697c9fe4c7c52caa431c6554521e5c", size = 10735295, upload-time = "2026-02-19T22:32:39.227Z" }, + { url = "https://files.pythonhosted.org/packages/2f/55/e8089fec62e050ba84d71b70e7834b97709ca9b7aba10c1a0b196e493f97/ruff-0.15.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:80d24fcae24d42659db7e335b9e1531697a7102c19185b8dc4a028b952865fd8", size = 11241641, upload-time = "2026-02-19T22:32:34.617Z" }, + { url = "https://files.pythonhosted.org/packages/23/01/1c30526460f4d23222d0fabd5888868262fd0e2b71a00570ca26483cd993/ruff-0.15.2-py3-none-win32.whl", hash = "sha256:fd5ff9e5f519a7e1bd99cbe8daa324010a74f5e2ebc97c6242c08f26f3714f6f", size = 10507885, upload-time = "2026-02-19T22:32:15.635Z" }, + { url = "https://files.pythonhosted.org/packages/5c/10/3d18e3bbdf8fc50bbb4ac3cc45970aa5a9753c5cb51bf9ed9a3cd8b79fa3/ruff-0.15.2-py3-none-win_amd64.whl", hash = "sha256:d20014e3dfa400f3ff84830dfb5755ece2de45ab62ecea4af6b7262d0fb4f7c5", size = 11623725, upload-time = "2026-02-19T22:32:04.947Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/78/097c0798b1dab9f8affe73da9642bb4500e098cb27fd8dc9724816ac747b/ruff-0.15.2-py3-none-win_arm64.whl", hash = "sha256:cabddc5822acdc8f7b5527b36ceac55cc51eec7b1946e60181de8fe83ca8876e", size = 10941649, upload-time = "2026-02-19T22:32:18.108Z" }, +] + +[[package]] +name = "sdk" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "httpx" }, + { name = "pydantic" }, + { name = "pyyaml" }, +] + +[package.dev-dependencies] +dev = [ + { name = "mypy" }, + { name = "pytest" }, + { name = "pytest-httpx" }, + { name = "pytest-mock" }, + { name = "ruff" }, +] +docs = [ + { name = "sphinx" }, + { name = "sphinx-autodoc-typehints" }, + { name = "sphinx-rtd-theme" }, +] + +[package.metadata] +requires-dist = [ + { name = "httpx", specifier = ">=0.28,<1" }, + { name = "pydantic", specifier = ">=2.12,<3" }, + { name = "pyyaml", specifier = ">=6.0.2,<7" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "mypy", specifier = ">=1.19" }, + { name = "pytest", specifier = ">=9.0.2,<10" }, + { name = "pytest-httpx", specifier = ">=0.36" }, + { name = "pytest-mock", specifier = ">=3.14" }, + { name = "ruff", specifier = ">=0.15.2" }, +] +docs = [ + { name = "sphinx", specifier = ">=8.0" }, + { name = "sphinx-autodoc-typehints", specifier = ">=2.0" }, + { name = "sphinx-rtd-theme", specifier = ">=3.0" }, +] + +[[package]] +name = "snowballstemmer" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = 
"sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, +] + +[[package]] +name = "sphinx" +version = "9.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "alabaster" }, + { name = "babel" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "docutils" }, + { name = "imagesize" }, + { name = "jinja2" }, + { name = "packaging" }, + { name = "pygments" }, + { name = "requests" }, + { name = "roman-numerals" }, + { name = "snowballstemmer" }, + { name = "sphinxcontrib-applehelp" }, + { name = "sphinxcontrib-devhelp" }, + { name = "sphinxcontrib-htmlhelp" }, + { name = "sphinxcontrib-jsmath" }, + { name = "sphinxcontrib-qthelp" }, + { name = "sphinxcontrib-serializinghtml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cd/bd/f08eb0f4eed5c83f1ba2a3bd18f7745a2b1525fad70660a1c00224ec468a/sphinx-9.1.0.tar.gz", hash = "sha256:7741722357dd75f8190766926071fed3bdc211c74dd2d7d4df5404da95930ddb", size = 8718324, upload-time = "2025-12-31T15:09:27.646Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/f7/b1884cb3188ab181fc81fa00c266699dab600f927a964df02ec3d5d1916a/sphinx-9.1.0-py3-none-any.whl", hash = "sha256:c84fdd4e782504495fe4f2c0b3413d6c2bf388589bb352d439b2a3bb99991978", size = 3921742, upload-time = "2025-12-31T15:09:25.561Z" }, +] + +[[package]] +name = "sphinx-autodoc-typehints" +version = "3.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/5f/ebcaed1a67e623e4a7622808a8be6b0fd8344313e185f62e85a26b0ce26a/sphinx_autodoc_typehints-3.6.3.tar.gz", hash = "sha256:6c387b47d9ad5e75b157810af5bad46901f0a22708ed5e4adf466885a9c60910", size = 38288, upload-time = "2026-02-18T04:22:08.384Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/0a/bd/2b853836d152e40a27655828fdc02c5128f294ac452ad9a13424bb7f92fa/sphinx_autodoc_typehints-3.6.3-py3-none-any.whl", hash = "sha256:46ebc68fa85b320d55887a8d836a01e12e3b7744da973e70af8cedc74072aad5", size = 20882, upload-time = "2026-02-18T04:22:07.238Z" }, +] + +[[package]] +name = "sphinx-rtd-theme" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "sphinx" }, + { name = "sphinxcontrib-jquery" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/84/68/a1bfbf38c0f7bccc9b10bbf76b94606f64acb1552ae394f0b8285bfaea25/sphinx_rtd_theme-3.1.0.tar.gz", hash = "sha256:b44276f2c276e909239a4f6c955aa667aaafeb78597923b1c60babc76db78e4c", size = 7620915, upload-time = "2026-01-12T16:03:31.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/c7/b5c8015d823bfda1a346adb2c634a2101d50bb75d421eb6dcb31acd25ebc/sphinx_rtd_theme-3.1.0-py2.py3-none-any.whl", hash = "sha256:1785824ae8e6632060490f67cf3a72d404a85d2d9fc26bce3619944de5682b89", size = 7655617, upload-time = "2026-01-12T16:03:28.101Z" }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, +] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/f3/aa67467e051df70a6330fe7770894b3e4f09436dea6881ae0b4f3d87cad8/sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a", size = 122331, upload-time = "2023-03-14T15:01:01.944Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/85/749bd22d1a68db7291c89e2ebca53f4306c3f205853cf31e9de279034c3c/sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", 
hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae", size = 121104, upload-time = "2023-03-14T15:01:00.356Z" }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 
16080, upload-time = "2024-07-29T01:10:09.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = 
{ url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +]