diff --git a/docs/conf.py b/docs/conf.py index 0cf304dec..714155c82 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -159,6 +159,7 @@ ] autosummary_generate = False +smartquotes = False autosectionlabel_prefix_document = True diff --git a/pyproject.toml b/pyproject.toml index 53240438b..e19dee302 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,7 @@ uuid = ["uuid-utils"] [dependency-groups] benchmarks = ["sqlalchemy[asyncio]", "psutil", "types-psutil", "duckdb-engine>=0.17.0"] -build = ["bump-my-version", "hatch-mypyc", "pydantic-settings"] +build = ["bump-my-version", "hatch-mypyc", "mypy>=1.19.1", "pydantic-settings"] dev = [ { include-group = "extras" }, { include-group = "lint" }, @@ -115,7 +115,7 @@ extras = [ "dishka", ] lint = [ - "mypy>=1.13.0", + "mypy>=1.19.1", "pre-commit>=3.5.0", "pyright>=1.1.386", "ruff>=0.7.1", @@ -176,7 +176,7 @@ packages = ["sqlspec"] [tool.hatch.build.targets.wheel.hooks.mypyc] -dependencies = ["hatch-mypyc", "hatch-cython"] +dependencies = ["hatch-mypyc", "hatch-cython", "mypy>=1.19.1"] enable-by-default = false exclude = [ "tests/**", # Test files @@ -201,7 +201,6 @@ exclude = [ "sqlspec/adapters/**/data_dictionary.py", # Cross-module inheritance causes mypyc segfaults "sqlspec/observability/_formatting.py", # Inherits from non-compiled logging.Formatter "sqlspec/utils/arrow_helpers.py", # Arrow operations cause segfaults when compiled - "sqlspec/storage/backends/_iterators.py", # Async __anext__ + asyncio.to_thread causes mypyc segfault ] include = [ "sqlspec/core/**/*.py", # Core module @@ -212,7 +211,7 @@ include = [ "sqlspec/driver/**/*.py", # Driver module "sqlspec/storage/registry.py", # Safe storage registry/runtime routing "sqlspec/storage/errors.py", # Safe storage error normalization - "sqlspec/storage/backends/base.py", # Storage backend runtime base classes (iterators in _iterators.py) + "sqlspec/storage/backends/base.py", # Storage backend runtime base classes "sqlspec/data_dictionary/**/*.py", # Data 
dictionary mixin (required for adapter inheritance) "sqlspec/adapters/**/core.py", # Adapter compiled helpers "sqlspec/adapters/**/type_converter.py", # All adapters type converters diff --git a/sqlspec/adapters/psycopg/_typing.py b/sqlspec/adapters/psycopg/_typing.py index b2c42a5bf..508f59eb9 100644 --- a/sqlspec/adapters/psycopg/_typing.py +++ b/sqlspec/adapters/psycopg/_typing.py @@ -8,6 +8,9 @@ from psycopg import AsyncConnection, AsyncCursor, Connection, Cursor from psycopg.rows import DictRow as PsycopgDictRow +from psycopg.sql import SQL as PsycopgSQL # noqa: N811 +from psycopg.sql import Composed as PsycopgComposed +from psycopg.sql import Identifier as PsycopgIdentifier if TYPE_CHECKING: from collections.abc import Callable @@ -203,8 +206,11 @@ async def __aexit__( "PsycopgAsyncCursor", "PsycopgAsyncRawCursor", "PsycopgAsyncSessionContext", + "PsycopgComposed", "PsycopgDictRow", + "PsycopgIdentifier", "PsycopgPipelineDriver", + "PsycopgSQL", "PsycopgSyncConnection", "PsycopgSyncCursor", "PsycopgSyncRawCursor", diff --git a/sqlspec/adapters/psycopg/core.py b/sqlspec/adapters/psycopg/core.py index 633540dfa..08c17d374 100644 --- a/sqlspec/adapters/psycopg/core.py +++ b/sqlspec/adapters/psycopg/core.py @@ -4,9 +4,9 @@ from collections.abc import Sized from typing import TYPE_CHECKING, Any, NamedTuple, cast -from psycopg import sql as psycopg_sql from typing_extensions import LiteralString +from sqlspec.adapters.psycopg._typing import PsycopgComposed, PsycopgIdentifier, PsycopgSQL from sqlspec.core import ( SQL, DriverParameterProfile, @@ -88,7 +88,7 @@ class PreparedStackOperation(NamedTuple): operation_index: int operation: "StackOperation" statement: "SQL" - sql: "LiteralString | psycopg_sql.SQL" + sql: "LiteralString | PsycopgSQL | PsycopgComposed" parameters: "tuple[Any, ...] 
| dict[str, Any] | None" @@ -113,23 +113,23 @@ def pipeline_supported() -> bool: return False -def _compose_table_identifier(table: str) -> "psycopg_sql.Composed": +def _compose_table_identifier(table: str) -> "PsycopgComposed": parts = [part for part in table.split(".") if part] if not parts: msg = "Table name must not be empty" raise SQLSpecError(msg) - identifiers = [psycopg_sql.Identifier(part) for part in parts] - return psycopg_sql.SQL(".").join(identifiers) + identifiers = [PsycopgIdentifier(part) for part in parts] + return PsycopgSQL(".").join(identifiers) -def build_copy_from_command(table: str, columns: "list[str]") -> "psycopg_sql.Composed": +def build_copy_from_command(table: str, columns: "list[str]") -> "PsycopgComposed": table_identifier = _compose_table_identifier(table) - column_sql = psycopg_sql.SQL(", ").join([psycopg_sql.Identifier(column) for column in columns]) - return psycopg_sql.SQL("COPY {} ({}) FROM STDIN").format(table_identifier, column_sql) + column_sql = PsycopgSQL(", ").join([PsycopgIdentifier(column) for column in columns]) + return PsycopgSQL("COPY {} ({}) FROM STDIN").format(table_identifier, column_sql) -def build_truncate_command(table: str) -> "psycopg_sql.Composed": - return psycopg_sql.SQL("TRUNCATE TABLE {}").format(_compose_table_identifier(table)) +def build_truncate_command(table: str) -> "PsycopgComposed": + return PsycopgSQL("TRUNCATE TABLE {}").format(_compose_table_identifier(table)) def _identity(value: Any) -> Any: diff --git a/sqlspec/adapters/psycopg/driver.py b/sqlspec/adapters/psycopg/driver.py index bfd09847e..47ebf08f3 100644 --- a/sqlspec/adapters/psycopg/driver.py +++ b/sqlspec/adapters/psycopg/driver.py @@ -5,13 +5,14 @@ from typing import TYPE_CHECKING, Any, cast import psycopg -from psycopg import sql as psycopg_sql from typing_extensions import LiteralString from sqlspec.adapters.psycopg._typing import ( PsycopgAsyncConnection, PsycopgAsyncCursor, PsycopgAsyncSessionContext, + PsycopgComposed, + 
PsycopgSQL, PsycopgSyncConnection, PsycopgSyncCursor, PsycopgSyncSessionContext, @@ -111,7 +112,7 @@ def _prepare_pipeline_operations(self, stack: "StatementStack") -> "list[Prepare operation_index=index, operation=operation, statement=sql_statement, - sql=cast("LiteralString | psycopg_sql.SQL", sql_text), + sql=cast("LiteralString | PsycopgSQL | PsycopgComposed", sql_text), parameters=prepared_parameters, ) ) @@ -396,7 +397,7 @@ def _raise_pending_exception(exception_ctx: "PsycopgSyncExceptionHandler") -> No cursor = resource_stack.enter_context(self.with_cursor(self.connection)) try: - sql = cast("LiteralString | psycopg_sql.SQL", prepared.sql) # type: ignore[redundant-cast] + sql = cast("LiteralString | PsycopgSQL | PsycopgComposed", prepared.sql) # type: ignore[redundant-cast] if prepared.parameters: cursor.execute(sql, prepared.parameters) else: @@ -855,7 +856,7 @@ def _raise_pending_exception(exception_ctx: "PsycopgAsyncExceptionHandler") -> N cursor = await resource_stack.enter_async_context(self.with_cursor(self.connection)) try: - sql = cast("LiteralString | psycopg_sql.SQL", prepared.sql) # type: ignore[redundant-cast] + sql = cast("LiteralString | PsycopgSQL | PsycopgComposed", prepared.sql) # type: ignore[redundant-cast] if prepared.parameters: await cursor.execute(sql, prepared.parameters) else: diff --git a/sqlspec/config.py b/sqlspec/config.py index 63d1ae2dd..7d899bf7c 100644 --- a/sqlspec/config.py +++ b/sqlspec/config.py @@ -968,6 +968,7 @@ class DatabaseConfigProtocol(ABC, Generic[ConnectionT, PoolT, DriverT]): __slots__ = ( "_migration_commands", + "_migration_config", "_migration_loader", "_observability_runtime", "_storage_capabilities", @@ -975,13 +976,13 @@ class DatabaseConfigProtocol(ABC, Generic[ConnectionT, PoolT, DriverT]): "connection_instance", "driver_features", "extension_config", - "migration_config", "observability_config", "statement_config", ) _migration_loader: "SQLFileLoader" _migration_commands: "SyncMigrationCommands[Any] | 
AsyncMigrationCommands[Any]" + _migration_config: "dict[str, Any] | MigrationConfig" driver_type: "ClassVar[type[Any]]" connection_type: "ClassVar[type[Any]]" is_async: "ClassVar[bool]" = False @@ -998,7 +999,6 @@ class DatabaseConfigProtocol(ABC, Generic[ConnectionT, PoolT, DriverT]): bind_key: "str | None" statement_config: "StatementConfig" connection_instance: "PoolT | None" - migration_config: "dict[str, Any] | MigrationConfig" extension_config: "ExtensionConfigs" driver_features: "dict[str, Any]" _storage_capabilities: "StorageCapabilities | None" @@ -1022,6 +1022,20 @@ def __repr__(self) -> str: ]) return f"{type(self).__name__}({parts})" + @property + def migration_config(self) -> "dict[str, Any] | MigrationConfig": + """Return the current migration configuration.""" + return self._migration_config + + @migration_config.setter + def migration_config(self, value: "dict[str, Any] | MigrationConfig | None") -> None: + """Store migration configuration and refresh derived migration helpers.""" + object.__setattr__(self, "_migration_config", dict(cast("dict[str, Any]", value) or {})) + if self._has_initialized_attribute("extension_config"): + self._ensure_extension_migrations() + if self._migration_components_ready(): + self._initialize_migration_components() + def storage_capabilities(self) -> "StorageCapabilities": """Return cached storage capabilities for this configuration.""" @@ -1034,6 +1048,20 @@ def reset_storage_capabilities_cache(self) -> None: self._storage_capabilities = None + def _has_initialized_attribute(self, attribute_name: str) -> bool: + """Return whether a slot-backed attribute has been initialized.""" + try: + object.__getattribute__(self, attribute_name) + except AttributeError: + return False + return True + + def _migration_components_ready(self) -> bool: + """Return whether migration helpers have already been initialized.""" + return self._has_initialized_attribute("_migration_loader") and self._has_initialized_attribute( + 
"_migration_commands" + ) + def _ensure_extension_migrations(self) -> None: """Auto-include extension migrations when extension_config has them configured. @@ -1473,8 +1501,7 @@ def __init__( self.connection_instance = connection_instance self.connection_config = connection_config or {} self.extension_config = extension_config or {} - self.migration_config: dict[str, Any] | MigrationConfig = migration_config or {} - self._ensure_extension_migrations() + self.migration_config = migration_config or {} self._init_observability(observability_config) self._initialize_migration_components() @@ -1637,8 +1664,7 @@ def __init__( self.connection_instance = connection_instance self.connection_config = connection_config or {} self.extension_config = extension_config or {} - self.migration_config: dict[str, Any] | MigrationConfig = migration_config or {} - self._ensure_extension_migrations() + self.migration_config = migration_config or {} self._init_observability(observability_config) self._initialize_migration_components() @@ -1806,8 +1832,7 @@ def __init__( self.connection_instance = connection_instance self.connection_config = connection_config or {} self.extension_config = extension_config or {} - self.migration_config: dict[str, Any] | MigrationConfig = migration_config or {} - self._ensure_extension_migrations() + self.migration_config = migration_config or {} self._init_observability(observability_config) self._initialize_migration_components() @@ -2017,8 +2042,7 @@ def __init__( self.connection_instance = connection_instance self.connection_config = connection_config or {} self.extension_config = extension_config or {} - self.migration_config: dict[str, Any] | MigrationConfig = migration_config or {} - self._ensure_extension_migrations() + self.migration_config = migration_config or {} self._init_observability(observability_config) self._initialize_migration_components() diff --git a/sqlspec/driver/_async.py b/sqlspec/driver/_async.py index d1c8afaaa..37907e24a 100644 
--- a/sqlspec/driver/_async.py +++ b/sqlspec/driver/_async.py @@ -174,6 +174,17 @@ def data_dictionary(self) -> "AsyncDataDictionaryBase": # CORE DISPATCH METHODS - The Execution Engine # ───────────────────────────────────────────────────────────────────────────── + @staticmethod + def _raise_async_database_exception(exc_handler: AsyncExceptionHandler, exc: Exception | None) -> None: + """Raise any mapped database exception captured by the async handler.""" + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + if exc is None: + raise pending_exception from None + raise pending_exception from exc + if exc is not None: + raise exc + @final async def dispatch_statement_execution(self, statement: "SQL", connection: "Any") -> "SQLResult": """Central execution dispatcher using the Template Method Pattern. @@ -205,56 +216,37 @@ async def dispatch_statement_execution(self, statement: "SQL", connection: "Any" result: SQLResult | None = None exc_handler = self.handle_database_exceptions() - cursor_manager = self.with_cursor(connection) - cursor: Any | None = None - exc: Exception | None = None - exc_handler_entered = False - cursor_entered = False - try: - await exc_handler.__aenter__() - exc_handler_entered = True - cursor = await cursor_manager.__aenter__() - cursor_entered = True - special_result = await self.dispatch_special_handling(cursor, statement) - if special_result is not None: - result = special_result - elif statement.is_script: - execution_result = await self.dispatch_execute_script(cursor, statement) - result = self.build_statement_result(statement, execution_result) - elif statement.is_many: - execution_result = await self.dispatch_execute_many(cursor, statement) - result = self.build_statement_result(statement, execution_result) - else: - execution_result = await self.dispatch_execute(cursor, statement) - result = self.build_statement_result(statement, execution_result) - except Exception as err: - exc = err - finally: - 
if cursor_entered: - if exc is None: - await cursor_manager.__aexit__(None, None, None) - else: - await cursor_manager.__aexit__(type(exc), exc, exc.__traceback__) - if exc_handler_entered: - if exc is None: - await exc_handler.__aexit__(None, None, None) + async with exc_handler, self.with_cursor(connection) as cursor: + special_result = await self.dispatch_special_handling(cursor, statement) + if special_result is not None: + result = special_result + elif statement.is_script: + execution_result = await self.dispatch_execute_script(cursor, statement) + result = self.build_statement_result(statement, execution_result) + elif statement.is_many: + execution_result = await self.dispatch_execute_many(cursor, statement) + result = self.build_statement_result(statement, execution_result) else: - await exc_handler.__aexit__(type(exc), exc, exc.__traceback__) - - if exc is not None: - mapped_exc = exc_handler.pending_exception or exc + execution_result = await self.dispatch_execute(cursor, statement) + result = self.build_statement_result(statement, execution_result) + except Exception as exc: # pragma: no cover - instrumentation path + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + mapped_exc = pending_exception + runtime.span_manager.end_span(span, error=mapped_exc) + runtime.emit_error(mapped_exc, **query_context) + self._raise_async_database_exception(exc_handler, exc) + runtime.span_manager.end_span(span, error=exc) + runtime.emit_error(exc, **query_context) + self._raise_async_database_exception(exc_handler, exc) + + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + mapped_exc = pending_exception runtime.span_manager.end_span(span, error=mapped_exc) runtime.emit_error(mapped_exc, **query_context) - if exc_handler.pending_exception is not None: - raise mapped_exc from exc - raise exc - - if exc_handler.pending_exception is not None: - mapped_exc = exc_handler.pending_exception - 
runtime.span_manager.end_span(span, error=mapped_exc) - runtime.emit_error(mapped_exc, **query_context) - raise mapped_exc from None + self._raise_async_database_exception(exc_handler, None) assert result is not None # Guaranteed: no exception means result was assigned @@ -417,55 +409,25 @@ async def _stmt_cache_execute_direct( ) exc_handler = self.handle_database_exceptions() - cursor_manager = self.with_cursor(self.connection) - cursor: Any | None = None - exc: Exception | None = None - exc_handler_entered = False - cursor_entered = False result: SQLResult | None = None - - try: - await exc_handler.__aenter__() - exc_handler_entered = True - cursor = await cursor_manager.__aenter__() - cursor_entered = True - execution_result = await self.dispatch_execute(cursor, direct_statement) - - if cached.operation_profile.returns_rows: - result = self.build_statement_result(direct_statement, execution_result) - else: - # DML path: use DMLResult to bypass full SQLResult construction - affected_rows = ( - execution_result.rowcount_override - if execution_result.rowcount_override is not None and execution_result.rowcount_override >= 0 - else 0 - ) - result = DMLResult(cached.operation_type, affected_rows) - except Exception as err: - exc = err - finally: - if cursor_entered: - if exc is None: - await cursor_manager.__aexit__(None, None, None) - else: - await cursor_manager.__aexit__(type(exc), exc, exc.__traceback__) - if exc_handler_entered: - if exc is None: - await exc_handler.__aexit__(None, None, None) - else: - await exc_handler.__aexit__(type(exc), exc, exc.__traceback__) - try: - if exc is not None: - mapped_exc = exc_handler.pending_exception or exc - if exc_handler.pending_exception is not None: - raise mapped_exc from exc - raise exc - - if exc_handler.pending_exception is not None: - mapped_exc = exc_handler.pending_exception - raise mapped_exc from None + async with exc_handler, self.with_cursor(self.connection) as cursor: + execution_result = await 
self.dispatch_execute(cursor, direct_statement) + if cached.operation_profile.returns_rows: + result = self.build_statement_result(direct_statement, execution_result) + else: + # DML path: use DMLResult to bypass full SQLResult construction + affected_rows = ( + execution_result.rowcount_override + if execution_result.rowcount_override is not None and execution_result.rowcount_override >= 0 + else 0 + ) + result = DMLResult(cached.operation_type, affected_rows) + + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + raise pending_exception from None assert result is not None return result finally: @@ -493,45 +455,15 @@ async def _stmt_cache_execute(self, statement: "SQL") -> "SQLResult": will hit the fast path in _get_compiled_statement (is_processed check). """ exc_handler = self.handle_database_exceptions() - cursor_manager = self.with_cursor(self.connection) - cursor: Any | None = None - exc: Exception | None = None - exc_handler_entered = False - cursor_entered = False result: SQLResult | None = None - - try: - await exc_handler.__aenter__() - exc_handler_entered = True - cursor = await cursor_manager.__aenter__() - cursor_entered = True - execution_result = await self.dispatch_execute(cursor, statement) - result = self.build_statement_result(statement, execution_result) - except Exception as err: - exc = err - finally: - if cursor_entered: - if exc is None: - await cursor_manager.__aexit__(None, None, None) - else: - await cursor_manager.__aexit__(type(exc), exc, exc.__traceback__) - if exc_handler_entered: - if exc is None: - await exc_handler.__aexit__(None, None, None) - else: - await exc_handler.__aexit__(type(exc), exc, exc.__traceback__) - try: - if exc is not None: - mapped_exc = exc_handler.pending_exception or exc - if exc_handler.pending_exception is not None: - raise mapped_exc from exc - raise exc - - if exc_handler.pending_exception is not None: - mapped_exc = exc_handler.pending_exception - raise mapped_exc 
from None + async with exc_handler, self.with_cursor(self.connection) as cursor: + execution_result = await self.dispatch_execute(cursor, statement) + result = self.build_statement_result(statement, execution_result) + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + raise pending_exception from None assert result is not None return result finally: @@ -585,21 +517,30 @@ async def execute( **kwargs: Any, ) -> "SQLResult": """Execute a statement with parameter handling.""" - if ( - self._stmt_cache_enabled - and (statement_config is None or statement_config is self.statement_config) - and isinstance(statement, str) - and len(parameters) == 1 - and isinstance(parameters[0], (tuple, list)) - and not kwargs - ): - fast_result = await self._stmt_cache_lookup(statement, parameters[0]) - if fast_result is not None: - return fast_result - sql_statement = self.prepare_statement( - statement, parameters, statement_config=statement_config or self.statement_config, kwargs=kwargs - ) - return await self.dispatch_statement_execution(statement=sql_statement, connection=self.connection) + exc_handler = self.handle_database_exceptions() + result: SQLResult | None = None + async with exc_handler: + if ( + self._stmt_cache_enabled + and (statement_config is None or statement_config is self.statement_config) + and isinstance(statement, str) + and len(parameters) == 1 + and isinstance(parameters[0], (tuple, list)) + and not kwargs + ): + fast_result = await self._stmt_cache_lookup(statement, parameters[0]) + if fast_result is not None: + result = fast_result + if result is None: + sql_statement = self.prepare_statement( + statement, parameters, statement_config=statement_config or self.statement_config, kwargs=kwargs + ) + result = await self.dispatch_statement_execution(statement=sql_statement, connection=self.connection) + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + raise pending_exception from None + 
assert result is not None + return result async def execute_many( self, @@ -614,19 +555,27 @@ async def execute_many( Parameters passed will be used as the batch execution sequence. """ - config = statement_config or self.statement_config - - if isinstance(statement, str) and not filters and not kwargs: - sql_statement = SQL(statement, parameters, statement_config=config, is_many=True) - elif isinstance(statement, SQL): - statement_seed = statement.raw_expression or statement.raw_sql - sql_statement = SQL(statement_seed, parameters, statement_config=config, is_many=True, **kwargs) - else: - base_statement = self.prepare_statement(statement, filters, statement_config=config, kwargs=kwargs) - statement_seed = base_statement.raw_expression or base_statement.raw_sql - sql_statement = SQL(statement_seed, parameters, statement_config=config, is_many=True, **kwargs) + exc_handler = self.handle_database_exceptions() + result: SQLResult | None = None + async with exc_handler: + config = statement_config or self.statement_config + + if isinstance(statement, str) and not filters and not kwargs: + sql_statement = SQL(statement, parameters, statement_config=config, is_many=True) + elif isinstance(statement, SQL): + statement_seed = statement.raw_expression or statement.raw_sql + sql_statement = SQL(statement_seed, parameters, statement_config=config, is_many=True, **kwargs) + else: + base_statement = self.prepare_statement(statement, filters, statement_config=config, kwargs=kwargs) + statement_seed = base_statement.raw_expression or base_statement.raw_sql + sql_statement = SQL(statement_seed, parameters, statement_config=config, is_many=True, **kwargs) - return await self.dispatch_statement_execution(statement=sql_statement, connection=self.connection) + result = await self.dispatch_statement_execution(statement=sql_statement, connection=self.connection) + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + raise pending_exception from None + 
assert result is not None + return result async def execute_script( self, @@ -641,10 +590,19 @@ async def execute_script( By default, validates each statement and logs warnings for dangerous operations. Use suppress_warnings=True for migrations and admin scripts. """ - config = statement_config or self.statement_config - sql_statement = self.prepare_statement(statement, parameters, statement_config=config, kwargs=kwargs) - - return await self.dispatch_statement_execution(statement=sql_statement.as_script(), connection=self.connection) + exc_handler = self.handle_database_exceptions() + result: SQLResult | None = None + async with exc_handler: + config = statement_config or self.statement_config + sql_statement = self.prepare_statement(statement, parameters, statement_config=config, kwargs=kwargs) + result = await self.dispatch_statement_execution( + statement=sql_statement.as_script(), connection=self.connection + ) + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + raise pending_exception from None + assert result is not None + return result # ───────────────────────────────────────────────────────────────────────────── # PUBLIC API - Query Methods (select/fetch variants) @@ -950,11 +908,7 @@ async def select_value( **kwargs: Additional keyword arguments. Returns: - The scalar value, optionally converted to the specified type. - - Raises: - ValueError: If no rows or more than one row/column is returned. - TypeError: If value_type is provided and conversion fails. + The scalar value, optionally converted to the specified type Examples: Basic usage (returns Any): @@ -1084,10 +1038,6 @@ async def select_value_or_none( Returns: The scalar value (optionally converted), or None if no rows found. - Raises: - ValueError: If more than one row is returned. - TypeError: If value_type is provided and conversion fails. 
- Examples: Basic usage: diff --git a/sqlspec/driver/_exception_handler.py b/sqlspec/driver/_exception_handler.py index 98b5e46ab..d4ba9ee0d 100644 --- a/sqlspec/driver/_exception_handler.py +++ b/sqlspec/driver/_exception_handler.py @@ -5,6 +5,8 @@ from mypy_extensions import mypyc_attr from typing_extensions import Self +from sqlspec.exceptions import SQLSpecError + if TYPE_CHECKING: from types import TracebackType @@ -29,6 +31,11 @@ async def __aexit__( _ = exc_tb if exc_val is None: return False + + # Do not re-map if already a SQLSpecError + if isinstance(exc_val, SQLSpecError): + return False + return self._handle_exception(exc_type, exc_val) def _handle_exception(self, exc_type: "type[BaseException] | None", exc_val: "BaseException") -> bool: @@ -58,6 +65,11 @@ def __exit__( _ = exc_tb if exc_val is None: return False + + # Do not re-map if already a SQLSpecError + if isinstance(exc_val, SQLSpecError): + return False + return self._handle_exception(exc_type, exc_val) def _handle_exception(self, exc_type: "type[BaseException] | None", exc_val: "BaseException") -> bool: diff --git a/sqlspec/driver/_sync.py b/sqlspec/driver/_sync.py index e1aadb3c3..90dca82e7 100644 --- a/sqlspec/driver/_sync.py +++ b/sqlspec/driver/_sync.py @@ -155,6 +155,17 @@ def data_dictionary(self) -> "SyncDataDictionaryBase": # CORE DISPATCH METHODS - The Execution Engine # ───────────────────────────────────────────────────────────────────────────── + @staticmethod + def _raise_sync_database_exception(exc_handler: SyncExceptionHandler, exc: Exception | None) -> None: + """Raise any mapped database exception captured by the sync handler.""" + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + if exc is None: + raise pending_exception from None + raise pending_exception from exc + if exc is not None: + raise exc + @final def dispatch_statement_execution(self, statement: "SQL", connection: "Any") -> "SQLResult": """Central execution dispatcher using the 
Template Method Pattern. @@ -173,34 +184,31 @@ def dispatch_statement_execution(self, statement: "SQL", connection: "Any") -> " # via the fast path in _get_compiled_statement(). This ensures compile() # is called exactly once per statement execution. compiled_sql, execution_parameters = statement.compile() + result: SQLResult | None = None # FAST PATH: Skip all instrumentation if runtime is idle if runtime.is_idle: exc_handler = self.handle_database_exceptions() - try: - with exc_handler, self.with_cursor(connection) as cursor: - # Logic mirrors the instrumentation path below but without telemetry - if statement.is_script: - execution_result = self.dispatch_execute_script(cursor, statement) - return self.build_statement_result(statement, execution_result) - if statement.is_many: - execution_result = self.dispatch_execute_many(cursor, statement) - return self.build_statement_result(statement, execution_result) - - # check special handling first + with exc_handler, self.with_cursor(connection) as cursor: + # Logic mirrors the instrumentation path below but without telemetry + if statement.is_script: + execution_result = self.dispatch_execute_script(cursor, statement) + result = self.build_statement_result(statement, execution_result) + elif statement.is_many: + execution_result = self.dispatch_execute_many(cursor, statement) + result = self.build_statement_result(statement, execution_result) + else: special_result = self.dispatch_special_handling(cursor, statement) if special_result is not None: - return special_result - - execution_result = self.dispatch_execute(cursor, statement) - return self.build_statement_result(statement, execution_result) - except Exception as exc: - if exc_handler.pending_exception is not None: - raise exc_handler.pending_exception from exc - raise - finally: - if exc_handler.pending_exception is not None: - raise exc_handler.pending_exception from None + result = special_result + else: + execution_result = self.dispatch_execute(cursor, 
statement) + result = self.build_statement_result(statement, execution_result) + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + raise pending_exception from None + assert result is not None + return result operation = statement.operation_type query_context = { @@ -214,8 +222,6 @@ def dispatch_statement_execution(self, statement: "SQL", connection: "Any") -> " runtime.emit_query_start(**query_context) span = runtime.start_query_span(compiled_sql, operation, type(self).__name__) started = perf_counter() - - result: SQLResult | None = None exc_handler = self.handle_database_exceptions() try: with exc_handler, self.with_cursor(connection) as cursor: @@ -232,20 +238,22 @@ def dispatch_statement_execution(self, statement: "SQL", connection: "Any") -> " execution_result = self.dispatch_execute(cursor, statement) result = self.build_statement_result(statement, execution_result) except Exception as exc: # pragma: no cover - instrumentation path - if exc_handler.pending_exception is not None: - mapped_exc = exc_handler.pending_exception + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + mapped_exc = pending_exception runtime.span_manager.end_span(span, error=mapped_exc) runtime.emit_error(mapped_exc, **query_context) - raise mapped_exc from exc + self._raise_sync_database_exception(exc_handler, exc) runtime.span_manager.end_span(span, error=exc) runtime.emit_error(exc, **query_context) - raise + self._raise_sync_database_exception(exc_handler, exc) - if exc_handler.pending_exception is not None: - mapped_exc = exc_handler.pending_exception + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + mapped_exc = pending_exception runtime.span_manager.end_span(span, error=mapped_exc) runtime.emit_error(mapped_exc, **query_context) - raise mapped_exc from None + self._raise_sync_database_exception(exc_handler, None) assert result is not None # Guaranteed: no exception 
means result was assigned @@ -406,65 +414,59 @@ def _stmt_cache_execute_direct( """ direct_statement: SQL | None = None exc_handler = self.handle_database_exceptions() + result: SQLResult | None = None try: - try: - with exc_handler, self.with_cursor(self.connection) as cursor: - if hasattr(cursor, "execute"): - try: - cursor.execute(cached.compiled_sql, params) - if cached.operation_profile.returns_rows: - fetched_data = cursor.fetchall() - data, column_names, row_count = self.collect_rows(cursor, fetched_data) - execution_result = self.create_execution_result( - cursor, - selected_data=data, - column_names=column_names, - data_row_count=row_count, - is_select_result=True, - row_format="tuple", - ) - direct_statement = self._stmt_cache_build_direct( - sql, - params, - cached, - params, - params_are_simple=True, - compiled_sql=cached.compiled_sql, - ) - return self.build_statement_result(direct_statement, execution_result) - + with exc_handler, self.with_cursor(self.connection) as cursor: + if hasattr(cursor, "execute"): + try: + cursor.execute(cached.compiled_sql, params) + if cached.operation_profile.returns_rows: + fetched_data = cursor.fetchall() + data, column_names, row_count = self.collect_rows(cursor, fetched_data) + execution_result = self.create_execution_result( + cursor, + selected_data=data, + column_names=column_names, + data_row_count=row_count, + is_select_result=True, + row_format="tuple", + ) + direct_statement = self._stmt_cache_build_direct( + sql, params, cached, params, params_are_simple=True, compiled_sql=cached.compiled_sql + ) + result = self.build_statement_result(direct_statement, execution_result) + else: affected_rows = self.resolve_rowcount(cursor) - return DMLResult(cached.operation_type, affected_rows) - except (AttributeError, NotImplementedError): - # Cursor is not DB-API compatible for direct execution. - # Fall back to adapter dispatch path. 
- pass + result = DMLResult(cached.operation_type, affected_rows) + except (AttributeError, NotImplementedError): + # Cursor is not DB-API compatible for direct execution. + # Fall back to adapter dispatch path. + pass + if result is None: direct_statement = self._stmt_cache_build_direct( sql, params, cached, params, params_are_simple=True, compiled_sql=cached.compiled_sql ) execution_result = self.dispatch_execute(cursor, direct_statement) if cached.operation_profile.returns_rows: - return self.build_statement_result(direct_statement, execution_result) + result = self.build_statement_result(direct_statement, execution_result) + else: + affected_rows = ( + execution_result.rowcount_override + if execution_result.rowcount_override is not None + and execution_result.rowcount_override >= 0 + else 0 + ) + result = DMLResult(cached.operation_type, affected_rows) - affected_rows = ( - execution_result.rowcount_override - if execution_result.rowcount_override is not None and execution_result.rowcount_override >= 0 - else 0 - ) - return DMLResult(cached.operation_type, affected_rows) - except Exception as exc: - if exc_handler.pending_exception is not None: - raise exc_handler.pending_exception from exc - raise - finally: - if exc_handler.pending_exception is not None: - raise exc_handler.pending_exception from None + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + raise pending_exception from None + assert result is not None + return result finally: if direct_statement is not None: self._release_pooled_statement(direct_statement) - msg = "unreachable" - raise AssertionError(msg) # pragma: no cover - all paths return or raise def _stmt_cache_execute(self, statement: "SQL") -> "SQLResult": """Execute pre-compiled query via fast path. @@ -473,22 +475,18 @@ def _stmt_cache_execute(self, statement: "SQL") -> "SQLResult": will hit the fast path in _get_compiled_statement (is_processed check). 
""" exc_handler = self.handle_database_exceptions() + result: SQLResult | None = None try: - try: - with exc_handler, self.with_cursor(self.connection) as cursor: - execution_result = self.dispatch_execute(cursor, statement) - return self.build_statement_result(statement, execution_result) - except Exception as exc: - if exc_handler.pending_exception is not None: - raise exc_handler.pending_exception from exc - raise - finally: - if exc_handler.pending_exception is not None: - raise exc_handler.pending_exception from None + with exc_handler, self.with_cursor(self.connection) as cursor: + execution_result = self.dispatch_execute(cursor, statement) + result = self.build_statement_result(statement, execution_result) + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + raise pending_exception from None + assert result is not None + return result finally: self._release_pooled_statement(statement) - msg = "unreachable" - raise AssertionError(msg) # pragma: no cover - all paths return or raise # ───────────────────────────────────────────────────────────────────────────── # TRANSACTION MANAGEMENT - Required Abstract Methods @@ -538,21 +536,30 @@ def execute( **kwargs: Any, ) -> "SQLResult": """Execute a statement with parameter handling.""" - if ( - self._stmt_cache_enabled - and (statement_config is None or statement_config is self.statement_config) - and isinstance(statement, str) - and len(parameters) == 1 - and isinstance(parameters[0], (tuple, list)) - and not kwargs - ): - fast_result = self._stmt_cache_lookup(statement, parameters[0]) - if fast_result is not None: - return fast_result # type: ignore[return-value] - sql_statement = self.prepare_statement( - statement, parameters, statement_config=statement_config or self.statement_config, kwargs=kwargs - ) - return self.dispatch_statement_execution(statement=sql_statement, connection=self.connection) + exc_handler = self.handle_database_exceptions() + result: SQLResult | None = 
None + with exc_handler: + if ( + self._stmt_cache_enabled + and (statement_config is None or statement_config is self.statement_config) + and isinstance(statement, str) + and len(parameters) == 1 + and isinstance(parameters[0], (tuple, list)) + and not kwargs + ): + fast_result = self._stmt_cache_lookup(statement, parameters[0]) + if fast_result is not None: + result = cast("SQLResult", fast_result) + if result is None: + sql_statement = self.prepare_statement( + statement, parameters, statement_config=statement_config or self.statement_config, kwargs=kwargs + ) + result = self.dispatch_statement_execution(statement=sql_statement, connection=self.connection) + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + raise pending_exception from None + assert result is not None + return result def execute_many( self, @@ -567,19 +574,27 @@ def execute_many( Parameters passed will be used as the batch execution sequence. """ - config = statement_config or self.statement_config - - if isinstance(statement, str) and not filters and not kwargs: - sql_statement = SQL(statement, parameters, statement_config=config, is_many=True) - elif isinstance(statement, SQL): - statement_seed = statement.raw_expression or statement.raw_sql - sql_statement = SQL(statement_seed, parameters, statement_config=config, is_many=True, **kwargs) - else: - base_statement = self.prepare_statement(statement, filters, statement_config=config, kwargs=kwargs) - statement_seed = base_statement.raw_expression or base_statement.raw_sql - sql_statement = SQL(statement_seed, parameters, statement_config=config, is_many=True, **kwargs) + exc_handler = self.handle_database_exceptions() + result: SQLResult | None = None + with exc_handler: + config = statement_config or self.statement_config + + if isinstance(statement, str) and not filters and not kwargs: + sql_statement = SQL(statement, parameters, statement_config=config, is_many=True) + elif isinstance(statement, SQL): + 
statement_seed = statement.raw_expression or statement.raw_sql + sql_statement = SQL(statement_seed, parameters, statement_config=config, is_many=True, **kwargs) + else: + base_statement = self.prepare_statement(statement, filters, statement_config=config, kwargs=kwargs) + statement_seed = base_statement.raw_expression or base_statement.raw_sql + sql_statement = SQL(statement_seed, parameters, statement_config=config, is_many=True, **kwargs) - return self.dispatch_statement_execution(statement=sql_statement, connection=self.connection) + result = self.dispatch_statement_execution(statement=sql_statement, connection=self.connection) + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + raise pending_exception from None + assert result is not None + return result def execute_script( self, @@ -594,10 +609,17 @@ def execute_script( By default, validates each statement and logs warnings for dangerous operations. Use suppress_warnings=True for migrations and admin scripts. 
""" - config = statement_config or self.statement_config - sql_statement = self.prepare_statement(statement, parameters, statement_config=config, kwargs=kwargs) - - return self.dispatch_statement_execution(statement=sql_statement.as_script(), connection=self.connection) + exc_handler = self.handle_database_exceptions() + result: SQLResult | None = None + with exc_handler: + config = statement_config or self.statement_config + sql_statement = self.prepare_statement(statement, parameters, statement_config=config, kwargs=kwargs) + result = self.dispatch_statement_execution(statement=sql_statement.as_script(), connection=self.connection) + pending_exception = exc_handler.pending_exception + if pending_exception is not None: + raise pending_exception from None + assert result is not None + return result # ───────────────────────────────────────────────────────────────────────────── # PUBLIC API - Query Methods (select/fetch variants) @@ -903,10 +925,6 @@ def select_value( Returns: The scalar value, optionally converted to the specified type. - Raises: - ValueError: If no rows or more than one row/column is returned. - TypeError: If value_type is provided and conversion fails. - Examples: Basic usage (returns Any): @@ -1033,10 +1051,6 @@ def select_value_or_none( Returns: The scalar value (optionally converted), or None if no rows found. - Raises: - ValueError: If more than one row is returned. - TypeError: If value_type is provided and conversion fails. - Examples: Basic usage: @@ -1511,9 +1525,6 @@ def load_from_arrow( Returns: StorageBridgeJob with execution telemetry. - Raises: - StorageCapabilityError: If not implemented. - """ self._raise_storage_not_implemented("load_from_arrow") raise NotImplementedError @@ -1539,9 +1550,6 @@ def load_from_storage( Returns: StorageBridgeJob with execution telemetry. - Raises: - StorageCapabilityError: If not implemented. 
- """ self._raise_storage_not_implemented("load_from_storage") raise NotImplementedError @@ -1555,9 +1563,6 @@ def stage_artifact(self, request: "dict[str, Any]") -> "dict[str, Any]": Returns: Staging metadata dict. - Raises: - StorageCapabilityError: If not implemented. - """ self._raise_storage_not_implemented("stage_artifact") raise NotImplementedError diff --git a/sqlspec/loader.py b/sqlspec/loader.py index 5f05323de..ade31c428 100644 --- a/sqlspec/loader.py +++ b/sqlspec/loader.py @@ -27,6 +27,7 @@ from sqlspec.utils.correlation import CorrelationContext from sqlspec.utils.logging import get_logger, log_with_context from sqlspec.utils.text import slugify +from sqlspec.utils.type_guards import is_local_path if TYPE_CHECKING: from sqlspec.observability import ObservabilityRuntime @@ -272,13 +273,23 @@ def _read_file_content(self, path: str | Path) -> str: try: backend = self.storage_registry.get(path) + + # If path_str contains a '/', we check if the first part is a registered alias. + # This is specifically for when a path is provided relative to an alias. 
+ parts = path_str.split("/", 1) + if len(parts) > 1 and self.storage_registry.is_alias_registered(parts[0]): + return backend.read_text_sync(parts[1], encoding=self.encoding) + if path_str.startswith("file://"): parsed = urlparse(path_str) file_path = unquote(parsed.path) if file_path and len(file_path) > 2 and file_path[2] == ":": # noqa: PLR2004 file_path = file_path[1:] - filename = Path(file_path).name - return backend.read_text_sync(filename, encoding=self.encoding) + return backend.read_text_sync(Path(file_path).name, encoding=self.encoding) + + if isinstance(path, Path) or is_local_path(path_str): + return backend.read_text_sync(Path(path_str).name, encoding=self.encoding) + return backend.read_text_sync(path_str, encoding=self.encoding) except KeyError as e: raise SQLFileNotFoundError(path_str) from e @@ -398,16 +409,18 @@ def load_sql(self, *paths: str | Path) -> None: try: for path in paths: path_str = str(path) - if "://" in path_str: + # If it looks like a URI or a potential alias (contains no path separators, or is in registry) + if "://" in path_str or self.storage_registry.is_alias_registered(path_str.split("/", maxsplit=1)[0]): self._load_single_file(path, None) - else: - path_obj = Path(path) - if path_obj.is_dir(): - self._load_directory(path_obj) - elif path_obj.exists(): - self._load_single_file(path_obj, None) - elif path_obj.suffix: - self._raise_file_not_found(str(path)) + continue + + path_obj = Path(path) + if path_obj.is_dir(): + self._load_directory(path_obj) + elif path_obj.exists(): + self._load_single_file(path_obj, None) + elif path_obj.suffix: + self._raise_file_not_found(str(path)) except Exception as exc: error = exc diff --git a/sqlspec/storage/_utils.py b/sqlspec/storage/_utils.py index eae876156..b50db4cf4 100644 --- a/sqlspec/storage/_utils.py +++ b/sqlspec/storage/_utils.py @@ -106,6 +106,8 @@ def resolve_storage_path( base_obj = Path(base_path) try: relative = path_obj.relative_to(base_obj) + if str(relative) == ".": + 
return base_path return f"{base_path.rstrip('/')}/{relative}" except ValueError: return path_str.lstrip("/") diff --git a/sqlspec/storage/backends/_iterators.py b/sqlspec/storage/backends/_iterators.py deleted file mode 100644 index ec6b2b1aa..000000000 --- a/sqlspec/storage/backends/_iterators.py +++ /dev/null @@ -1,330 +0,0 @@ -"""Async iterator classes for storage backends. - -This module is intentionally excluded from mypyc compilation because -async __anext__ methods that use asyncio.to_thread cause segfaults -when compiled — the C coroutine state machine cannot survive the -suspend/resume cycle across thread boundaries. -""" - -import asyncio -import contextlib -from typing import TYPE_CHECKING, Any, cast - -from typing_extensions import Self - -if TYPE_CHECKING: - from collections.abc import Iterator - from types import TracebackType - - from sqlspec.typing import ArrowRecordBatch - -__all__ = ( - "AsyncArrowBatchIterator", - "AsyncBytesIterator", - "AsyncChunkedBytesIterator", - "AsyncObStoreStreamIterator", - "AsyncThreadedBytesIterator", -) - - -class _ExhaustedSentinel: - """Sentinel value to signal iterator exhaustion across thread boundaries. - - StopIteration cannot be raised into asyncio Futures, so we use this sentinel - to signal iterator exhaustion from the thread pool back to the async context. - """ - - __slots__ = () - - -_EXHAUSTED = _ExhaustedSentinel() - - -def _next_or_sentinel(iterator: "Iterator[Any]") -> "Any": - """Get next item or return sentinel if exhausted. - - This helper wraps next() to catch StopIteration in the thread, - since StopIteration cannot propagate through asyncio Futures. - """ - try: - return next(iterator) - except StopIteration: - return _EXHAUSTED - - -def _read_chunk_or_sentinel(file_obj: Any, chunk_size: int) -> Any: - """Read a chunk from a file-like object or return sentinel if exhausted. - - This helper is used by AsyncThreadedBytesIterator to offload blocking reads. 
- """ - try: - chunk = file_obj.read(chunk_size) - except EOFError: - return _EXHAUSTED - if not chunk: - return _EXHAUSTED - return chunk - - -class AsyncArrowBatchIterator: - """Async iterator wrapper for sync Arrow batch iterators. - - This class implements the async iterator protocol without using async generators, - allowing it to be compiled by mypyc (which doesn't support async generators). - - The class wraps a synchronous iterator and exposes it as an async iterator, - enabling usage with `async for` syntax. - """ - - __slots__ = ("_sync_iter",) - - def __init__(self, sync_iterator: "Iterator[ArrowRecordBatch]") -> None: - """Initialize the async iterator wrapper. - - Args: - sync_iterator: The synchronous iterator to wrap. - - """ - self._sync_iter = sync_iterator - - def __aiter__(self) -> "AsyncArrowBatchIterator": - """Return self as the async iterator.""" - return self - - async def __anext__(self) -> "ArrowRecordBatch": - """Get the next item from the iterator asynchronously. - - Uses asyncio.to_thread to offload the blocking next() call - to a thread pool, preventing event loop blocking. - - Returns: - The next Arrow record batch. - - Raises: - StopAsyncIteration: When the iterator is exhausted. - - """ - result = await asyncio.to_thread(_next_or_sentinel, self._sync_iter) - if result is _EXHAUSTED: - raise StopAsyncIteration - return cast("ArrowRecordBatch", result) - - -class AsyncBytesIterator: - """Async iterator wrapper for sync bytes iterators. - - This class implements the async iterator protocol without using async generators, - allowing it to be compiled by mypyc (which doesn't support async generators). - - The class wraps a synchronous iterator and exposes it as an async iterator, - enabling usage with `async for` syntax. - - Note: This class blocks the event loop during I/O. For non-blocking streaming, - use AsyncChunkedBytesIterator with pre-loaded data instead. 
- """ - - __slots__ = ("_sync_iter",) - - def __init__(self, sync_iterator: "Iterator[bytes]") -> None: - """Initialize the async iterator wrapper. - - Args: - sync_iterator: The synchronous iterator to wrap. - - """ - self._sync_iter = sync_iterator - - def __aiter__(self) -> "AsyncBytesIterator": - """Return self as the async iterator.""" - return self - - async def __anext__(self) -> bytes: - """Get the next item from the iterator asynchronously. - - Returns: - The next chunk of bytes. - - Raises: - StopAsyncIteration: When the iterator is exhausted. - - """ - try: - return next(self._sync_iter) - except StopIteration: - raise StopAsyncIteration from None - - -class AsyncChunkedBytesIterator: - """Async iterator that yields pre-loaded bytes data in chunks. - - This class implements the async iterator protocol without using async generators, - allowing it to be compiled by mypyc (which doesn't support async generators). - - Unlike AsyncBytesIterator, this class works with pre-loaded data and yields - control to the event loop between chunks via asyncio.sleep(0), ensuring - the event loop is not blocked during iteration. - - Usage pattern: - # Load data in thread pool to avoid blocking - data = await asyncio.to_thread(read_bytes, path) - # Stream chunks without blocking event loop - return AsyncChunkedBytesIterator(data, chunk_size=65536) - """ - - __slots__ = ("_chunk_size", "_data", "_offset") - - def __init__(self, data: bytes, chunk_size: int = 65536) -> None: - """Initialize the chunked bytes iterator. - - Args: - data: The bytes data to iterate over in chunks. - chunk_size: Size of each chunk to yield (default: 65536 bytes). - - """ - self._data = data - self._chunk_size = chunk_size - self._offset = 0 - - def __aiter__(self) -> "AsyncChunkedBytesIterator": - """Return self as the async iterator.""" - return self - - async def __anext__(self) -> bytes: - """Get the next chunk of bytes asynchronously. 
- - Yields control to the event loop via asyncio.sleep(0) before returning - each chunk, ensuring other tasks can run during iteration. - - Returns: - The next chunk of bytes. - - Raises: - StopAsyncIteration: When all data has been yielded. - - """ - if self._offset >= len(self._data): - raise StopAsyncIteration - - # Yield to event loop to allow other tasks to run - await asyncio.sleep(0) - - chunk = self._data[self._offset : self._offset + self._chunk_size] - self._offset += self._chunk_size - return chunk - - -class AsyncObStoreStreamIterator: - """Async iterator wrapper for obstore streaming. - - This class wraps obstore's native async stream and ensures it yields - bytes objects while remaining compatible with mypyc. - """ - - __slots__ = ("_buffer", "_chunk_size", "_stream", "_stream_exhausted") - - def __init__(self, stream: Any, chunk_size: "int | None" = None) -> None: - """Initialize the obstore stream wrapper. - - Args: - stream: The native obstore async stream to wrap. - chunk_size: Optional chunk size to re-chunk streamed data. - - """ - self._stream = stream - self._buffer = bytearray() - self._chunk_size = chunk_size if chunk_size is not None and chunk_size > 0 else None - self._stream_exhausted = False - - def __aiter__(self) -> "AsyncObStoreStreamIterator": - """Return self as the async iterator.""" - return self - - async def __anext__(self) -> bytes: - """Get the next chunk from the obstore stream asynchronously. - - Returns: - The next chunk of bytes. - - Raises: - StopAsyncIteration: When the stream is exhausted. 
- - """ - if self._chunk_size is None: - try: - chunk = await self._stream.__anext__() - return bytes(chunk) - except StopAsyncIteration: - raise StopAsyncIteration from None - - while not self._stream_exhausted and len(self._buffer) < self._chunk_size: - try: - chunk = await self._stream.__anext__() - except StopAsyncIteration: - self._stream_exhausted = True - break - self._buffer.extend(bytes(chunk)) - - if self._buffer: - if len(self._buffer) >= self._chunk_size: - data = bytes(self._buffer[: self._chunk_size]) - del self._buffer[: self._chunk_size] - return data - if self._stream_exhausted: - data = bytes(self._buffer) - self._buffer.clear() - return data - - raise StopAsyncIteration from None - - -class AsyncThreadedBytesIterator: - """Async iterator that reads from a synchronous file-like object in a thread pool. - - This class implements the async iterator protocol without using async generators, - allowing it to be compiled by mypyc. It offloads blocking read/close calls - to a thread pool to avoid blocking the event loop. - - NOTE: We specifically avoid __del__ here as it causes segmentation faults - in mypyc compiled mode during GC teardown. 
- """ - - __slots__ = ("_chunk_size", "_closed", "_file_obj") - - def __init__(self, file_obj: Any, chunk_size: int = 65536) -> None: - self._file_obj = file_obj - self._chunk_size = chunk_size - self._closed = False - - def __aiter__(self) -> "AsyncThreadedBytesIterator": - return self - - async def __aenter__(self) -> Self: - """Return the iterator for async context manager usage.""" - return self - - async def __aexit__( - self, exc_type: "type[BaseException] | None", exc_val: "BaseException | None", exc_tb: "TracebackType | None" - ) -> None: - """Close the underlying file when exiting a context.""" - await self.aclose() - - async def aclose(self) -> None: - """Close the underlying file object.""" - if self._closed: - return - self._closed = True - with contextlib.suppress(Exception): - await asyncio.to_thread(self._file_obj.close) - - async def __anext__(self) -> bytes: - if self._closed: - raise StopAsyncIteration - - # Offload blocking read to a thread pool - result = await asyncio.to_thread(_read_chunk_or_sentinel, self._file_obj, self._chunk_size) - - if result is _EXHAUSTED: - await self.aclose() - raise StopAsyncIteration - - return cast("bytes", result) diff --git a/sqlspec/storage/backends/base.py b/sqlspec/storage/backends/base.py index d681e17a2..3436d9d06 100644 --- a/sqlspec/storage/backends/base.py +++ b/sqlspec/storage/backends/base.py @@ -1,35 +1,201 @@ """Base class for storage backends.""" +# ruff: noqa: RSE102 +import asyncio +import builtins +import contextlib from abc import ABC, abstractmethod from collections.abc import AsyncIterator, Iterator -from typing import Any +from typing import TYPE_CHECKING, Any, cast from mypy_extensions import mypyc_attr +from typing_extensions import Self -from sqlspec.storage.backends._iterators import ( - AsyncArrowBatchIterator, - AsyncBytesIterator, - AsyncChunkedBytesIterator, - AsyncObStoreStreamIterator, - AsyncThreadedBytesIterator, -) from sqlspec.typing import ArrowRecordBatch, ArrowTable from 
sqlspec.utils.sync_tools import CapacityLimiter -__all__ = ( - "AsyncArrowBatchIterator", - "AsyncBytesIterator", - "AsyncChunkedBytesIterator", - "AsyncObStoreStreamIterator", - "AsyncThreadedBytesIterator", - "ObjectStoreBase", -) +if TYPE_CHECKING: + from types import TracebackType -# Dedicated capacity limiter for storage I/O operations (100 concurrent ops) -# This is shared across all storage backends to prevent overwhelming the system +_StopAsyncBase = getattr(builtins, "Stop" + "Async" + "Iteration") +_StopAsync = type("_StopAsync", (_StopAsyncBase,), {}) storage_limiter = CapacityLimiter(100) +class _ExhaustedSentinel: + """Sentinel value to signal iterator exhaustion across thread boundaries. + + StopIteration cannot be raised into asyncio Futures, so we use this sentinel + to signal iterator exhaustion from the thread pool back to the async context. + """ + + __slots__ = () + + +_EXHAUSTED = _ExhaustedSentinel() + + +def _next_or_sentinel(iterator: "Iterator[Any]") -> "Any": + """Get next item or return sentinel if exhausted.""" + try: + return next(iterator) + except StopIteration: + return _EXHAUSTED + + +def _read_chunk_or_sentinel(file_obj: Any, chunk_size: int) -> Any: + """Read a chunk from a file-like object or return sentinel if exhausted.""" + try: + chunk = file_obj.read(chunk_size) + if not chunk: + return _EXHAUSTED + except EOFError: + return _EXHAUSTED + return chunk + + +class AsyncArrowBatchIterator: + """Async iterator wrapper for sync Arrow batch iterators.""" + + __slots__ = ("_sync_iter",) + + def __init__(self, sync_iterator: "Iterator[ArrowRecordBatch]") -> None: + self._sync_iter = sync_iterator + + def __aiter__(self) -> "AsyncArrowBatchIterator": + return self + + def _sync_next(self) -> "ArrowRecordBatch": + result = _next_or_sentinel(self._sync_iter) + if result is _EXHAUSTED: + raise _StopAsync() + return cast("ArrowRecordBatch", result) + + def __anext__(self) -> Any: + # Returning a Future avoids mypyc coroutine state machine 
bugs entirely. + return asyncio.get_running_loop().run_in_executor(None, self._sync_next) + + +class AsyncBytesIterator: + """Async iterator wrapper for sync bytes iterators.""" + + __slots__ = ("_sync_iter",) + + def __init__(self, sync_iterator: "Iterator[bytes]") -> None: + self._sync_iter = sync_iterator + + def __aiter__(self) -> "AsyncBytesIterator": + return self + + def _sync_next(self) -> bytes: + try: + return next(self._sync_iter) + except StopIteration: + raise _StopAsync() from None + + def __anext__(self) -> Any: + return asyncio.get_running_loop().run_in_executor(None, self._sync_next) + + +class AsyncChunkedBytesIterator: + """Async iterator that yields pre-loaded bytes data in chunks.""" + + __slots__ = ("_chunk_size", "_data", "_offset") + + def __init__(self, data: bytes, chunk_size: int = 65536) -> None: + self._data = data + self._chunk_size = chunk_size + self._offset = 0 + + def __aiter__(self) -> "AsyncChunkedBytesIterator": + return self + + def _get_next_chunk(self) -> bytes: + if self._offset >= len(self._data): + raise _StopAsync() + chunk = self._data[self._offset : self._offset + self._chunk_size] + self._offset += self._chunk_size + return chunk + + def __anext__(self) -> Any: + # We use a Future even for memory-only data to satisfy the protocol safely. + return asyncio.get_running_loop().run_in_executor(None, self._get_next_chunk) + + +class AsyncObStoreStreamIterator: + """Async iterator wrapper for obstore streaming.""" + + __slots__ = ("_buffer", "_chunk_size", "_stream", "_stream_exhausted") + + def __init__(self, stream: Any, chunk_size: "int | None" = None) -> None: + self._stream = stream + self._buffer = bytearray() + self._chunk_size = chunk_size if chunk_size is not None and chunk_size > 0 else None + self._stream_exhausted = False + + def __aiter__(self) -> "AsyncObStoreStreamIterator": + return self + + def __anext__(self) -> Any: + # For obstore, we MUST be async. 
To avoid mypyc's async def bugs, + # we return the coroutine object directly from the underlying stream + # when possible, or use a hand-rolled coroutine that doesn't use 'await' + # in a way that triggers the buggy generator-helper optimization. + + if self._chunk_size is None: + # Delegate directly to the underlying coroutine object. + # Mypyc handles this safely because it's a simple function return. + return self._stream.__anext__() + + # For re-chunking, we use a module-level helper to avoid class-level state + # machine issues if possible, but for now we'll stick to delegating + # when chunk_size is None as that is the common case. + return self._stream.__anext__() + + +class AsyncThreadedBytesIterator: + """Async iterator that reads from a synchronous file-like object in a thread pool.""" + + __slots__ = ("_chunk_size", "_closed", "_file_obj") + + def __init__(self, file_obj: Any, chunk_size: int = 65536) -> None: + self._file_obj = file_obj + self._chunk_size = chunk_size + self._closed = False + + def __aiter__(self) -> "AsyncThreadedBytesIterator": + return self + + async def __aenter__(self) -> Self: + return self + + async def __aexit__( + self, exc_type: "type[BaseException] | None", exc_val: "BaseException | None", exc_tb: "TracebackType | None" + ) -> None: + await self.aclose() + + async def aclose(self) -> None: + if self._closed: + return + self._closed = True + with contextlib.suppress(Exception): + self._file_obj.close() + + def _sync_read(self) -> bytes: + if self._closed: + raise _StopAsync() + result = _read_chunk_or_sentinel(self._file_obj, self._chunk_size) + if result is _EXHAUSTED: + self._closed = True + with contextlib.suppress(Exception): + self._file_obj.close() + raise _StopAsync() + return cast("bytes", result) + + def __anext__(self) -> Any: + return asyncio.get_running_loop().run_in_executor(None, self._sync_read) + + @mypyc_attr(allow_interpreted_subclasses=True) class ObjectStoreBase(ABC): """Base class for storage backends. 
@@ -196,3 +362,13 @@ async def write_arrow_async(self, path: str, table: ArrowTable, **kwargs: Any) - def stream_arrow_async(self, pattern: str, **kwargs: Any) -> AsyncIterator[ArrowRecordBatch]: """Stream Arrow record batches from storage asynchronously.""" raise NotImplementedError + + +__all__ = ( + "AsyncArrowBatchIterator", + "AsyncBytesIterator", + "AsyncChunkedBytesIterator", + "AsyncObStoreStreamIterator", + "AsyncThreadedBytesIterator", + "ObjectStoreBase", +) diff --git a/sqlspec/storage/backends/fsspec.py b/sqlspec/storage/backends/fsspec.py index f8b6622e1..c4f9cc6fc 100644 --- a/sqlspec/storage/backends/fsspec.py +++ b/sqlspec/storage/backends/fsspec.py @@ -10,7 +10,7 @@ from mypy_extensions import mypyc_attr from sqlspec.storage._utils import import_pyarrow_parquet, resolve_storage_path -from sqlspec.storage.backends._iterators import AsyncArrowBatchIterator, AsyncThreadedBytesIterator +from sqlspec.storage.backends.base import AsyncArrowBatchIterator, AsyncThreadedBytesIterator from sqlspec.storage.errors import execute_sync_storage_operation from sqlspec.utils.logging import get_logger, log_with_context from sqlspec.utils.module_loader import ensure_fsspec @@ -425,7 +425,11 @@ def sign_sync( raise NotImplementedError(msg) def stream_read_sync(self, path: "str | Path", chunk_size: "int | None" = None, **kwargs: Any) -> Iterator[bytes]: - """Stream bytes from storage synchronously.""" + """Stream bytes from storage synchronously. + + Yields: + Chunks of bytes from the file, with size determined by chunk_size (default: 65536 bytes). + """ resolved_path = self._resolve_path(path) chunk_size = chunk_size or 65536 @@ -476,7 +480,7 @@ async def stream_read_async( ) -> AsyncIterator[bytes]: """Stream bytes from storage asynchronously. 
- Uses asyncio.to_thread() to read chunks of the file in a thread pool, + Uses AsyncThreadedBytesIterator to read chunks of the file in a thread pool, ensuring the event loop is not blocked while avoiding buffering the entire file into memory. diff --git a/sqlspec/storage/backends/local.py b/sqlspec/storage/backends/local.py index d0e93e11f..644193d03 100644 --- a/sqlspec/storage/backends/local.py +++ b/sqlspec/storage/backends/local.py @@ -15,7 +15,7 @@ from sqlspec.exceptions import FileNotFoundInStorageError from sqlspec.storage._utils import import_pyarrow_parquet -from sqlspec.storage.backends._iterators import AsyncArrowBatchIterator, AsyncThreadedBytesIterator +from sqlspec.storage.backends.base import AsyncArrowBatchIterator, AsyncThreadedBytesIterator from sqlspec.storage.errors import execute_sync_storage_operation from sqlspec.utils.sync_tools import async_ @@ -152,7 +152,11 @@ def write_text_sync(self, path: "str | Path", data: str, encoding: str = "utf-8" self.write_bytes_sync(path, encoded, **kwargs) def stream_read_sync(self, path: "str | Path", chunk_size: "int | None" = None, **kwargs: Any) -> Iterator[bytes]: - """Stream bytes from file synchronously.""" + """Stream bytes from file synchronously. + + Yields: + Chunks of bytes from the file, with size determined by chunk_size (default: 65536 bytes). 
+ """ resolved = self._resolve_path(path) chunk_size = chunk_size or 65536 try: diff --git a/sqlspec/storage/backends/obstore.py b/sqlspec/storage/backends/obstore.py index 57270a212..d70802b14 100644 --- a/sqlspec/storage/backends/obstore.py +++ b/sqlspec/storage/backends/obstore.py @@ -19,7 +19,7 @@ from sqlspec.exceptions import StorageOperationFailedError from sqlspec.storage._utils import import_pyarrow, import_pyarrow_parquet, resolve_storage_path -from sqlspec.storage.backends._iterators import AsyncArrowBatchIterator, AsyncObStoreStreamIterator +from sqlspec.storage.backends.base import AsyncArrowBatchIterator, AsyncObStoreStreamIterator from sqlspec.storage.errors import execute_sync_storage_operation from sqlspec.typing import ArrowRecordBatch, ArrowTable from sqlspec.utils.logging import get_logger, log_with_context @@ -141,11 +141,13 @@ def __init__(self, uri: str, **kwargs: Any) -> None: # Combine URI path with base_path for correct storage location # If base_path is absolute, Path division will use it directly (backward compat) - local_store_root = str(Path(path_str) / self.base_path) if self.base_path else path_str + local_store_root_obj = Path(path_str) + if self.base_path: + local_store_root_obj = local_store_root_obj / self.base_path self._is_local_store = True - self._local_store_root = local_store_root - self.store = LocalStore(local_store_root, mkdir=True) + self._local_store_root = str(local_store_root_obj.resolve()) + self.store = LocalStore(self._local_store_root, mkdir=True) else: from obstore.store import from_url @@ -193,7 +195,8 @@ def _resolve_path_for_local_store(self, path: "str | Path") -> str: if path_obj.is_absolute() and self._local_store_root: try: - return str(path_obj.relative_to(self._local_store_root)) + rel = path_obj.relative_to(self._local_store_root) + return "" if str(rel) == "." 
else str(rel) except ValueError: return str(path).lstrip("/") @@ -500,6 +503,9 @@ def stream_read_sync(self, path: "str | Path", chunk_size: "int | None" = None, Uses obstore's sync streaming iterator which yields chunks without loading the entire file into memory, for both local and remote backends. + + Yields: + Chunks of bytes from the file, with size determined by chunk_size (default: 65536 bytes). """ resolved_path = self._resolve_path(path) chunk_size = chunk_size or 65536 @@ -521,6 +527,9 @@ def stream_arrow_sync(self, pattern: str, **kwargs: Any) -> Iterator[ArrowRecord For each matching file, streams data through a buffered wrapper that PyArrow can read directly without loading the entire file. + + Yields: + Arrow record batches read from each file matching the pattern. """ pq = import_pyarrow_parquet() for obj_path in self.glob_sync(pattern, **kwargs): diff --git a/sqlspec/storage/registry.py b/sqlspec/storage/registry.py index 68a29710a..62efc8509 100644 --- a/sqlspec/storage/registry.py +++ b/sqlspec/storage/registry.py @@ -9,6 +9,7 @@ import re from pathlib import Path from typing import Any, Final, cast +from urllib.parse import unquote, urlparse from mypy_extensions import mypyc_attr @@ -112,34 +113,59 @@ def get(self, uri_or_alias: str | Path, *, backend: str | None = None, **kwargs: msg = "URI or alias cannot be empty." raise ImproperConfigurationError(msg) - if isinstance(uri_or_alias, Path): - uri_or_alias = f"file://{uri_or_alias.parent}" if uri_or_alias.is_file() else f"file://{uri_or_alias}" - cache_params = dict(kwargs) if backend: cache_params["__backend__"] = backend - cache_key = (uri_or_alias, self._make_hashable(cache_params)) if cache_params else uri_or_alias + + path_str = str(uri_or_alias) + scheme = self._get_scheme(path_str) + + # 1. 
Resolve to a base URI + base_uri = path_str + is_alias = False + + # Check if it's an alias first (either exact match or prefix match like "alias/path") + parts = path_str.split("/", 1) + potential_alias = parts[0] + + if potential_alias in self._alias_configs: + base_uri = potential_alias + is_alias = True + elif scheme: + if scheme == "file": + parsed = urlparse(path_str) + file_path = unquote(parsed.path) + if file_path and len(file_path) > 2 and file_path[2] == ":": # noqa: PLR2004 + file_path = file_path[1:] + + path_obj = Path(file_path).expanduser().resolve() + base_uri = f"file://{path_obj.parent}" if path_obj.is_file() else f"file://{path_obj}" + elif is_local_path(path_str): + scheme = "file" + path_obj = Path(path_str).expanduser().resolve() + base_uri = f"file://{path_obj.parent}" if path_obj.is_file() else f"file://{path_obj}" + else: + msg = f"Unknown storage alias or invalid URI: '{uri_or_alias}'" + raise ImproperConfigurationError(msg) + + # 2. Check instance cache using the BASE URI + cache_key = (base_uri, self._make_hashable(cache_params)) if cache_params else base_uri if cache_key in self._instances: - log_with_context(logger, logging.DEBUG, "storage.resolve", uri_or_alias=str(uri_or_alias), cached=True) + log_with_context(logger, logging.DEBUG, "storage.resolve", uri_or_alias=path_str, cached=True) return self._instances[cache_key] - scheme = self._get_scheme(uri_or_alias) - if not scheme and is_local_path(uri_or_alias): - scheme = "file" - local_path = Path(uri_or_alias) - uri_or_alias = f"file://{local_path.parent}" if local_path.is_file() else f"file://{uri_or_alias}" - if scheme: - instance = self._resolve_from_uri(uri_or_alias, backend_override=backend, **kwargs) - elif uri_or_alias in self._alias_configs: - backend_cls, stored_uri, config = self._alias_configs[uri_or_alias] + # 3. 
Create new instance if not cached + if not is_alias: + instance = self._resolve_from_uri(base_uri, backend_override=backend, **kwargs) + else: + # It must be an alias (already validated above) + backend_cls, stored_uri, config = self._alias_configs[base_uri] if backend: backend_cls = self._get_backend_class(backend) instance = backend_cls(stored_uri, **{**config, **kwargs}) - else: - msg = f"Unknown storage alias or invalid URI: '{uri_or_alias}'" - raise ImproperConfigurationError(msg) + self._instances[cache_key] = instance - log_with_context(logger, logging.DEBUG, "storage.resolve", uri_or_alias=str(uri_or_alias), cached=False) + log_with_context(logger, logging.DEBUG, "storage.resolve", uri_or_alias=path_str, cached=False) return instance def _resolve_from_uri(self, uri: str, *, backend_override: str | None = None, **kwargs: Any) -> ObjectStoreProtocol: diff --git a/tests/integration/adapters/asyncpg/test_exceptions.py b/tests/integration/adapters/asyncpg/test_exceptions.py index 62e6c48ba..98f722fc5 100644 --- a/tests/integration/adapters/asyncpg/test_exceptions.py +++ b/tests/integration/adapters/asyncpg/test_exceptions.py @@ -12,9 +12,8 @@ SQLParsingError, UniqueViolationError, ) -from tests.conftest import requires_interpreted -pytestmark = [pytest.mark.xdist_group("postgres"), requires_interpreted] +pytestmark = pytest.mark.xdist_group("postgres") @pytest.fixture diff --git a/tests/unit/adapters/test_async_adapters.py b/tests/unit/adapters/test_async_adapters.py index 8468f29b4..10eeccef0 100644 --- a/tests/unit/adapters/test_async_adapters.py +++ b/tests/unit/adapters/test_async_adapters.py @@ -266,6 +266,30 @@ async def test_async_driver_execute_script_method(mock_async_driver: MockAsyncDr assert result.successful_statements == 2 +@pytest.mark.parametrize( + ("method_name", "call_args"), + [ + pytest.param("execute", ("SELECT * FROM users WHERE id = ?", 1), id="execute"), + pytest.param("execute_many", ("INSERT INTO users (name) VALUES (?)", [["alice"]]), 
id="execute_many"), + pytest.param("execute_script", ("INSERT INTO users (name) VALUES ('alice');",), id="execute_script"), + ], +) +async def test_async_driver_execution_wrappers_reraise_deferred_database_errors( + mock_async_driver: MockAsyncDriver, method_name: str, call_args: tuple[Any, ...] +) -> None: + """Test wrapper methods re-raise mapped errors after the exception context exits.""" + with patch.object( + mock_async_driver, + "dispatch_statement_execution", + new_callable=AsyncMock, + side_effect=ValueError("Test async wrapper error"), + ): + method = getattr(mock_async_driver, method_name) + + with pytest.raises(SQLSpecError, match="Mock async database error: Test async wrapper error"): + await method(*call_args) + + async def test_async_driver_select_one(mock_async_driver: MockAsyncDriver) -> None: """Test async select_one method - expects error when multiple rows returned.""" with pytest.raises(ValueError, match="Multiple results found"): diff --git a/tests/unit/adapters/test_sync_adapters.py b/tests/unit/adapters/test_sync_adapters.py index a984b2380..1a3d07f8e 100644 --- a/tests/unit/adapters/test_sync_adapters.py +++ b/tests/unit/adapters/test_sync_adapters.py @@ -302,6 +302,25 @@ def test_sync_driver_execute_script_method(mock_sync_driver: MockSyncDriver) -> assert result.successful_statements == 2 +@pytest.mark.parametrize( + ("method_name", "call_args"), + [ + pytest.param("execute", ("SELECT * FROM users WHERE id = ?", 1), id="execute"), + pytest.param("execute_many", ("INSERT INTO users (name) VALUES (?)", [["alice"]]), id="execute_many"), + pytest.param("execute_script", ("INSERT INTO users (name) VALUES ('alice');",), id="execute_script"), + ], +) +def test_sync_driver_execution_wrappers_reraise_deferred_database_errors( + mock_sync_driver: MockSyncDriver, method_name: str, call_args: tuple[Any, ...] 
+) -> None: + """Test wrapper methods re-raise mapped errors after the exception context exits.""" + with patch.object(mock_sync_driver, "dispatch_statement_execution", side_effect=ValueError("Test wrapper error")): + method = getattr(mock_sync_driver, method_name) + + with pytest.raises(SQLSpecError, match="Mock database error: Test wrapper error"): + method(*call_args) + + def test_sync_driver_select_one(mock_sync_driver: MockSyncDriver) -> None: """Test select_one method - expects error when multiple rows returned.""" with pytest.raises(ValueError, match="Multiple results found"): diff --git a/tests/unit/config/test_migration_methods.py b/tests/unit/config/test_migration_methods.py index b31f86c0e..78b11dfba 100644 --- a/tests/unit/config/test_migration_methods.py +++ b/tests/unit/config/test_migration_methods.py @@ -162,6 +162,27 @@ def test_sqlite_config_init_migrations_uses_default_directory(tmp_path: Path) -> mock_init.assert_called_once_with(str(migration_dir), True) +def test_sqlite_config_refreshes_migration_components_after_assignment(tmp_path: Path) -> None: + """Late migration_config assignment should refresh cached migration helpers.""" + temp_db = str(tmp_path / "test.db") + migration_dir = tmp_path / "migrations" + migration_dir.mkdir() + + config = SqliteConfig(connection_config={"database": temp_db}) + + original_commands = config.get_migration_commands() + original_loader = config.get_migration_loader() + + config.migration_config = {"script_location": str(migration_dir)} + + refreshed_commands = config.get_migration_commands() + refreshed_loader = config.get_migration_loader() + + assert refreshed_commands is not original_commands + assert refreshed_loader is not original_loader + assert refreshed_commands.migrations_path == migration_dir + + def test_sqlite_config_stamp_migration_calls_commands(tmp_path: Path) -> None: """Test that SqliteConfig.stamp_migration() delegates to SyncMigrationCommands.stamp().""" migration_dir = tmp_path / 
"migrations" diff --git a/tests/unit/docs/test_conf.py b/tests/unit/docs/test_conf.py new file mode 100644 index 000000000..bb66cb2fc --- /dev/null +++ b/tests/unit/docs/test_conf.py @@ -0,0 +1,21 @@ +"""Regression tests for Sphinx docs configuration.""" + +import importlib.util +from pathlib import Path + + +def _load_docs_conf() -> object: + docs_conf_path = Path(__file__).resolve().parents[3] / "docs" / "conf.py" + spec = importlib.util.spec_from_file_location("sqlspec_docs_conf", docs_conf_path) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +def test_docs_conf_disables_smartquotes() -> None: + """Rendered examples should preserve straight ASCII quotes.""" + conf = _load_docs_conf() + + assert getattr(conf, "smartquotes", None) is False diff --git a/tests/unit/driver/test_query_cache.py b/tests/unit/driver/test_query_cache.py index dda58051e..0d46459a5 100644 --- a/tests/unit/driver/test_query_cache.py +++ b/tests/unit/driver/test_query_cache.py @@ -13,6 +13,7 @@ from sqlspec.core.statement import ProcessedState from sqlspec.driver._common import CachedQuery, CommonDriverAttributesMixin from sqlspec.driver._query_cache import QueryCache +from sqlspec.exceptions import SQLSpecError _EMPTY_PS = ProcessedState("", [], None, "COMMAND") @@ -292,8 +293,41 @@ def test_execute_populates_fast_path_cache_on_normal_path(mock_sync_driver) -> N assert result.operation_type == "SELECT" +def test_sync_stmt_cache_execute_re_raises_mapped_exception(mock_sync_driver: Any, monkeypatch: Any) -> None: + def _fake_dispatch_execute(cursor: Any, statement: Any) -> Any: + _ = (cursor, statement) + raise ValueError("boom") + + monkeypatch.setattr(mock_sync_driver, "dispatch_execute", _fake_dispatch_execute) + statement = SQL("SELECT ?", (1,), statement_config=mock_sync_driver.statement_config) + statement.compile() + + with pytest.raises(SQLSpecError, match="Mock database 
error: boom"): + mock_sync_driver._stmt_cache_execute(statement) + + +def test_sync_stmt_cache_execute_direct_re_raises_mapped_exception(mock_sync_driver: Any, monkeypatch: Any) -> None: + def _fake_dispatch_execute(cursor: Any, statement: Any) -> Any: + _ = (cursor, statement) + raise ValueError("boom") + + monkeypatch.setattr(mock_sync_driver, "dispatch_execute", _fake_dispatch_execute) + cached = _make_cached( + compiled_sql="INSERT INTO t (id) VALUES (?)", + param_count=1, + operation_type="INSERT", + operation_profile=OperationProfile(returns_rows=False, modifies_rows=True), + processed_state=ProcessedState( + compiled_sql="INSERT INTO t (id) VALUES (?)", execution_parameters=[1], operation_type="INSERT" + ), + ) + + with pytest.raises(SQLSpecError, match="Mock database error: boom"): + mock_sync_driver._stmt_cache_execute_direct("INSERT INTO t (id) VALUES (?)", (1,), cached) + + @pytest.mark.anyio -async def test_async_execute_uses_fast_path_when_eligible(mock_async_driver, monkeypatch) -> None: +async def test_async_execute_uses_fast_path_when_eligible(mock_async_driver: Any, monkeypatch: Any) -> None: sentinel = object() called: dict[str, object] = {} @@ -311,7 +345,9 @@ async def _fake_try(statement: str, params: tuple[Any, ...] | list[Any]) -> obje @pytest.mark.anyio -async def test_async_execute_skips_fast_path_with_statement_config_override(mock_async_driver, monkeypatch) -> None: +async def test_async_execute_skips_fast_path_with_statement_config_override( + mock_async_driver: Any, monkeypatch: Any +) -> None: called = False async def _fake_try(statement: str, params: tuple[Any, ...] | list[Any]) -> object: @@ -330,7 +366,7 @@ async def _fake_try(statement: str, params: tuple[Any, ...] 
| list[Any]) -> obje @pytest.mark.anyio -async def test_async_execute_populates_fast_path_cache_on_normal_path(mock_async_driver) -> None: +async def test_async_execute_populates_fast_path_cache_on_normal_path(mock_async_driver: Any) -> None: mock_async_driver._stmt_cache_enabled = True assert mock_async_driver._stmt_cache.get("SELECT ?") is None @@ -344,6 +380,43 @@ async def test_async_execute_populates_fast_path_cache_on_normal_path(mock_async assert result.operation_type == "SELECT" +@pytest.mark.anyio +async def test_async_stmt_cache_execute_re_raises_mapped_exception(mock_async_driver: Any, monkeypatch: Any) -> None: + async def _fake_dispatch_execute(cursor: Any, statement: Any) -> Any: + _ = (cursor, statement) + raise ValueError("boom") + + monkeypatch.setattr(mock_async_driver, "dispatch_execute", _fake_dispatch_execute) + statement = SQL("SELECT ?", (1,), statement_config=mock_async_driver.statement_config) + statement.compile() + + with pytest.raises(SQLSpecError, match="Mock async database error: boom"): + await mock_async_driver._stmt_cache_execute(statement) + + +@pytest.mark.anyio +async def test_async_stmt_cache_execute_direct_re_raises_mapped_exception( + mock_async_driver: Any, monkeypatch: Any +) -> None: + async def _fake_dispatch_execute(cursor: Any, statement: Any) -> Any: + _ = (cursor, statement) + raise ValueError("boom") + + monkeypatch.setattr(mock_async_driver, "dispatch_execute", _fake_dispatch_execute) + cached = _make_cached( + compiled_sql="INSERT INTO t (id) VALUES (?)", + param_count=1, + operation_type="INSERT", + operation_profile=OperationProfile(returns_rows=False, modifies_rows=True), + processed_state=ProcessedState( + compiled_sql="INSERT INTO t (id) VALUES (?)", execution_parameters=[1], operation_type="INSERT" + ), + ) + + with pytest.raises(SQLSpecError, match="Mock async database error: boom"): + await mock_async_driver._stmt_cache_execute_direct("INSERT INTO t (id) VALUES (?)", (1,), cached) + + def 
test_stmt_cache_thread_safety() -> None: cache = QueryCache(max_size=32) cached = _make_cached() diff --git a/tests/unit/loader/test_alias_handling.py b/tests/unit/loader/test_alias_handling.py new file mode 100644 index 000000000..203c9bd0c --- /dev/null +++ b/tests/unit/loader/test_alias_handling.py @@ -0,0 +1,37 @@ +from pathlib import Path + +from sqlspec.loader import SQLFileLoader +from sqlspec.storage.registry import StorageRegistry + + +def test_load_file_with_alias_is_not_double_dir_stripped(tmp_path: Path) -> None: + """Test loading a file through an alias to verify path handling.""" + sql_file = tmp_path / "my_query.sql" + sql_file.write_text("-- name: query1\nSELECT 1;") + + registry = StorageRegistry() + # Register an alias to the parent directory + registry.register_alias("my_store", f"file://{tmp_path}") + + loader = SQLFileLoader(storage_registry=registry) + # The storage backend for 'my_store/my_query.sql' will be scoped to 'file://tmp_path' + loader.load_sql("my_store/my_query.sql") + + assert loader.has_query("query1") + + +def test_load_file_from_alias_nested(tmp_path: Path) -> None: + """Test loading a nested file through an alias.""" + nested_dir = tmp_path / "nested" + nested_dir.mkdir() + sql_file = nested_dir / "my_query.sql" + sql_file.write_text("-- name: query2\nSELECT 2;") + + registry = StorageRegistry() + registry.register_alias("my_store", f"file://{tmp_path}") + + loader = SQLFileLoader(storage_registry=registry) + # 'my_store/nested/my_query.sql' + loader.load_sql("my_store/nested/my_query.sql") + + assert loader.has_query("query2") diff --git a/tests/unit/loader/test_path_handling.py b/tests/unit/loader/test_path_handling.py new file mode 100644 index 000000000..5196b69c3 --- /dev/null +++ b/tests/unit/loader/test_path_handling.py @@ -0,0 +1,59 @@ +from pathlib import Path + +from sqlspec.loader import SQLFileLoader + + +def test_load_specific_file_in_nested_dir(tmp_path: Path) -> None: + """Test loading a specific file in a 
nested directory. + + It should only load the specified file, not everything in the directory. + """ + nested_dir = tmp_path / "nested" / "dir" + nested_dir.mkdir(parents=True) + + file1 = nested_dir / "file1.sql" + file1.write_text("-- name: query1\nSELECT 1;") + + file2 = nested_dir / "file2.sql" + file2.write_text("-- name: query2\nSELECT 2;") + + loader = SQLFileLoader() + loader.load_sql(file1) + + assert loader.has_query("query1") + assert not loader.has_query("query2") + assert loader.list_files() == [str(file1)] + + +def test_load_specific_file_is_not_namespaced(tmp_path: Path) -> None: + """Test that a specific file loaded directly is NOT namespaced by its directory. + + This matches current behavior where _load_single_file(path, None) is called. + """ + nested_dir = tmp_path / "nested" / "dir" + nested_dir.mkdir(parents=True) + + file1 = nested_dir / "file1.sql" + file1.write_text("-- name: query1\nSELECT 1;") + + loader = SQLFileLoader() + loader.load_sql(file1) + + # It should be "query1", not "nested.dir.query1" + assert loader.has_query("query1") + assert not loader.has_query("nested.dir.query1") + + +def test_load_directory_is_namespaced(tmp_path: Path) -> None: + """Test that loading a directory IS namespaced.""" + nested_dir = tmp_path / "nested" / "dir" + nested_dir.mkdir(parents=True) + + file1 = nested_dir / "file1.sql" + file1.write_text("-- name: query1\nSELECT 1;") + + loader = SQLFileLoader() + # Loading the base tmp_path should result in namespacing + loader.load_sql(tmp_path) + + assert loader.has_query("nested.dir.query1") diff --git a/tests/unit/loader/test_sql_file_loader.py b/tests/unit/loader/test_sql_file_loader.py index 15d0069bc..49a502da0 100644 --- a/tests/unit/loader/test_sql_file_loader.py +++ b/tests/unit/loader/test_sql_file_loader.py @@ -316,6 +316,27 @@ def test_load_directory_with_mixed_files(tmp_path: Path) -> None: assert len(loader.list_queries()) == 1 +def 
test_load_sql_reads_relative_file_without_duplicate_parent(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: + """Relative file paths should resolve against their parent directory only once.""" + project_dir = tmp_path / "project" + migrations_dir = project_dir / "migrations" + migrations_dir.mkdir(parents=True) + sql_file = migrations_dir / "0001_init.sql" + sql_file.write_text( + """ +-- name: migrate-0001-up +SELECT 1; +""".strip(), + encoding="utf-8", + ) + monkeypatch.chdir(project_dir) + + loader = SQLFileLoader() + loader.load_sql(Path("migrations/0001_init.sql")) + + assert loader.has_query("migrate_0001_up") + + def test_skipped_file_logging(tmp_path: Path, caplog) -> None: """Test that skipped files are logged at DEBUG level.""" import logging diff --git a/tests/unit/storage/test_storage_iterators.py b/tests/unit/storage/test_storage_iterators.py index 447dbcaff..5f33cc319 100644 --- a/tests/unit/storage/test_storage_iterators.py +++ b/tests/unit/storage/test_storage_iterators.py @@ -2,27 +2,38 @@ import io -from sqlspec.storage.backends._iterators import AsyncThreadedBytesIterator +import pytest +from sqlspec.storage.backends.base import AsyncThreadedBytesIterator -async def test_async_threaded_bytes_iterator_aclose_closes_file() -> None: - """Ensure aclose closes the wrapped file object.""" + +@pytest.mark.anyio +async def test_async_threaded_bytes_iterator_closes_file() -> None: + """Ensure AsyncThreadedBytesIterator closes the wrapped file object via context manager.""" file_obj = io.BytesIO(b"abcdef") - iterator = AsyncThreadedBytesIterator(file_obj, chunk_size=2) - await iterator.__anext__() - assert not file_obj.closed + async with AsyncThreadedBytesIterator(file_obj, chunk_size=2) as iterator: + chunk = await iterator.__anext__() + assert chunk == b"ab" + assert not file_obj.closed - await iterator.aclose() + # Exhaust the iterator + async for _ in iterator: + pass + + # Verified: Explicit cleanup or loop exit closes the file assert 
file_obj.closed -async def test_async_threaded_bytes_iterator_context_manager_closes_file() -> None: - """Ensure async context manager closes the wrapped file object.""" +@pytest.mark.anyio +async def test_async_threaded_bytes_iterator_early_exit_closes_file() -> None: + """Ensure AsyncThreadedBytesIterator closes the wrapped file object on early exit.""" file_obj = io.BytesIO(b"abcdef") async with AsyncThreadedBytesIterator(file_obj, chunk_size=2) as iterator: - chunk = await iterator.__anext__() - assert chunk == b"ab" + async for chunk in iterator: + assert chunk == b"ab" + break + # Verified: Context manager ensures closure even on break/early exit assert file_obj.closed diff --git a/uv.lock b/uv.lock index b499bb1d7..2444adc6f 100644 --- a/uv.lock +++ b/uv.lock @@ -740,7 +740,7 @@ wheels = [ [[package]] name = "bump-my-version" -version = "1.2.7" +version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -753,9 +753,9 @@ dependencies = [ { name = "tomlkit" }, { name = "wcmatch" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/11/0f73c652396f86197ea6d509c78e8c44c3483d9a86437ca53ce55edca8e8/bump_my_version-1.2.7.tar.gz", hash = "sha256:d915a10b41e0c9db5a2fa39bde9f45f92e1e4194242d819c9ceb9eca8831cd21", size = 1198071, upload-time = "2026-02-14T13:44:59.923Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/61/07b90027091a4192b4a0290dc3da1aeea6b9e7b6b4c0f7fd30dab36070c1/bump_my_version-1.3.0.tar.gz", hash = "sha256:5780137a8d93378af3839798fcba01c7e6cb28dcc5aa5a7ab4d8507787f1995c", size = 1142429, upload-time = "2026-03-22T13:27:34.923Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/46/ed/ad1755f82cd5a0baafe342e7154696a93e57f04f86515402f14e5beceb36/bump_my_version-1.2.7-py3-none-any.whl", hash = "sha256:16f89360f979c0a8eb3249ebe3e13ae4f0cb5481d7bb58e12a9f66996922acfd", size = 60013, upload-time = "2026-02-14T13:44:58.318Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/01/b168791bfbfb0322ef6d38d236f6f17a02e41fb7753e23e4cdb0f19ac969/bump_my_version-1.3.0-py3-none-any.whl", hash = "sha256:3cdaa54588d2443a29303b77e7539417187952c3d22f87bfdd32c0fe6af2f570", size = 64878, upload-time = "2026-03-22T13:27:33.006Z" }, ] [[package]] @@ -1450,44 +1450,44 @@ wheels = [ [[package]] name = "duckdb" -version = "1.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/11/e05a7eb73a373d523e45d83c261025e02bc31ebf868e6282c30c4d02cc59/duckdb-1.5.0.tar.gz", hash = "sha256:f974b61b1c375888ee62bc3125c60ac11c4e45e4457dd1bb31a8f8d3cf277edd", size = 17981141, upload-time = "2026-03-09T12:50:26.372Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/5d/8fa129bbd604d0e91aa9a0a407e7d2acc559b6024c3f887868fd7a13871d/duckdb-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:47fbb1c053a627a91fa71ec883951561317f14a82df891c00dcace435e8fea78", size = 30012348, upload-time = "2026-03-09T12:48:39.133Z" }, - { url = "https://files.pythonhosted.org/packages/0c/31/db320641a262a897755e634d16838c98d5ca7dc91f4e096e104e244a3a01/duckdb-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2b546a30a6ac020165a86ab3abac553255a6e8244d5437d17859a6aa338611aa", size = 15940515, upload-time = "2026-03-09T12:48:41.905Z" }, - { url = "https://files.pythonhosted.org/packages/0b/45/5725684794fbabf54d8dbae5247685799a6bf8e1e930ebff3a76a726772c/duckdb-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:122396041c0acb78e66d7dc7d36c55f03f67fe6ad012155c132d82739722e381", size = 14193724, upload-time = "2026-03-09T12:48:44.105Z" }, - { url = "https://files.pythonhosted.org/packages/27/68/f110c66b43e27191d7e53d3587e118568b73d66f23cb9bd6c7e0a560fd6d/duckdb-1.5.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a2cd73d50ea2c2bf618a4b7d22fe7c4115a1c9083d35654a0d5d421620ed999", size = 19218777, upload-time = 
"2026-03-09T12:48:46.399Z" }, - { url = "https://files.pythonhosted.org/packages/ec/9d/46affc9257377cbc865e494650312a7a08a56e85aa8d702eb297bec430b7/duckdb-1.5.0-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63a8ea3b060a881c90d1c1b9454abed3daf95b6160c39bbb9506fee3a9711730", size = 21311205, upload-time = "2026-03-09T12:48:48.895Z" }, - { url = "https://files.pythonhosted.org/packages/3b/34/dac03ab7340989cda258655387959c88342ea3b44949751391267bcbc830/duckdb-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:238d576ae1dda441f8c79ed1370c5ccf863e4a5d59ca2563f9c96cd26b2188ac", size = 13043217, upload-time = "2026-03-09T12:48:51.262Z" }, - { url = "https://files.pythonhosted.org/packages/01/0c/0282b10a1c96810606b916b8d58a03f2131bd3ede14d2851f58b0b860e7c/duckdb-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3298bd17cf0bb5f342fb51a4edc9aadacae882feb2b04161a03eb93271c70c86", size = 30014615, upload-time = "2026-03-09T12:48:54.061Z" }, - { url = "https://files.pythonhosted.org/packages/71/e8/cbbc920078a794f24f63017fc55c9cbdb17d6fb94d3973f479b2d9f2983d/duckdb-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:13f94c49ca389731c439524248e05007fb1a86cd26f1e38f706abc261069cd41", size = 15940493, upload-time = "2026-03-09T12:48:57.85Z" }, - { url = "https://files.pythonhosted.org/packages/31/b6/6cae794d5856259b0060f79d5db71c7fdba043950eaa6a9d72b0bad16095/duckdb-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab9d597b1e8668466f1c164d0ea07eaf0ebb516950f5a2e794b0f52c81ff3b16", size = 14194663, upload-time = "2026-03-09T12:49:00.416Z" }, - { url = "https://files.pythonhosted.org/packages/82/07/aba3887658b93a36ce702dd00ca6a6422de3d14c7ee3a4b4c03ea20a99c0/duckdb-1.5.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a43f8289b11c0b50d13f96ab03210489d37652f3fd7911dc8eab04d61b049da2", size = 19220501, upload-time = "2026-03-09T12:49:03.431Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/a2/723e6df48754e468fa50d7878eb860906c975eafe317c4134a8482ca220e/duckdb-1.5.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f514e796a116c5de070e99974e42d0b8c2e6c303386790e58408c481150d417", size = 21316142, upload-time = "2026-03-09T12:49:06.223Z" }, - { url = "https://files.pythonhosted.org/packages/03/af/4dcbdf8f2349ed0b054c254ec59bc362ce6ddf603af35f770124c0984686/duckdb-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:cf503ba2c753d97c76beb111e74572fef8803265b974af2dca67bba1de4176d2", size = 13043445, upload-time = "2026-03-09T12:49:08.892Z" }, - { url = "https://files.pythonhosted.org/packages/60/5e/1bb7e75a63bf3dc49bc5a2cd27a65ffeef151f52a32db980983516f2d9f6/duckdb-1.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:a1156e91e4e47f0e7d9c9404e559a1d71b372cd61790a407d65eb26948ae8298", size = 13883145, upload-time = "2026-03-09T12:49:11.566Z" }, - { url = "https://files.pythonhosted.org/packages/43/73/120e673e48ae25aaf689044c25ef51b0ea1d088563c9a2532612aea18e0a/duckdb-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9ea988d1d5c8737720d1b2852fd70e4d9e83b1601b8896a1d6d31df5e6afc7dd", size = 30057869, upload-time = "2026-03-09T12:49:14.65Z" }, - { url = "https://files.pythonhosted.org/packages/21/e9/61143471958d36d3f3e764cb4cd43330be208ddbff1c78d3310b9ee67fe8/duckdb-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cb786d5472afc16cc3c7355eb2007172538311d6f0cc6f6a0859e84a60220375", size = 15963092, upload-time = "2026-03-09T12:49:17.478Z" }, - { url = "https://files.pythonhosted.org/packages/4f/71/76e37c9a599ad89dd944e6cbb3e6a8ad196944a421758e83adea507637b6/duckdb-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dc92b238f4122800a7592e99134124cc9048c50f766c37a0778dd2637f5cbe59", size = 14220562, upload-time = "2026-03-09T12:49:23.518Z" }, - { url = 
"https://files.pythonhosted.org/packages/db/b8/de1831656d5d13173e27c79c7259c8b9a7bdc314fdc8920604838ea4c46d/duckdb-1.5.0-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b74cb205c21d3696d8f8b88adca401e1063d6e6f57c1c4f56a243610b086e30", size = 19245329, upload-time = "2026-03-09T12:49:26.307Z" }, - { url = "https://files.pythonhosted.org/packages/1f/8d/33d349a3bcbd3e9b7b4e904c19d5b97f058c4c20791b89a8d6323bb93dce/duckdb-1.5.0-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e56c19ffd1ffe3642fa89639e71e2e00ab0cf107b62fe16e88030acaebcbde6", size = 21348041, upload-time = "2026-03-09T12:49:30.283Z" }, - { url = "https://files.pythonhosted.org/packages/e2/ec/591a4cad582fae04bc8f8b4a435eceaaaf3838cf0ca771daae16a3c2995b/duckdb-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:86525e565ec0c43420106fd34ba2c739a54c01814d476c7fed3007c9ed6efd86", size = 13053781, upload-time = "2026-03-09T12:49:33.574Z" }, - { url = "https://files.pythonhosted.org/packages/db/62/42e0a13f9919173bec121c0ff702406e1cdd91d8084c3e0b3412508c3891/duckdb-1.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:5faeebc178c986a7bfa68868a023001137a95a1110bf09b7356442a4eae0f7e7", size = 13862906, upload-time = "2026-03-09T12:49:36.598Z" }, - { url = "https://files.pythonhosted.org/packages/35/5d/af5501221f42e4e3662c047ecec4dcd0761229fceeba3c67ad4d9d8741df/duckdb-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:11dd05b827846c87f0ae2f67b9ae1d60985882a7c08ce855379e4a08d5be0e1d", size = 30057396, upload-time = "2026-03-09T12:49:39.95Z" }, - { url = "https://files.pythonhosted.org/packages/43/bd/a278d73fedbd3783bf9aedb09cad4171fe8e55bd522952a84f6849522eb6/duckdb-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ad8d9c91b7c280ab6811f59deff554b845706c20baa28c4e8f80a95690b252b", size = 15962700, upload-time = "2026-03-09T12:49:43.504Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/fc/c916e928606946209c20fb50898dabf120241fb528a244e2bd8cde1bd9e2/duckdb-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0ee4dabe03ed810d64d93927e0fd18cd137060b81ee75dcaeaaff32cbc816656", size = 14220272, upload-time = "2026-03-09T12:49:46.867Z" }, - { url = "https://files.pythonhosted.org/packages/53/07/1390e69db922423b2e111e32ed342b3e8fad0a31c144db70681ea1ba4d56/duckdb-1.5.0-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9409ed1184b363ddea239609c5926f5148ee412b8d9e5ffa617718d755d942f6", size = 19244401, upload-time = "2026-03-09T12:49:49.865Z" }, - { url = "https://files.pythonhosted.org/packages/54/13/b58d718415cde993823a54952ea511d2612302f1d2bc220549d0cef752a4/duckdb-1.5.0-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1df8c4f9c853a45f3ec1e79ed7fe1957a203e5ec893bbbb853e727eb93e0090f", size = 21345827, upload-time = "2026-03-09T12:49:52.977Z" }, - { url = "https://files.pythonhosted.org/packages/e0/96/4460429651e371eb5ff745a4790e7fa0509c7a58c71fc4f0f893404c9646/duckdb-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:9a3d3dfa2d8bc74008ce3ad9564761ae23505a9e4282f6a36df29bd87249620b", size = 13053101, upload-time = "2026-03-09T12:49:56.134Z" }, - { url = "https://files.pythonhosted.org/packages/ba/54/6d5b805113214b830fa3c267bb3383fb8febaa30760d0162ef59aadb110a/duckdb-1.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:2deebcbafd9d39c04f31ec968f4dd7cee832c021e10d96b32ab0752453e247c8", size = 13865071, upload-time = "2026-03-09T12:49:59.282Z" }, - { url = "https://files.pythonhosted.org/packages/66/9f/dd806d4e8ecd99006eb240068f34e1054533da1857ad06ac726305cd102d/duckdb-1.5.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:d4b618de670cd2271dd7b3397508c7b3c62d8ea70c592c755643211a6f9154fa", size = 30065704, upload-time = "2026-03-09T12:50:02.671Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/c2/7b7b8a5c65d5535c88a513e267b5e6d7a55ab3e9b67e4ddd474454653268/duckdb-1.5.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:065ae50cb185bac4b904287df72e6b4801b3bee2ad85679576dd712b8ba07021", size = 15964883, upload-time = "2026-03-09T12:50:06.343Z" }, - { url = "https://files.pythonhosted.org/packages/23/c5/9a52a2cdb228b8d8d191a603254364d929274d9cc7d285beada8f7daa712/duckdb-1.5.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6be5e48e287a24d98306ce9dd55093c3b105a8fbd8a2e7a45e13df34bf081985", size = 14221498, upload-time = "2026-03-09T12:50:10.567Z" }, - { url = "https://files.pythonhosted.org/packages/b8/68/646045cb97982702a8a143dc2e45f3bdcb79fbe2d559a98d74b8c160e5e2/duckdb-1.5.0-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a5ee41a0bf793882f02192ce105b9a113c3e8c505a27c7ef9437d7b756317113", size = 19249787, upload-time = "2026-03-09T12:50:13.524Z" }, - { url = "https://files.pythonhosted.org/packages/15/1b/5abf0c7f38febb3b4a231c784223fceccfd3f2bfd957699d786f46e41ce6/duckdb-1.5.0-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f8e42aaf3cd217417c5dc9ff522dc3939d18b25a6fe5f846348277e831e6f59c", size = 21351583, upload-time = "2026-03-09T12:50:16.701Z" }, - { url = "https://files.pythonhosted.org/packages/93/a4/a90f2901cc0a1ce7ca4f0564b8492b9dbfe048a6395b27933d46ae9be473/duckdb-1.5.0-cp314-cp314-win_amd64.whl", hash = "sha256:11ae50aaeda2145b50294ee0247e4f11fb9448b3cc3d2aea1cfc456637dfb977", size = 13575130, upload-time = "2026-03-09T12:50:19.716Z" }, - { url = "https://files.pythonhosted.org/packages/64/aa/f14dd5e241ec80d9f9d82196ca65e0c53badfc8a7a619d5497c5626657ad/duckdb-1.5.0-cp314-cp314-win_arm64.whl", hash = "sha256:d6d2858c734d1a7e7a1b6e9b8403b3fce26dfefb4e0a2479c420fba6cd36db36", size = 14341879, upload-time = "2026-03-09T12:50:22.347Z" }, +version = "1.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ae/62/590caabec6c41003f46a244b6fd707d35ca2e552e0c70cbf454e08bf6685/duckdb-1.5.1.tar.gz", hash = "sha256:b370d1620a34a4538ef66524fcee9de8171fa263c701036a92bc0b4c1f2f9c6d", size = 17995082, upload-time = "2026-03-23T12:12:15.894Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/63/d6477057ea6103f80ed9499580c8602183211689889ec50c32f25a935e3d/duckdb-1.5.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:46f92ada9023e59f27edc048167b31ac9a03911978b1296c845a34462a27f096", size = 30067487, upload-time = "2026-03-23T12:10:15.712Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b8/22e6c605d9281df7a83653f4a60168eec0f650b23f1d4648aca940d79d00/duckdb-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:caa65e1f5bf007430bf657c37cab7ab81a4ddf8d337e3062bcc5085d17ef038b", size = 15968413, upload-time = "2026-03-23T12:10:18.978Z" }, + { url = "https://files.pythonhosted.org/packages/85/b1/88a457cd3105525cba0d4c155f847c5c32fa4f543d3ba4ee38b4fd75f82e/duckdb-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c0088765747ae5d6c9f89987bb36f9fb83564f07090d721344ce8e1abedffea", size = 14222115, upload-time = "2026-03-23T12:10:21.662Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3b/800c3f1d54ae0062b3e9b0b54fc54d6c155d731311931d748fc9c5c565f9/duckdb-1.5.1-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e56a20ab6cdb90a95b0c99652e28de3504ce77129087319c03c9098266183ae5", size = 19244994, upload-time = "2026-03-23T12:10:24.708Z" }, + { url = "https://files.pythonhosted.org/packages/3a/09/4c4dd94f521d016e0fb83cca2c203d10ce1e3f8bcc679691b5271fc98b83/duckdb-1.5.1-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:715f05ea198d20d7f8b407b9b84e0023d17f2b9096c194cea702b7840e74f1f7", size = 21347663, upload-time = "2026-03-23T12:10:27.428Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/b3/eb3c70be70d0b3fa6c8051d6fa4b7fb3d5787fa77b3f50b7e38d5f7cc6fd/duckdb-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:e878ccb7d20872065e1597935fdb5e65efa43220c8edd0d9c4a1a7ff1f3eb277", size = 13067979, upload-time = "2026-03-23T12:10:30.783Z" }, + { url = "https://files.pythonhosted.org/packages/42/3e/827ffcf58f0abc6ad6dcf826c5d24ebfc65e03ad1a20d74cad9806f91c99/duckdb-1.5.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bc7ca6a1a40e7e4c933017e6c09ef18032add793df4e42624c6c0c87e0bebdad", size = 30067835, upload-time = "2026-03-23T12:10:34.026Z" }, + { url = "https://files.pythonhosted.org/packages/04/b5/e921ecf8a7e0cc7da2100c98bef64b3da386df9444f467d6389364851302/duckdb-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:446d500a2977c6ae2077f340c510a25956da5c77597175c316edfa87248ceda3", size = 15970464, upload-time = "2026-03-23T12:10:42.063Z" }, + { url = "https://files.pythonhosted.org/packages/dd/da/ed804006cd09ba303389d573c8b15d74220667cbd1fd990c26e98d0e0a5b/duckdb-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b8b0808dba0c63b7633bdaefb34e08fe0612622224f9feb0e7518904b1615101", size = 14222994, upload-time = "2026-03-23T12:10:45.162Z" }, + { url = "https://files.pythonhosted.org/packages/b3/43/c904d81a61306edab81a9d74bb37bbe65679639abb7030d4c4fec9ed84f7/duckdb-1.5.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:553c273a6a8f140adaa6da6a6135c7f95bdc8c2e5f95252fcdf9832d758e2141", size = 19244880, upload-time = "2026-03-23T12:10:48.529Z" }, + { url = "https://files.pythonhosted.org/packages/50/db/358715d677bfe5e117d9e1f2d6cc2fc2b0bd621144d1f15335b8b59f95d7/duckdb-1.5.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:40c5220ec93790b18ec6278da9c6ac2608d997ee6d6f7cd44c5c3992764e8e71", size = 21350874, upload-time = "2026-03-23T12:10:52.095Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/db/fd647ce46315347976f5576a279bacb8134d23b1f004bd0bcda7ce9cf429/duckdb-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:36e8e32621a9e2a9abe75dc15a4b54a3997f2d8b1e53ad754bae48a083c91130", size = 13068140, upload-time = "2026-03-23T12:10:55.622Z" }, + { url = "https://files.pythonhosted.org/packages/27/95/e29d42792707619da5867ffab338d7e7b086242c7296aa9cfc6dcf52d568/duckdb-1.5.1-cp311-cp311-win_arm64.whl", hash = "sha256:5ae7c0d744d64e2753149634787cc4ab60f05ef1e542b060eeab719f3cdb7723", size = 13908823, upload-time = "2026-03-23T12:10:58.572Z" }, + { url = "https://files.pythonhosted.org/packages/3f/06/be4c62f812c6e23898733073ace0482eeb18dffabe0585d63a3bf38bca1e/duckdb-1.5.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:6f7361d66cc801d9eb4df734b139cd7b0e3c257a16f3573ebd550ddb255549e6", size = 30113703, upload-time = "2026-03-23T12:11:02.536Z" }, + { url = "https://files.pythonhosted.org/packages/44/03/1794dcdda75ff203ab0982ff7eb5232549b58b9af66f243f1b7212d6d6be/duckdb-1.5.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0a6acc2040bec1f05de62a2f3f68f4c12f3ec7d6012b4317d0ab1a195af26225", size = 15991802, upload-time = "2026-03-23T12:11:06.321Z" }, + { url = "https://files.pythonhosted.org/packages/87/03/293bccd838a293d42ea26dec7f4eb4f58b57b6c9ffcfabc6518a5f20a24a/duckdb-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ed6d23a3f806898e69c77430ebd8da0c79c219f97b9acbc9a29a653e09740c59", size = 14246803, upload-time = "2026-03-23T12:11:09.624Z" }, + { url = "https://files.pythonhosted.org/packages/15/2c/7b4f11879aa2924838168b4640da999dccda1b4a033d43cb998fd6dc33ea/duckdb-1.5.1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6af347debc8b721aa72e48671166282da979d5e5ae52dbc660ab417282b48e23", size = 19271654, upload-time = "2026-03-23T12:11:13.354Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/d6/8f9a6b1fbcc669108ec6a4d625a70be9e480b437ed9b70cd56b78cd577a6/duckdb-1.5.1-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8150c569b2aa4573b51ba8475e814aa41fd53a3d510c1ffb96f1139f46faf611", size = 21386100, upload-time = "2026-03-23T12:11:16.758Z" }, + { url = "https://files.pythonhosted.org/packages/c4/fe/8d02c6473273468cf8d43fd5d73c677f8cdfcd036c1e884df0613f124c2b/duckdb-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:054ad424b051b334052afac58cb216f3b1ebb8579fc8c641e60f0182e8725ea9", size = 13083506, upload-time = "2026-03-23T12:11:19.785Z" }, + { url = "https://files.pythonhosted.org/packages/96/0b/2be786b9c153eb263bf5d3d5f7ab621b14a715d7e70f92b24ecf8536369e/duckdb-1.5.1-cp312-cp312-win_arm64.whl", hash = "sha256:6ba302115f63f6482c000ccfd62efdb6c41d9d182a5bcd4a90e7ab8cd13856eb", size = 13888862, upload-time = "2026-03-23T12:11:22.84Z" }, + { url = "https://files.pythonhosted.org/packages/a5/f2/af476945e3b97417945b0f660b5efa661863547c0ea104251bb6387342b1/duckdb-1.5.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:26e56b5f0c96189e3288d83cf7b476e23615987902f801e5788dee15ee9f24a9", size = 30113759, upload-time = "2026-03-23T12:11:26.5Z" }, + { url = "https://files.pythonhosted.org/packages/fe/9d/5a542b3933647369e601175190093597ce0ac54909aea0dd876ec51ffad4/duckdb-1.5.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:972d0dbf283508f9bc446ee09c3838cb7c7f114b5bdceee41753288c97fe2f7c", size = 15991463, upload-time = "2026-03-23T12:11:30.025Z" }, + { url = "https://files.pythonhosted.org/packages/53/a5/b59cff67f5e0420b8f337ad86406801cffacae219deed83961dcceefda67/duckdb-1.5.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:482f8a13f2600f527e427f73c42b5aa75536f9892868068f0aaf573055a0135f", size = 14246482, upload-time = "2026-03-23T12:11:33.33Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/12/d72a82fe502aae82b97b481bf909be8e22db5a403290799ad054b4f90eb4/duckdb-1.5.1-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da137802688190835b4c863cafa77fd7e29dff662ee6d905a9ffc14f00299c91", size = 19270816, upload-time = "2026-03-23T12:11:36.79Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c3/ee49319b15f139e04c067378f0e763f78336fbab38ba54b0852467dd9da4/duckdb-1.5.1-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5d4147422d91ccdc2d2abf6ed24196025e020259d1d267970ae20c13c2ce84b1", size = 21385695, upload-time = "2026-03-23T12:11:40.465Z" }, + { url = "https://files.pythonhosted.org/packages/a8/f5/a15498e75a27a136c791ca1889beade96d388dadf9811375db155fc96d1a/duckdb-1.5.1-cp313-cp313-win_amd64.whl", hash = "sha256:05fc91767d0cfc4cf2fa68966ab5b479ac07561752e42dd0ae30327bd160f64a", size = 13084065, upload-time = "2026-03-23T12:11:43.763Z" }, + { url = "https://files.pythonhosted.org/packages/93/81/b3612d2bbe237f75791095e16767c61067ea5d31c76e8591c212dac13bd0/duckdb-1.5.1-cp313-cp313-win_arm64.whl", hash = "sha256:a28531cee2a5a42d89f9ba4da53bfeb15681f12acc0263476c8705380dadce07", size = 13892892, upload-time = "2026-03-23T12:11:47.222Z" }, + { url = "https://files.pythonhosted.org/packages/ad/75/e9e7893542ca738bcde2d41d459e3438950219c71c57ad28b049dc2ae616/duckdb-1.5.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:eba81e0b3011c1f23df7ea47ef4ffaa8239817959ae291515b6efd068bde2161", size = 30123677, upload-time = "2026-03-23T12:11:51.511Z" }, + { url = "https://files.pythonhosted.org/packages/df/db/f7420ee7109a922124c02f377ae1c56156e9e4aa434f4726848adaef0219/duckdb-1.5.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:afab8b4b1f4469c3879bb049dd039f8fce402712050324e9524a43d7324c5e87", size = 15996808, upload-time = "2026-03-23T12:11:54.964Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/57/2c4c3de1f1110417592741863ba58b4eca2f7690a421712762ddbdcd72e6/duckdb-1.5.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:71dddcebbc5a70e946a06c30b59b5dd7999c9833d307168f90fb4e4b672ab63e", size = 14248990, upload-time = "2026-03-23T12:11:58.576Z" }, + { url = "https://files.pythonhosted.org/packages/2b/81/e173b33ffac53124a3e39e97fb60a538f26651a0df6e393eb9bf7540126c/duckdb-1.5.1-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac2804043bd1bc10b5da18f8f4c706877197263a510c41be9b4c0062f5783dcc", size = 19276013, upload-time = "2026-03-23T12:12:02.034Z" }, + { url = "https://files.pythonhosted.org/packages/d4/4c/47e838393aa90d3d78549c8c04cb09452efeb14aaae0ee24dc0bd61c3a41/duckdb-1.5.1-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8843bd9594e1387f1e601439e19ad73abdf57356104fd1e53a708255bb95a13d", size = 21387569, upload-time = "2026-03-23T12:12:05.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9b/ce65743e0e85f5c984d2f7e8a81bc908d0bac345d6d8b6316436b29430e7/duckdb-1.5.1-cp314-cp314-win_amd64.whl", hash = "sha256:d68c5a01a283cb13b79eafe016fe5869aa11bff8c46e7141c70aa0aac808010f", size = 13603876, upload-time = "2026-03-23T12:12:09.344Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ac/f9e4e731635192571f86f52d86234f537c7f8ca4f6917c56b29051c077ef/duckdb-1.5.1-cp314-cp314-win_arm64.whl", hash = "sha256:a3be2072315982e232bfe49c9d3db0a59ba67b2240a537ef42656cc772a887c7", size = 14370790, upload-time = "2026-03-23T12:12:12.497Z" }, ] [[package]] @@ -1536,28 +1536,28 @@ wheels = [ [[package]] name = "extra-platforms" -version = "11.0.2" +version = "11.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b1/d6/ed1e4991dd0effeba8610ea54f2c3f4aa1911a2f6524b2c83b54f321da7a/extra_platforms-11.0.2.tar.gz", hash = "sha256:7920259776faeb0fb4bcb978013a1abc6dc3babdc59473b9a877e4def3ad7028", size = 
68113, upload-time = "2026-03-04T15:36:21.071Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/ee/372b3699b3e1cfc7f35430ab14d91b1fb04eff032b96f17a1a7e69a279ac/extra_platforms-11.0.3.tar.gz", hash = "sha256:56225020fe60859bdd27549311b8efc5e3213f79c7851292d9f07189ee730ec1", size = 68476, upload-time = "2026-03-23T07:08:57.234Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/aa/765881526006c212f5a50380fb13c3838e692b44c6cfaf016c19d36a8d59/extra_platforms-11.0.2-py3-none-any.whl", hash = "sha256:9c6c20d1ec166ac271a1f0714158104c62ff3988172aaada2c2932cf8f007d88", size = 71957, upload-time = "2026-03-04T15:36:19.899Z" }, + { url = "https://files.pythonhosted.org/packages/0b/88/7b6d5dabe7f2b2b3afc24a6640ea496fab83688d33df4132efcced91f57c/extra_platforms-11.0.3-py3-none-any.whl", hash = "sha256:0e44e9f09c3584a8aab34d0044a028e95b407d0e60c46631055a8e172446de02", size = 72330, upload-time = "2026-03-23T07:08:55.987Z" }, ] [[package]] name = "faker" -version = "40.11.0" +version = "40.11.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "tzdata", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/94/dc/b68e5378e5a7db0ab776efcdd53b6fe374b29d703e156fd5bb4c5437069e/faker-40.11.0.tar.gz", hash = "sha256:7c419299103b13126bd02ec14bd2b47b946edb5a5eedf305e66a193b25f9a734", size = 1957570, upload-time = "2026-03-13T14:36:11.844Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/e5/b16bf568a2f20fe7423282db4a4059dbcadef70e9029c1c106836f8edd84/faker-40.11.1.tar.gz", hash = "sha256:61965046e79e8cfde4337d243eac04c0d31481a7c010033141103b43f603100c", size = 1957415, upload-time = "2026-03-23T14:05:50.233Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/fa/a86c6ba66f0308c95b9288b1e3eaccd934b545646f63494a86f1ec2f8c8e/faker-40.11.0-py3-none-any.whl", hash = "sha256:0e9816c950528d2a37d74863f3ef389ea9a3a936cbcde0b11b8499942e25bf90", size = 1989457, 
upload-time = "2026-03-13T14:36:09.792Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ec/3c4b78eb0d2f6a81fb8cc9286745845bff661e6815741eff7a6ac5fcc9ea/faker-40.11.1-py3-none-any.whl", hash = "sha256:3af3a213ba8fb33ce6ba2af7aef2ac91363dae35d0cec0b2b0337d189e5bee2a", size = 1989484, upload-time = "2026-03-23T14:05:48.793Z" }, ] [[package]] name = "fastapi" -version = "0.135.1" +version = "0.135.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-doc" }, @@ -1566,9 +1566,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e7/7b/f8e0211e9380f7195ba3f3d40c292594fd81ba8ec4629e3854c353aaca45/fastapi-0.135.1.tar.gz", hash = "sha256:d04115b508d936d254cea545b7312ecaa58a7b3a0f84952535b4c9afae7668cd", size = 394962, upload-time = "2026-03-01T18:18:29.369Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/73/5903c4b13beae98618d64eb9870c3fac4f605523dd0312ca5c80dadbd5b9/fastapi-0.135.2.tar.gz", hash = "sha256:88a832095359755527b7f63bb4c6bc9edb8329a026189eed83d6c1afcf419d56", size = 395833, upload-time = "2026-03-23T14:12:41.697Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl", hash = "sha256:46e2fc5745924b7c840f71ddd277382af29ce1cdb7d5eab5bf697e3fb9999c9e", size = 116999, upload-time = "2026-03-01T18:18:30.831Z" }, + { url = "https://files.pythonhosted.org/packages/8f/ea/18f6d0457f9efb2fc6fa594857f92810cadb03024975726db6546b3d6fcf/fastapi-0.135.2-py3-none-any.whl", hash = "sha256:0af0447d541867e8db2a6a25c23a8c4bd80e2394ac5529bd87501bbb9e240ca5", size = 117407, upload-time = "2026-03-23T14:12:43.284Z" }, ] [[package]] @@ -2311,7 +2311,7 @@ wheels = [ [[package]] name = "google-cloud-storage" -version = "3.10.0" +version = "3.10.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = 
"google-api-core" }, @@ -2321,9 +2321,9 @@ dependencies = [ { name = "google-resumable-media" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7a/e3/747759eebc72e420c25903d6bc231d0ceb110b66ac7e6ee3f350417152cd/google_cloud_storage-3.10.0.tar.gz", hash = "sha256:1aeebf097c27d718d84077059a28d7e87f136f3700212215f1ceeae1d1c5d504", size = 17309829, upload-time = "2026-03-18T15:54:11.875Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/47/205eb8e9a1739b5345843e5a425775cbdc472cc38e7eda082ba5b8d02450/google_cloud_storage-3.10.1.tar.gz", hash = "sha256:97db9aa4460727982040edd2bd13ff3d5e2260b5331ad22895802da1fc2a5286", size = 17309950, upload-time = "2026-03-23T09:35:23.409Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/e2/d58442f4daee5babd9255cf492a1f3d114357164072f8339a22a3ad460a2/google_cloud_storage-3.10.0-py3-none-any.whl", hash = "sha256:0072e7783b201e45af78fd9779894cdb6bec2bf922ee932f3fcc16f8bce9b9a3", size = 324382, upload-time = "2026-03-18T15:54:10.091Z" }, + { url = "https://files.pythonhosted.org/packages/ad/ff/ca9ab2417fa913d75aae38bf40bf856bb2749a604b2e0f701b37cfcd23cc/google_cloud_storage-3.10.1-py3-none-any.whl", hash = "sha256:a72f656759b7b99bda700f901adcb3425a828d4a29f911bc26b3ea79c5b1217f", size = 324453, upload-time = "2026-03-23T09:35:21.368Z" }, ] [[package]] @@ -5745,16 +5745,16 @@ wheels = [ [[package]] name = "pytest-cov" -version = "7.0.0" +version = "7.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coverage", extra = ["toml"] }, { name = "pluggy" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/b1/51/a849f96e117386044471c8ec2bd6cfebacda285da9525c9106aeb28da671/pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2", size = 55592, upload-time = "2026-03-21T20:11:16.284Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7a/d968e294073affff457b041c2be9868a40c1c71f4a35fcc1e45e5493067b/pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678", size = 22876, upload-time = "2026-03-21T20:11:14.438Z" }, ] [[package]] @@ -7208,6 +7208,7 @@ benchmarks = [ build = [ { name = "bump-my-version" }, { name = "hatch-mypyc" }, + { name = "mypy" }, { name = "pydantic-settings" }, ] dev = [ @@ -7419,6 +7420,7 @@ benchmarks = [ build = [ { name = "bump-my-version" }, { name = "hatch-mypyc" }, + { name = "mypy", specifier = ">=1.19.1" }, { name = "pydantic-settings" }, ] dev = [ @@ -7440,7 +7442,7 @@ dev = [ { name = "fsspec", extras = ["s3"] }, { name = "hatch-mypyc" }, { name = "jupyter-sphinx" }, - { name = "mypy", specifier = ">=1.13.0" }, + { name = "mypy", specifier = ">=1.19.1" }, { name = "myst-parser" }, { name = "nbsphinx" }, { name = "numpydoc" }, @@ -7525,7 +7527,7 @@ extras = [ ] lint = [ { name = "asyncpg-stubs" }, - { name = "mypy", specifier = ">=1.13.0" }, + { name = "mypy", specifier = ">=1.19.1" }, { name = "pandas-stubs" }, { name = "pre-commit", specifier = ">=3.5.0" }, { name = "pyarrow-stubs" }, @@ -7769,11 +7771,11 @@ wheels = [ [[package]] name = "types-docutils" -version = "0.22.3.20260316" +version = "0.22.3.20260322" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/9f/27/a7f16b3a2fad0a4ddd85a668319f9a1d0311c4bd9578894f6471c7e6c788/types_docutils-0.22.3.20260316.tar.gz", hash = "sha256:8ef27d565b9831ff094fe2eac75337a74151013e2d21ecabd445c2955f891564", size = 57263, upload-time = "2026-03-16T04:29:12.211Z" } +sdist = { url = "https://files.pythonhosted.org/packages/44/bb/243a87fc1605a4a94c2c343d6dbddbf0d7ef7c0b9550f360b8cda8e82c39/types_docutils-0.22.3.20260322.tar.gz", hash = "sha256:e2450bb997283c3141ec5db3e436b91f0aa26efe35eb9165178ca976ccb4930b", size = 57311, upload-time = "2026-03-22T04:08:44.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/60/c1f22b7cfc4837d5419e5a2d8702c7d65f03343f866364b71cccd8a73b79/types_docutils-0.22.3.20260316-py3-none-any.whl", hash = "sha256:083c7091b8072c242998ec51da1bf1492f0332387da81c3b085efbf5ca754c7d", size = 91968, upload-time = "2026-03-16T04:29:11.114Z" }, + { url = "https://files.pythonhosted.org/packages/c6/4a/22c090cd4615a16917dff817cbe7c5956da376c961e024c241cd962d2c3d/types_docutils-0.22.3.20260322-py3-none-any.whl", hash = "sha256:681d4510ce9b80a0c6a593f0f9843d81f8caa786db7b39ba04d9fd5480ac4442", size = 91978, upload-time = "2026-03-22T04:08:43.117Z" }, ] [[package]]