diff --git a/DESCRIPTION.md b/DESCRIPTION.md index 36e3c5f49..6a415cf0f 100644 --- a/DESCRIPTION.md +++ b/DESCRIPTION.md @@ -11,7 +11,7 @@ Source code is also available at: https://github.com/snowflakedb/snowflake-conne - Added support for async I/O. Asynchronous version of connector is available via `snowflake.connector.aio` module. - Added `SnowflakeCursor.stats` property to expose granular DML statistics (rows inserted, deleted, updated, and duplicates) for operations like CTAS where `rowcount` is insufficient. - Added support for injecting SPCS service identifier token (`SPCS_TOKEN`) into login requests when present in SPCS containers. - + - Introduced shared library for extended telemetry to identify and prepare testing platform for native rust extensions. - v4.1.1(TBD) - Relaxed pandas dependency requirements for Python below 3.12. - Changed CRL cache cleanup background task to daemon to avoid blocking main thread. diff --git a/MANIFEST.in b/MANIFEST.in index e5701d149..bb568f2b6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -13,6 +13,9 @@ exclude src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_arrow_iter exclude src/snowflake/connector/nanoarrow_cpp/scripts/.clang-format exclude src/snowflake/connector/nanoarrow_cpp/scripts/format.sh +include src/snowflake/connector/minicore/__init__.py +recursive-include src/snowflake/connector/minicore *.so *.dll *.dylib *.a *.h *.lib + exclude .git-blame-ignore-revs exclude .pre-commit-config.yaml exclude license_header.txt diff --git a/ci/build_linux.sh b/ci/build_linux.sh index f12717ec4..da90612de 100755 --- a/ci/build_linux.sh +++ b/ci/build_linux.sh @@ -4,7 +4,7 @@ # NOTES: # - This is designed to ONLY be called in our build docker image # - To compile only a specific version(s) pass in versions like: `./build_linux.sh "3.9 3.10"` -set -o pipefail +set -ox pipefail U_WIDTH=16 PYTHON_VERSIONS="${1:-3.9 3.10 3.11 3.12 3.13}" @@ -21,6 +21,39 @@ if [ -d "${DIST_DIR}" ]; then fi mkdir -p ${REPAIRED_DIR} +# 
Clean up unnecessary minicore directories for the current platform +# This ensures only relevant binary files are included in the wheel +MINICORE_DIR="${CONNECTOR_DIR}/src/snowflake/connector/minicore" +arch=$(uname -m) + +# Determine libc type (glibc or musl) +if ldd --version 2>&1 | grep -qi musl; then + libc_type="musl" +else + libc_type="glibc" +fi + +# Determine which directory to keep based on architecture and libc +if [[ $arch == "x86_64" ]]; then + keep_dir="linux_x86_64_${libc_type}" +elif [[ $arch == "aarch64" ]]; then + keep_dir="linux_aarch64_${libc_type}" +else + echo "[WARN] Unknown architecture: $arch, not cleaning minicore directories" + keep_dir="" +fi + +if [[ -n "$keep_dir" && -d "${MINICORE_DIR}" ]]; then + echo "[Info] Cleaning minicore directories, keeping only ${keep_dir}" + for dir in "${MINICORE_DIR}"/*/; do + dir_name=$(basename "$dir") + if [[ "$dir_name" != "$keep_dir" && "$dir_name" != "__pycache__" ]]; then + echo "[Info] Removing minicore/${dir_name}" + rm -rf "$dir" + fi + done +fi + # Necessary for cpython_path source /home/user/multibuild/manylinux_utils.sh @@ -39,6 +72,7 @@ for PYTHON_VERSION in ${PYTHON_VERSIONS}; do ${PYTHON} -m build --outdir ${BUILD_DIR} . # On Linux we should repair wheel(s) generated arch=$(uname -p) +auditwheel show ${BUILD_DIR}/*.whl if [[ $arch == x86_64 ]]; then auditwheel repair --plat manylinux2014_x86_64 ${BUILD_DIR}/*.whl -w ${REPAIRED_DIR} else diff --git a/ci/download_minicore.py b/ci/download_minicore.py new file mode 100755 index 000000000..a38f6e663 --- /dev/null +++ b/ci/download_minicore.py @@ -0,0 +1,246 @@ +#!/usr/bin/env python3 +""" +Download minicore binary for the current platform. +Designed to be used by cibuildwheel during wheel building. 
+ +Usage: + python scripts/download_minicore.py [VERSION] + +Environment variables: + MINICORE_VERSION - Version to download (default: 0.0.1) + MINICORE_OUTPUT_DIR - Output directory (default: src/snowflake/connector/minicore) +""" + +from __future__ import annotations + +import os +import platform +import sys +import tarfile +import tempfile +from pathlib import Path +from urllib.error import HTTPError, URLError +from urllib.request import Request, urlopen + +# Configuration +BASE_URL = "https://sfc-repo.snowflakecomputing.com/minicore" +DEFAULT_VERSION = "0.0.1" + +# Target directory for minicore module (relative to repo root) +MINICORE_MODULE_PATH = Path("src/snowflake/connector/minicore") + + +def get_repo_root() -> Path: + """Get the repository root directory.""" + current = Path(__file__).resolve().parent + while current != current.parent: + if (current / "pyproject.toml").exists() or (current / "setup.py").exists(): + return current + current = current.parent + return Path(__file__).resolve().parent.parent + + +def detect_os() -> str: + """Detect the operating system.""" + system = platform.system().lower() + if system == "linux": + return "linux" + elif system == "darwin": + return "macos" + elif system == "windows": + return "windows" + elif system == "aix": + return "aix" + else: + return "unknown" + + +def detect_arch() -> str: + """Detect the CPU architecture.""" + machine = platform.machine().lower() + if machine in ("x86_64", "amd64"): + return "x86_64" + elif machine in ("aarch64", "arm64"): + return "aarch64" + elif machine in ("i686", "i386", "x86"): + return "i686" + elif machine == "ppc64": + return "ppc64" + else: + return "unknown" + + +def detect_libc() -> str: + """Detect libc type on Linux (glibc vs musl).""" + if detect_os() != "linux": + return "" + + # Check if we're on Alpine/musl + if Path("/etc/alpine-release").exists(): + return "musl" + + # Check for musl by looking at the libc library + try: + import subprocess + + result = 
subprocess.run( + ["ldd", "--version"], + capture_output=True, + text=True, + ) + if "musl" in result.stdout.lower() or "musl" in result.stderr.lower(): + return "musl" + except Exception: + pass + + # Default to glibc + return "glibc" + + +def get_platform_dir(os_name: str, arch: str) -> str: + """Build platform directory name for URL.""" + if os_name == "linux": + return f"linux_{arch}" + elif os_name == "macos": + return f"mac_{arch}" + elif os_name == "windows": + return f"windows_{arch}" + elif os_name == "aix": + return f"aix_{arch}" + else: + return "" + + +def get_filename_arch(os_name: str, arch: str, libc: str) -> str: + """Build filename architecture component.""" + if os_name == "linux": + return f"linux-{arch}-{libc}" + elif os_name == "macos": + return f"macos-{arch}" + elif os_name == "windows": + return f"windows-{arch}" + elif os_name == "aix": + return f"aix-{arch}" + else: + return "" + + +def build_download_url(platform_dir: str, filename_arch: str, version: str) -> str: + """Build the download URL.""" + filename = f"sf_mini_core_{filename_arch}_{version}.tar.gz" + return f"{BASE_URL}/{platform_dir}/{version}/{filename}" + + +def download_file(url: str, dest_path: Path) -> None: + """Download a file from URL to destination path.""" + print(f"Downloading: {url}") + request = Request(url, headers={"User-Agent": "Python/minicore-downloader"}) + try: + with urlopen(request, timeout=60) as response: + content = response.read() + dest_path.write_bytes(content) + file_size_mb = len(content) / (1024 * 1024) + print(f"Downloaded {file_size_mb:.2f} MB") + except HTTPError as e: + print(f"HTTP Error {e.code}: {e.reason}", file=sys.stderr) + raise + except URLError as e: + print(f"URL Error: {e.reason}", file=sys.stderr) + raise + + +def extract_tar_gz(tar_path: Path, extract_to: Path) -> None: + """Extract a tar.gz file to the specified directory.""" + print(f"Extracting to: {extract_to}") + extract_to.mkdir(parents=True, exist_ok=True) + + with 
tarfile.open(tar_path, "r:gz") as tar: + # Security check: prevent path traversal attacks + for member in tar.getmembers(): + member_path = extract_to / member.name + try: + member_path.resolve().relative_to(extract_to.resolve()) + except ValueError: + print( + f"Skipping potentially unsafe path: {member.name}", file=sys.stderr + ) + continue + + # The 'filter' parameter was added in Python 3.12 + if sys.version_info >= (3, 12): + tar.extractall(path=extract_to, filter="data") + else: + tar.extractall(path=extract_to) + + +def main() -> int: + # Get version from environment or command line + version = os.environ.get("MINICORE_VERSION") + if not version and len(sys.argv) > 1: + version = sys.argv[1] + if not version: + version = DEFAULT_VERSION + + # Get output directory + output_dir_env = os.environ.get("MINICORE_OUTPUT_DIR") + if output_dir_env: + output_dir = Path(output_dir_env) + else: + repo_root = get_repo_root() + output_dir = repo_root / MINICORE_MODULE_PATH + + # Detect platform + os_name = detect_os() + arch = detect_arch() + libc = detect_libc() + + print(f"Detected OS: {os_name}") + print(f"Detected architecture: {arch}") + if libc: + print(f"Detected libc: {libc}") + + if os_name == "unknown" or arch == "unknown": + print( + f"Error: Unsupported platform: OS={os_name}, ARCH={arch}", file=sys.stderr + ) + return 1 + + # Build URL components + platform_dir = get_platform_dir(os_name, arch) + filename_arch = get_filename_arch(os_name, arch, libc) + + if not platform_dir or not filename_arch: + print( + "Error: Could not determine platform/architecture mapping", file=sys.stderr + ) + return 1 + + url = build_download_url(platform_dir, filename_arch, version) + + print(f"Version: {version}") + print(f"Download URL: {url}") + print(f"Output directory: {output_dir}") + + # Download to temp file and extract + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) / f"sf_mini_core_{filename_arch}_{version}.tar.gz" + + try: + 
download_file(url, temp_path) + extract_tar_gz(temp_path, output_dir) + except Exception as e: + print(f"Error: {e}", file=sys.stderr) + return 1 + + print("Done!") + + # List extracted files + for item in sorted(output_dir.iterdir()): + if not item.name.startswith("__"): + print(f" {item.name}") + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/pyproject.toml b/pyproject.toml index 0a23b0a3f..480027eee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,8 @@ build-verbosity = 1 [tool.cibuildwheel.linux] archs = ["x86_64", "aarch64"] +# Exclude pre-built minicore libraries from auditwheel repair +repair-wheel-command = "" [tool.cibuildwheel.macos] archs = ["x86_64", "arm64"] @@ -23,3 +25,6 @@ repair-wheel-command = "" [tool.cibuildwheel.windows] archs = ["AMD64"] + +[tool.check-manifest] +ignore-bad-ideas = ["src/snowflake/connector/minicore/**"] diff --git a/setup.cfg b/setup.cfg index 142120c24..9fc840419 100644 --- a/setup.cfg +++ b/setup.cfg @@ -72,6 +72,13 @@ where = src exclude = snowflake.connector.cpp* include = snowflake.* +[options.package_data] +snowflake.connector.minicore = + *.so + *.dll + *.dylib + *.a + [options.entry_points] console_scripts = snowflake-dump-ocsp-response = snowflake.connector.tool.dump_ocsp_response:main diff --git a/src/snowflake/connector/__init__.py b/src/snowflake/connector/__init__.py index 41b5288ac..e23ee0b12 100644 --- a/src/snowflake/connector/__init__.py +++ b/src/snowflake/connector/__init__.py @@ -5,6 +5,8 @@ from functools import wraps +from ._utils import _core_loader + apilevel = "2.0" threadsafety = 2 paramstyle = "pyformat" @@ -45,6 +47,14 @@ from .log_configuration import EasyLoggingConfigPython from .version import VERSION +# Load the core library - failures are captured in core_loader and don't prevent module loading +try: + _core_loader.load() +except Exception: + # Silently continue if core loading fails - the error is already captured in core_loader + # This ensures the 
connector module loads even if the minicore library is unavailable + pass + logging.getLogger(__name__).addHandler(NullHandler()) setup_external_libraries() diff --git a/src/snowflake/connector/_utils.py b/src/snowflake/connector/_utils.py index 4d45e1914..cc07935d7 100644 --- a/src/snowflake/connector/_utils.py +++ b/src/snowflake/connector/_utils.py @@ -1,14 +1,21 @@ from __future__ import annotations +import ctypes +import importlib import logging import os +import platform import string +import threading from enum import Enum from inspect import stack +from pathlib import Path from random import choice from threading import Timer from uuid import UUID +from snowflake.connector.description import ISA, OPERATING_SYSTEM, OS_VERSION + logger = logging.getLogger(__name__) @@ -127,3 +134,179 @@ def get_spcs_token() -> str | None: except Exception as exc: # pragma: no cover - best-effort logging only logger.debug("Failed to read SPCS token from %s: %s", path, exc) return None + + +class _CoreLoader: + def __init__(self): + self._version: bytes | None = None + self._error: Exception | None = None + self._path: str | None = None + + @staticmethod + def _detect_os() -> str: + """Detect the operating system.""" + system = platform.system().lower() + if system == "linux": + return "linux" + elif system == "darwin": + return "macos" + elif system == "windows": + return "windows" + elif system == "aix": + return "aix" + else: + return "unknown" + + @staticmethod + def _detect_arch() -> str: + """Detect the CPU architecture.""" + machine = platform.machine().lower() + if machine in ("x86_64", "amd64"): + return "x86_64" + elif machine in ("aarch64", "arm64"): + return "aarch64" + elif machine in ("i686", "i386", "x86"): + return "i686" + elif machine == "ppc64": + return "ppc64" + else: + return "unknown" + + @staticmethod + def _detect_libc() -> str: + """Detect libc type on Linux (glibc vs musl).""" + # Check if we're on Alpine/musl + if 
Path("/etc/alpine-release").exists(): + return "musl" + + # Check for musl by looking at the libc library + try: + import subprocess + + result = subprocess.run( + ["ldd", "--version"], + capture_output=True, + text=True, + ) + if "musl" in result.stdout.lower() or "musl" in result.stderr.lower(): + return "musl" + except Exception: + pass + + # Default to glibc + return "glibc" + + @staticmethod + def _get_platform_subdir() -> str: + """Get the platform-specific subdirectory name.""" + os_name = _CoreLoader._detect_os() + arch = _CoreLoader._detect_arch() + + if os_name == "linux": + libc = _CoreLoader._detect_libc() + return f"linux_{arch}_{libc}" + elif os_name == "macos": + return f"macos_{arch}" + elif os_name == "windows": + return f"windows_{arch}" + elif os_name == "aix": + return f"aix_{arch}" + + raise OSError(f"Mini core binary for {os_name} {arch} not found") + + @staticmethod + def _get_lib_name() -> str: + """Get the library filename for the current platform.""" + os_name = _CoreLoader._detect_os() + if os_name == "windows": + return "sf_mini_core.dll" + elif os_name == "macos": + return "libsf_mini_core.dylib" + elif os_name == "aix": + return "libsf_mini_core.a" + else: + # Linux and other Unix-like systems + return "libsf_mini_core.so" + + @staticmethod + def _get_core_path() -> Path: + """Get the path to the minicore library for the current platform.""" + subdir = _CoreLoader._get_platform_subdir() + lib_name = _CoreLoader._get_lib_name() + + files = importlib.resources.files("snowflake.connector.minicore") + + return files.joinpath(subdir, lib_name) + + @staticmethod + def _register_functions(core: ctypes.CDLL): + core.sf_core_full_version.argtypes = [] + core.sf_core_full_version.restype = ctypes.c_char_p + + @staticmethod + def _load_minicore(path: str) -> ctypes.CDLL: + # This context manager is the safe way to get a + # file path from importlib.resources. 
It handles cases + # where the file is inside a zip and needs to be extracted + # to a temporary location. + with importlib.resources.as_file(path) as lib_path: + core = ctypes.CDLL(str(lib_path)) + return core + + def _is_core_disabled(self) -> bool: + value = str(os.getenv("SNOWFLAKE_DISABLE_MINICORE", None)).lower() + return value in ["1", "true"] + + def _load(self) -> None: + try: + path = self._get_core_path() + core = self._load_minicore(path) + self._register_functions(core) + self._version = core.sf_core_full_version() + self._error = None + self._path = str(path) + except Exception as err: + self._error = err + + def load(self): + """Spawn a separate thread to load the minicore library (non-blocking).""" + if self._is_core_disabled(): + self._error = "mini-core-disabled" + return + self._error = "still-loading" + thread = threading.Thread(target=self._load, daemon=True) + thread.start() + + def get_load_error(self) -> str: + return str(self._error) + + def get_core_version(self) -> str | None: + if self._version: + try: + return self._version.decode("utf-8") + except Exception: + pass + return None + + def get_file_name(self) -> str: + return self._path + + +_core_loader = _CoreLoader() + + +def build_minicore_usage_for_session() -> dict[str, str | None]: + return { + "ISA": ISA, + "CORE_VERSION": _core_loader.get_core_version(), + "CORE_FILE_NAME": _core_loader.get_file_name(), + } + + +def build_minicore_usage_for_telemetry() -> dict[str, str | None]: + return { + "OS": OPERATING_SYSTEM, + "OS_VERSION": OS_VERSION, + "CORE_LOAD_ERROR": _core_loader.get_load_error(), + **build_minicore_usage_for_session(), + } diff --git a/src/snowflake/connector/auth/_auth.py b/src/snowflake/connector/auth/_auth.py index 941e293d3..f6ac8d5dc 100644 --- a/src/snowflake/connector/auth/_auth.py +++ b/src/snowflake/connector/auth/_auth.py @@ -17,7 +17,11 @@ load_pem_private_key, ) -from .._utils import get_application_path, get_spcs_token +from .._utils import ( + 
build_minicore_usage_for_session, + get_application_path, + get_spcs_token, +) from ..compat import urlencode from ..constants import ( DAY_IN_SECONDS, @@ -155,6 +159,7 @@ def base_auth_data( platform_detection_timeout_seconds=platform_detection_timeout_seconds, session_manager=session_manager.clone(max_retries=0), ), + **build_minicore_usage_for_session(), }, }, } diff --git a/src/snowflake/connector/connection.py b/src/snowflake/connector/connection.py index 0a263adae..533379ddb 100644 --- a/src/snowflake/connector/connection.py +++ b/src/snowflake/connector/connection.py @@ -42,6 +42,7 @@ from ._utils import ( _DEFAULT_VALUE_SERVER_DOP_CAP_FOR_FILE_TRANSFER, _VARIABLE_NAME_SERVER_DOP_CAP_FOR_FILE_TRANSFER, + build_minicore_usage_for_telemetry, ) from .auth import ( FIRST_PARTY_AUTHENTICATORS, @@ -681,6 +682,7 @@ def __init__( # get the imported modules from sys.modules self._log_telemetry_imported_packages() + self._log_minicore_import() # check SNOW-1218851 for long term improvement plan to refactor ocsp code atexit.register(self._close_at_exit) @@ -2506,6 +2508,28 @@ def async_query_check_helper( return not found_unfinished_query + def _log_minicore_import(self): + """ + OS - meaningful value like Windows, Linux, Darwin/MacOS etc. + OS_VERSION - meaningful version like kernel version for Linux/Darwin, etc. that your language provides + ISA - instruction set architecture, like amd64/arm64. + CORE_VERSION - result of sf_core_full_version call, if finished successfully. + CORE_FILE_NAME - a string representing binary name that driver tried to load + CORE_LOAD_ERROR - flag or error type included if there are any errors (can’t write a library to disk, can’t load it, can’t find symbols, etc). 
+ :return: + """ + ts = get_time_millis() + self._log_telemetry( + TelemetryData.from_telemetry_data_dict( + from_dict={ + TelemetryField.KEY_TYPE.value: TelemetryField.CORE_IMPORT.value, + TelemetryField.KEY_VALUE.value: build_minicore_usage_for_telemetry(), + }, + timestamp=ts, + connection=self, + ) + ) + def _log_telemetry_imported_packages(self) -> None: if self._log_imported_packages_in_telemetry: # filter out duplicates caused by submodules diff --git a/src/snowflake/connector/description.py b/src/snowflake/connector/description.py index a45250e78..bb2bd167d 100644 --- a/src/snowflake/connector/description.py +++ b/src/snowflake/connector/description.py @@ -12,6 +12,8 @@ PYTHON_VERSION = ".".join(str(v) for v in sys.version_info[:3]) OPERATING_SYSTEM = platform.system() PLATFORM = platform.platform() +OS_VERSION = platform.version() +ISA = platform.machine() IMPLEMENTATION = platform.python_implementation() COMPILER = platform.python_compiler() diff --git a/src/snowflake/connector/minicore/__init__.py b/src/snowflake/connector/minicore/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/snowflake/connector/minicore/aix_ppc64/libsf_mini_core.a b/src/snowflake/connector/minicore/aix_ppc64/libsf_mini_core.a new file mode 100644 index 000000000..330eb9566 Binary files /dev/null and b/src/snowflake/connector/minicore/aix_ppc64/libsf_mini_core.a differ diff --git a/src/snowflake/connector/minicore/aix_ppc64/libsf_mini_core_static.a b/src/snowflake/connector/minicore/aix_ppc64/libsf_mini_core_static.a new file mode 100644 index 000000000..1401fcc4e Binary files /dev/null and b/src/snowflake/connector/minicore/aix_ppc64/libsf_mini_core_static.a differ diff --git a/src/snowflake/connector/minicore/linux_aarch64_glibc/libsf_mini_core.so b/src/snowflake/connector/minicore/linux_aarch64_glibc/libsf_mini_core.so new file mode 100755 index 000000000..8dee6c7cb Binary files /dev/null and 
b/src/snowflake/connector/minicore/linux_aarch64_glibc/libsf_mini_core.so differ diff --git a/src/snowflake/connector/minicore/linux_aarch64_musl/libsf_mini_core.so b/src/snowflake/connector/minicore/linux_aarch64_musl/libsf_mini_core.so new file mode 100755 index 000000000..00fa69df0 Binary files /dev/null and b/src/snowflake/connector/minicore/linux_aarch64_musl/libsf_mini_core.so differ diff --git a/src/snowflake/connector/minicore/linux_x86_64_glibc/libsf_mini_core.so b/src/snowflake/connector/minicore/linux_x86_64_glibc/libsf_mini_core.so new file mode 100755 index 000000000..e3d472aee Binary files /dev/null and b/src/snowflake/connector/minicore/linux_x86_64_glibc/libsf_mini_core.so differ diff --git a/src/snowflake/connector/minicore/linux_x86_64_musl/libsf_mini_core.so b/src/snowflake/connector/minicore/linux_x86_64_musl/libsf_mini_core.so new file mode 100755 index 000000000..a8bd151de Binary files /dev/null and b/src/snowflake/connector/minicore/linux_x86_64_musl/libsf_mini_core.so differ diff --git a/src/snowflake/connector/minicore/macos_aarch64/libsf_mini_core.dylib b/src/snowflake/connector/minicore/macos_aarch64/libsf_mini_core.dylib new file mode 100755 index 000000000..6ceca5c51 Binary files /dev/null and b/src/snowflake/connector/minicore/macos_aarch64/libsf_mini_core.dylib differ diff --git a/src/snowflake/connector/minicore/macos_x86_64/libsf_mini_core.dylib b/src/snowflake/connector/minicore/macos_x86_64/libsf_mini_core.dylib new file mode 100755 index 000000000..04472b04f Binary files /dev/null and b/src/snowflake/connector/minicore/macos_x86_64/libsf_mini_core.dylib differ diff --git a/src/snowflake/connector/minicore/windows_x86_64/sf_mini_core.dll b/src/snowflake/connector/minicore/windows_x86_64/sf_mini_core.dll new file mode 100644 index 000000000..3b1c490ce Binary files /dev/null and b/src/snowflake/connector/minicore/windows_x86_64/sf_mini_core.dll differ diff --git 
a/src/snowflake/connector/minicore/windows_x86_64/sf_mini_core_static.lib b/src/snowflake/connector/minicore/windows_x86_64/sf_mini_core_static.lib new file mode 100644 index 000000000..6ec77cb0b Binary files /dev/null and b/src/snowflake/connector/minicore/windows_x86_64/sf_mini_core_static.lib differ diff --git a/src/snowflake/connector/telemetry.py b/src/snowflake/connector/telemetry.py index 37edd3fd4..380e5c6de 100644 --- a/src/snowflake/connector/telemetry.py +++ b/src/snowflake/connector/telemetry.py @@ -39,6 +39,8 @@ class TelemetryField(Enum): PANDAS_WRITE = "client_write_pandas" # imported packages along with client IMPORTED_PACKAGES = "client_imported_packages" + # Core import + CORE_IMPORT = "mini_core_import" # multi-statement usage MULTI_STATEMENT = "client_multi_statement_query" # Keys for telemetry data sent through either in-band or out-of-band telemetry diff --git a/test/integ/test_connection.py b/test/integ/test_connection.py index 7e61272a8..48f0700e4 100644 --- a/test/integ/test_connection.py +++ b/test/integ/test_connection.py @@ -19,6 +19,7 @@ import snowflake.connector from snowflake.connector import DatabaseError, OperationalError, ProgrammingError +from snowflake.connector._utils import _core_loader from snowflake.connector.compat import IS_WINDOWS from snowflake.connector.connection import ( DEFAULT_CLIENT_PREFETCH_THREADS, @@ -26,7 +27,12 @@ ) from snowflake.connector.constants import PARAMETER_MULTI_STATEMENT_COUNT from snowflake.connector.cursor import QueryResultStats -from snowflake.connector.description import CLIENT_NAME +from snowflake.connector.description import ( + CLIENT_NAME, + ISA, + OPERATING_SYSTEM, + OS_VERSION, +) from snowflake.connector.errorcode import ( ER_CONNECTION_IS_CLOSED, ER_FAILED_PROCESSING_PYFORMAT, @@ -1232,6 +1238,38 @@ def check_packages(message: str, expected_packages: list[str]) -> bool: assert len(telemetry_test.records) == 0 +@pytest.mark.skipolddriver +def test_minicore_import_telemetry(conn_cnx, 
capture_sf_telemetry): + """Test that minicore import telemetry is logged with all required fields and correct values.""" + with ( + conn_cnx() as conn, + capture_sf_telemetry.patch_connection(conn, False) as telemetry_test, + ): + conn._log_minicore_import() + assert len(telemetry_test.records) > 0 + # Check that the telemetry record contains the proper structure + found_minicore_telemetry = False + for t in telemetry_test.records: + if ( + t.message.get(TelemetryField.KEY_TYPE.value) + == TelemetryField.CORE_IMPORT.value + and TelemetryField.KEY_VALUE.value in t.message + ): + found_minicore_telemetry = True + # Verify that the value contains all required fields with correct values + value = t.message[TelemetryField.KEY_VALUE.value] + assert value["OS"] == OPERATING_SYSTEM + assert value["OS_VERSION"] == OS_VERSION + assert value["ISA"] == ISA + assert value["CORE_VERSION"] == _core_loader.get_core_version() + assert value["CORE_FILE_NAME"] == _core_loader.get_file_name() + assert value["CORE_LOAD_ERROR"] == _core_loader.get_load_error() + break + assert ( + found_minicore_telemetry + ), "Minicore telemetry not found in telemetry records" + + @pytest.mark.skipolddriver def test_disable_query_context_cache(conn_cnx) -> None: with conn_cnx(disable_query_context_cache=True) as conn: @@ -1749,8 +1787,8 @@ def test_disable_telemetry(conn_cnx, caplog): with conn.cursor() as cur: cur.execute("select 1").fetchall() assert ( - len(conn._telemetry._log_batch) == 3 - ) # 3 events are import package, fetch first, fetch last + len(conn._telemetry._log_batch) == 4 + ) # 4 events are import package, minicore import, fetch first, fetch last assert "POST /telemetry/send" in caplog.text caplog.clear() @@ -1773,7 +1811,7 @@ def test_disable_telemetry(conn_cnx, caplog): # test disable telemetry in the client with caplog.at_level(logging.DEBUG): with conn_cnx() as conn: - assert conn.telemetry_enabled and len(conn._telemetry._log_batch) == 1 + assert conn.telemetry_enabled and 
len(conn._telemetry._log_batch) == 2 conn.telemetry_enabled = False with conn.cursor() as cur: cur.execute("select 1").fetchall() diff --git a/test/unit/test_util.py b/test/unit/test_util.py index b2862f466..2a5ca8664 100644 --- a/test/unit/test_util.py +++ b/test/unit/test_util.py @@ -1,9 +1,17 @@ +import ctypes +import os +from importlib import reload +from time import sleep +from unittest import mock + import pytest -try: - from snowflake.connector._utils import _TrackedQueryCancellationTimer -except ImportError: - pass +from snowflake.connector._utils import ( + _CoreLoader, + _TrackedQueryCancellationTimer, + build_minicore_usage_for_session, + build_minicore_usage_for_telemetry, +) pytestmark = pytest.mark.skipolddriver @@ -18,3 +26,621 @@ def test_timer(): timer.start() timer.cancel() assert not timer.executed + + +class TestCoreLoader: + """Tests for the _CoreLoader class.""" + + def test_e2e(self): + loader = _CoreLoader() + loader.load() + # Sleep a moment to make sure the lib is loaded + sleep(2) + assert loader.get_load_error() == str(None) + assert loader.get_core_version() == "0.0.1" + + def test_core_loader_initialization(self): + """Test that _CoreLoader initializes with None values.""" + loader = _CoreLoader() + assert loader._version is None + assert loader._error is None + assert loader._path is None + + @pytest.mark.parametrize( + "system,expected", + [ + ("Linux", "linux"), + ("Darwin", "macos"), + ("Windows", "windows"), + ("AIX", "aix"), + ("FreeBSD", "unknown"), + ], + ) + def test_detect_os(self, system, expected): + """Test _detect_os returns correct OS identifier.""" + with mock.patch("platform.system", return_value=system): + assert _CoreLoader._detect_os() == expected + + @pytest.mark.parametrize( + "machine,expected", + [ + ("x86_64", "x86_64"), + ("AMD64", "x86_64"), + ("aarch64", "aarch64"), + ("arm64", "aarch64"), + ("i686", "i686"), + ("i386", "i686"), + ("ppc64", "ppc64"), + ("sparc", "unknown"), + ], + ) + def 
test_detect_arch(self, machine, expected): + """Test _detect_arch returns correct architecture identifier.""" + with mock.patch("platform.machine", return_value=machine): + assert _CoreLoader._detect_arch() == expected + + def test_detect_libc_alpine(self, tmp_path): + """Test _detect_libc returns musl on Alpine Linux.""" + with mock.patch("pathlib.Path.exists", return_value=True): + assert _CoreLoader._detect_libc() == "musl" + + def test_detect_libc_glibc_default(self): + """Test _detect_libc returns glibc by default.""" + with mock.patch("pathlib.Path.exists", return_value=False): + with mock.patch("subprocess.run", side_effect=Exception("not found")): + assert _CoreLoader._detect_libc() == "glibc" + + @pytest.mark.parametrize( + "os_name,arch,libc,expected_subdir", + [ + ("linux", "x86_64", "glibc", "linux_x86_64_glibc"), + ("linux", "x86_64", "musl", "linux_x86_64_musl"), + ("linux", "aarch64", "glibc", "linux_aarch64_glibc"), + ("macos", "x86_64", "", "macos_x86_64"), + ("macos", "aarch64", "", "macos_aarch64"), + ("windows", "x86_64", "", "windows_x86_64"), + ("aix", "ppc64", "", "aix_ppc64"), + ], + ) + def test_get_platform_subdir(self, os_name, arch, libc, expected_subdir): + """Test _get_platform_subdir returns correct subdirectory.""" + with mock.patch.object(_CoreLoader, "_detect_os", return_value=os_name): + with mock.patch.object(_CoreLoader, "_detect_arch", return_value=arch): + with mock.patch.object(_CoreLoader, "_detect_libc", return_value=libc): + assert _CoreLoader._get_platform_subdir() == expected_subdir + + def test_get_platform_subdir_unsupported_os(self): + """Test _get_platform_subdir raises OSError for unsupported OS.""" + with mock.patch.object(_CoreLoader, "_detect_os", return_value="unknown"): + with mock.patch.object(_CoreLoader, "_detect_arch", return_value="x86_64"): + with pytest.raises( + OSError, match="Mini core binary for unknown x86_64 not found" + ): + _CoreLoader._get_platform_subdir() + + @pytest.mark.parametrize( + 
"os_name,expected_lib", + [ + ("windows", "sf_mini_core.dll"), + ("macos", "libsf_mini_core.dylib"), + ("aix", "libsf_mini_core.a"), + ("linux", "libsf_mini_core.so"), + ], + ) + def test_get_lib_name(self, os_name, expected_lib): + """Test _get_lib_name returns correct library filename.""" + with mock.patch.object(_CoreLoader, "_detect_os", return_value=os_name): + assert _CoreLoader._get_lib_name() == expected_lib + + def test_get_core_path_windows(self): + """Test _get_core_path returns correct path for Windows.""" + with mock.patch.object(_CoreLoader, "_detect_os", return_value="windows"): + with mock.patch.object(_CoreLoader, "_detect_arch", return_value="x86_64"): + with mock.patch("importlib.resources.files") as mock_files: + mock_files_obj = mock.MagicMock() + mock_files.return_value = mock_files_obj + + _CoreLoader._get_core_path() + + mock_files.assert_called_once_with("snowflake.connector.minicore") + mock_files_obj.joinpath.assert_called_once_with( + "windows_x86_64", "sf_mini_core.dll" + ) + + def test_get_core_path_darwin(self): + """Test _get_core_path returns correct path for macOS.""" + with mock.patch.object(_CoreLoader, "_detect_os", return_value="macos"): + with mock.patch.object(_CoreLoader, "_detect_arch", return_value="aarch64"): + with mock.patch("importlib.resources.files") as mock_files: + mock_files_obj = mock.MagicMock() + mock_files.return_value = mock_files_obj + + _CoreLoader._get_core_path() + + mock_files.assert_called_once_with("snowflake.connector.minicore") + mock_files_obj.joinpath.assert_called_once_with( + "macos_aarch64", "libsf_mini_core.dylib" + ) + + def test_get_core_path_linux(self): + """Test _get_core_path returns correct path for Linux.""" + with mock.patch.object(_CoreLoader, "_detect_os", return_value="linux"): + with mock.patch.object(_CoreLoader, "_detect_arch", return_value="x86_64"): + with mock.patch.object( + _CoreLoader, "_detect_libc", return_value="glibc" + ): + with mock.patch("importlib.resources.files") 
as mock_files: + mock_files_obj = mock.MagicMock() + mock_files.return_value = mock_files_obj + + _CoreLoader._get_core_path() + + mock_files.assert_called_once_with( + "snowflake.connector.minicore" + ) + mock_files_obj.joinpath.assert_called_once_with( + "linux_x86_64_glibc", "libsf_mini_core.so" + ) + + def test_register_functions(self): + """Test that _register_functions sets up the C library functions correctly.""" + mock_core = mock.MagicMock() + mock_core.sf_core_full_version = mock.MagicMock() + + _CoreLoader._register_functions(mock_core) + + # Verify the function signature was configured + assert mock_core.sf_core_full_version.argtypes == [] + assert mock_core.sf_core_full_version.restype == ctypes.c_char_p + + def test_load_minicore(self): + """Test that _load_minicore loads the library.""" + mock_path = mock.MagicMock() + mock_lib_path = "/path/to/libsf_mini_core.so" + + with mock.patch("importlib.resources.as_file") as mock_as_file: + with mock.patch("ctypes.CDLL") as mock_cdll: + # Setup the context manager + mock_as_file.return_value.__enter__ = mock.Mock( + return_value=mock_lib_path + ) + mock_as_file.return_value.__exit__ = mock.Mock(return_value=False) + + mock_core = mock.MagicMock() + mock_cdll.return_value = mock_core + + result = _CoreLoader._load_minicore(mock_path) + + mock_as_file.assert_called_once_with(mock_path) + mock_cdll.assert_called_once_with(str(mock_lib_path)) + assert result == mock_core + + @pytest.mark.parametrize("env_value", ["1", "true", "True", "TRUE"]) + def test_is_core_disabled_returns_true(self, env_value): + """Test that _is_core_disabled returns True when env var is '1' or 'true' (case-insensitive).""" + loader = _CoreLoader() + with mock.patch.dict(os.environ, {"SNOWFLAKE_DISABLE_MINICORE": env_value}): + assert loader._is_core_disabled() is True + + @pytest.mark.parametrize("env_value", ["0", "false", "False", "no", "other", ""]) + def test_is_core_disabled_returns_false(self, env_value): + """Test that 
_is_core_disabled returns False for other values.""" + loader = _CoreLoader() + with mock.patch.dict(os.environ, {"SNOWFLAKE_DISABLE_MINICORE": env_value}): + assert loader._is_core_disabled() is False + + def test_is_core_disabled_returns_false_when_not_set(self): + """Test that _is_core_disabled returns False when env var is not set.""" + loader = _CoreLoader() + with mock.patch.dict(os.environ, {}, clear=True): + # Ensure the env var is not set + os.environ.pop("SNOWFLAKE_DISABLE_MINICORE", None) + assert loader._is_core_disabled() is False + + def test_load_skips_loading_when_core_disabled(self): + """Test that load() returns early when core is disabled.""" + loader = _CoreLoader() + + with mock.patch.dict(os.environ, {"SNOWFLAKE_DISABLE_MINICORE": "1"}): + with mock.patch.object(loader, "_get_core_path") as mock_get_path: + loader.load() + sleep(2) + + # Verify that _get_core_path was never called (loading was skipped) + mock_get_path.assert_not_called() + # Verify the error message is set correctly + assert loader._error == "mini-core-disabled" + assert loader._version is None + + def test_load_success(self): + """Test successful load of the core library.""" + loader = _CoreLoader() + mock_path = mock.MagicMock() + mock_core = mock.MagicMock() + mock_version = b"1.2.3" + mock_core.sf_core_full_version = mock.MagicMock(return_value=mock_version) + + with mock.patch.object(loader, "_is_core_disabled", return_value=False): + with mock.patch.object( + loader, "_get_core_path", return_value=mock_path + ) as mock_get_path: + with mock.patch.object( + loader, "_load_minicore", return_value=mock_core + ) as mock_load: + with mock.patch.object( + loader, "_register_functions" + ) as mock_register: + loader.load() + sleep(2) + + mock_get_path.assert_called_once() + mock_load.assert_called_once_with(mock_path) + mock_register.assert_called_once_with(mock_core) + assert loader._version == mock_version + assert loader._error is None + assert loader._path == str(mock_path) 
+ + def test_load_failure(self): + """Test that load captures exceptions.""" + loader = _CoreLoader() + test_error = Exception("Test error loading core") + + with mock.patch.object(loader, "_is_core_disabled", return_value=False): + with mock.patch.object( + loader, "_get_core_path", side_effect=test_error + ) as mock_get_path: + loader.load() + sleep(2) + + mock_get_path.assert_called_once() + assert loader._version is None + assert loader._error == test_error + assert loader._path is None + + def test_get_load_error_with_error(self): + """Test get_load_error returns error message when error exists.""" + loader = _CoreLoader() + test_error = Exception("Test error message") + loader._error = test_error + + result = loader.get_load_error() + + assert result == "Test error message" + + def test_get_load_error_no_error(self): + """Test get_load_error returns 'None' string when no error exists.""" + loader = _CoreLoader() + + result = loader.get_load_error() + + assert result == "None" + + def test_get_core_version_with_version(self): + """Test get_core_version returns decoded version string.""" + loader = _CoreLoader() + loader._version = b"1.2.3-beta" + + result = loader.get_core_version() + + assert result == "1.2.3-beta" + + def test_get_core_version_no_version(self): + """Test get_core_version returns None when no version exists.""" + loader = _CoreLoader() + + result = loader.get_core_version() + + assert result is None + + def test_get_file_name_with_path(self): + """Test get_file_name returns the path string after successful load.""" + loader = _CoreLoader() + loader._path = "/path/to/libsf_mini_core.so" + + result = loader.get_file_name() + + assert result == "/path/to/libsf_mini_core.so" + + def test_get_file_name_no_path(self): + """Test get_file_name returns None when no path exists.""" + loader = _CoreLoader() + + result = loader.get_file_name() + + assert result is None + + +def test_importing_snowflake_connector_triggers_core_loader_load(): + """Test 
that importing snowflake.connector triggers core_loader.load().""" + # We need to test that when snowflake.connector is imported, + # core_loader.load() is called. Since snowflake.connector is already imported, + # we need to reload it and mock the load method. + + with mock.patch("snowflake.connector._utils._core_loader.load") as mock_load: + # Reload the connector module to trigger the __init__.py code again + import snowflake.connector + + reload(snowflake.connector) + + # Verify that load was called during import + mock_load.assert_called_once() + + +def test_snowflake_connector_loads_when_core_loader_fails(): + """Test that snowflake.connector loads successfully even if core_loader.load() fails.""" + # Mock core_loader.load() to raise an exception + with mock.patch( + "snowflake.connector._utils._core_loader.load", + side_effect=Exception("Simulated core loading failure"), + ): + import snowflake.connector + + # Reload the connector module - this should NOT raise an exception + try: + reload(snowflake.connector) + # If we reach here, the module loaded successfully despite core_loader.load() failing + assert True + except Exception as e: + pytest.fail( + f"snowflake.connector failed to load when core_loader.load() raised an exception: {e}" + ) + + # Verify the module has expected attributes + assert hasattr(snowflake.connector, "connect") + assert hasattr(snowflake.connector, "SnowflakeConnection") + assert hasattr(snowflake.connector, "Connect") + + +def test_snowflake_connector_usable_when_core_loader_fails(): + """Test that snowflake.connector remains usable even if core_loader.load() fails.""" + # Mock core_loader.load() to raise an exception + with mock.patch( + "snowflake.connector._utils._core_loader.load", + side_effect=RuntimeError("Core library not found"), + ): + import snowflake.connector + + # Reload the connector module + reload(snowflake.connector) + + # Verify we can access key classes and functions + assert 
snowflake.connector.SnowflakeConnection is not None + assert callable(snowflake.connector.connect) + assert callable(snowflake.connector.Connect) + + # Verify error classes are available + assert hasattr(snowflake.connector, "Error") + assert hasattr(snowflake.connector, "DatabaseError") + assert hasattr(snowflake.connector, "ProgrammingError") + + # Verify DBAPI constants are available + assert hasattr(snowflake.connector, "apilevel") + assert hasattr(snowflake.connector, "threadsafety") + assert hasattr(snowflake.connector, "paramstyle") + + +def test_core_loader_error_captured_when_load_fails(): + """Test that errors from core_loader.load() are captured in the loader's error attribute.""" + loader = _CoreLoader() + test_exception = FileNotFoundError("Library file not found") + + # Mock _get_core_path to raise an exception + with mock.patch.object(loader, "_is_core_disabled", return_value=False): + with mock.patch.object(loader, "_get_core_path", side_effect=test_exception): + # Call load - it should NOT raise an exception + loader.load() + sleep(2) + + # Verify the error was captured + assert loader._error is test_exception + assert loader._version is None + assert loader.get_load_error() == "Library file not found" + assert loader.get_core_version() is None + + +def test_core_loader_fails_gracefully_on_missing_library(): + """Test that core_loader handles missing library files gracefully.""" + loader = _CoreLoader() + + # Mock importlib.resources.files to simulate missing library + with mock.patch.object(loader, "_is_core_disabled", return_value=False): + with mock.patch("importlib.resources.files") as mock_files: + mock_files.side_effect = FileNotFoundError("minicore module not found") + + # Call load - it should NOT raise an exception + loader.load() + sleep(2) + + # Verify the error was captured + assert loader._error is not None + assert loader._version is None + assert "minicore module not found" in loader.get_load_error() + + +def 
test_core_loader_fails_gracefully_on_incompatible_library(): + """Test that core_loader handles incompatible library files gracefully.""" + loader = _CoreLoader() + mock_path = mock.MagicMock() + + # Mock the loading to simulate incompatible library (OSError is common for this) + with mock.patch.object(loader, "_is_core_disabled", return_value=False): + with mock.patch.object(loader, "_get_core_path", return_value=mock_path): + with mock.patch.object( + loader, + "_load_minicore", + side_effect=OSError("incompatible library version"), + ): + # Call load - it should NOT raise an exception + loader.load() + sleep(2) + + # Verify the error was captured + assert loader._error is not None + assert loader._version is None + assert "incompatible library version" in loader.get_load_error() + + +class TestBuildMinicoreUsage: + """Tests for build_minicore_usage_for_session and build_minicore_usage_for_telemetry functions.""" + + def test_build_minicore_usage_for_session_returns_expected_keys(self): + """Test that build_minicore_usage_for_session returns dict with expected keys.""" + result = build_minicore_usage_for_session() + + assert isinstance(result, dict) + assert "ISA" in result + assert "CORE_VERSION" in result + assert "CORE_FILE_NAME" in result + + def test_build_minicore_usage_for_session_isa_matches_platform(self): + """Test that ISA value matches platform.machine().""" + import platform + + result = build_minicore_usage_for_session() + + assert result["ISA"] == platform.machine() + + def test_build_minicore_usage_for_session_with_mocked_core_loader(self): + """Test build_minicore_usage_for_session with mocked core loader values.""" + with mock.patch( + "snowflake.connector._utils._core_loader.get_core_version", + return_value="1.2.3", + ): + with mock.patch( + "snowflake.connector._utils._core_loader.get_file_name", + return_value="/path/to/lib.so", + ): + result = build_minicore_usage_for_session() + + assert result["CORE_VERSION"] == "1.2.3" + assert 
result["CORE_FILE_NAME"] == "/path/to/lib.so" + + def test_build_minicore_usage_for_session_with_failed_load(self): + """Test build_minicore_usage_for_session when core loading has failed.""" + with mock.patch( + "snowflake.connector._utils._core_loader.get_core_version", + return_value=None, + ): + with mock.patch( + "snowflake.connector._utils._core_loader.get_file_name", + return_value=None, + ): + result = build_minicore_usage_for_session() + + assert result["CORE_VERSION"] is None + assert result["CORE_FILE_NAME"] is None + + def test_build_minicore_usage_for_telemetry_returns_expected_keys(self): + """Test that build_minicore_usage_for_telemetry returns dict with expected keys.""" + result = build_minicore_usage_for_telemetry() + + assert isinstance(result, dict) + # Telemetry-specific keys + assert "OS" in result + assert "OS_VERSION" in result + assert "CORE_LOAD_ERROR" in result + # Session keys (inherited) + assert "ISA" in result + assert "CORE_VERSION" in result + assert "CORE_FILE_NAME" in result + + def test_build_minicore_usage_for_telemetry_os_matches_platform(self): + """Test that OS value matches platform.system().""" + import platform + + result = build_minicore_usage_for_telemetry() + + assert result["OS"] == platform.system() + + def test_build_minicore_usage_for_telemetry_os_version_matches_platform(self): + """Test that OS_VERSION value matches platform.version().""" + import platform + + result = build_minicore_usage_for_telemetry() + + assert result["OS_VERSION"] == platform.version() + + def test_build_minicore_usage_for_telemetry_includes_session_data(self): + """Test that build_minicore_usage_for_telemetry includes all session data.""" + with mock.patch( + "snowflake.connector._utils._core_loader.get_core_version", + return_value="2.0.0", + ): + with mock.patch( + "snowflake.connector._utils._core_loader.get_file_name", + return_value="/custom/path/lib.dylib", + ): + with mock.patch( + 
"snowflake.connector._utils._core_loader.get_load_error", + return_value="None", + ): + session_result = build_minicore_usage_for_session() + telemetry_result = build_minicore_usage_for_telemetry() + + # All session keys should be present in telemetry result + for key in session_result: + assert key in telemetry_result + assert telemetry_result[key] == session_result[key] + + def test_build_minicore_usage_for_telemetry_with_mocked_values(self): + """Test build_minicore_usage_for_telemetry with mocked core loader values.""" + with mock.patch( + "snowflake.connector._utils._core_loader.get_core_version", + return_value="3.0.0", + ): + with mock.patch( + "snowflake.connector._utils._core_loader.get_file_name", + return_value="/path/to/lib.dylib", + ): + with mock.patch( + "snowflake.connector._utils._core_loader.get_load_error", + return_value="None", + ): + result = build_minicore_usage_for_telemetry() + + assert result["CORE_VERSION"] == "3.0.0" + assert result["CORE_FILE_NAME"] == "/path/to/lib.dylib" + assert result["CORE_LOAD_ERROR"] == "None" + + def test_build_minicore_usage_for_telemetry_with_disabled_core(self): + """Test build_minicore_usage_for_telemetry when core is disabled.""" + with mock.patch( + "snowflake.connector._utils._core_loader.get_core_version", + return_value=None, + ): + with mock.patch( + "snowflake.connector._utils._core_loader.get_file_name", + return_value=None, + ): + with mock.patch( + "snowflake.connector._utils._core_loader.get_load_error", + return_value="mini-core-disabled", + ): + result = build_minicore_usage_for_telemetry() + + assert result["CORE_VERSION"] is None + assert result["CORE_FILE_NAME"] is None + assert result["CORE_LOAD_ERROR"] == "mini-core-disabled" + # OS info should still be present + assert result["OS"] is not None + assert result["OS_VERSION"] is not None + + def test_build_minicore_usage_for_telemetry_with_load_error(self): + """Test build_minicore_usage_for_telemetry when core loading has failed.""" + with 
mock.patch(
+            "snowflake.connector._utils._core_loader.get_core_version",
+            return_value=None,
+        ):
+            with mock.patch(
+                "snowflake.connector._utils._core_loader.get_file_name",
+                return_value=None,
+            ):
+                with mock.patch(
+                    "snowflake.connector._utils._core_loader.get_load_error",
+                    return_value="Library not found",
+                ):
+                    result = build_minicore_usage_for_telemetry()
+
+                    assert result["CORE_VERSION"] is None
+                    assert result["CORE_FILE_NAME"] is None
+                    assert result["CORE_LOAD_ERROR"] == "Library not found"
diff --git a/tox.ini b/tox.ini
index 9ca4e77d9..0002a32a3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -119,8 +119,8 @@ extras=
     pandas
     secure-local-storage
 commands =
-    {env:SNOWFLAKE_PYTEST_CMD} -n auto -m "aio and unit" -vvv {posargs:} test
-    {env:SNOWFLAKE_PYTEST_CMD} -n auto -m "aio and integ" -vvv {posargs:} test
+    {env:SNOWFLAKE_PYTEST_CMD} -n auto -m "aio and unit and not sso and not pandas and not lambda" -vvv {posargs:} test
+    {env:SNOWFLAKE_PYTEST_CMD} -n auto -m "aio and integ and not sso and not pandas and not lambda" -vvv {posargs:} test
 
 [testenv:aio-unsupported-python]
 description = Run aio connector on unsupported python versions