From 721a2691374ffdab9bf1b730a0f59740e5e9e29e Mon Sep 17 00:00:00 2001
From: Ghislain Fourny
Date: Wed, 16 Jul 2025 13:57:54 +0200
Subject: [PATCH] Add warning and bump up version.

---
 pyproject.toml        |  2 +-
 src/jsoniq/session.py | 39 +++++++++++++++++++++++++++++++++++++++
 2 files changed, 40 insertions(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index ddc1312..2ee019c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "jsoniq"
-version = "0.1.0a9"
+version = "0.1.0a10"
 description = "Python edition of RumbleDB, a JSONiq engine"
 requires-python = ">=3.11"
 dependencies = [
diff --git a/src/jsoniq/session.py b/src/jsoniq/session.py
index babd03c..81db729 100644
--- a/src/jsoniq/session.py
+++ b/src/jsoniq/session.py
@@ -1,5 +1,8 @@
 from pyspark.sql import SparkSession
 from .sequence import SequenceOfItems
+import sys
+import os
+import re
 import importlib.resources as pkg_resources
 
 with pkg_resources.path("jsoniq.jars", "rumbledb-1.24.0.jar") as jar_path:
@@ -19,6 +22,42 @@ def __init__(self, spark_session: SparkSession):
 
     class Builder:
         def __init__(self):
+
+            # Fail fast with a clear message if the JVM is missing or too old:
+            # pyspark 4 (which RumbleDB builds on) requires Java 17 or 21.
+            java_version = os.popen("java -version 2>&1").read()  # java prints its version on stderr
+            match = re.search(r'version "([\d._]+)"', java_version)
+            if match:
+                version = match.group(1)
+                major = version.split(".")[0]
+                if major not in ("17", "21"):
+                    sys.stderr.write("**************************************************************************\n")
+                    sys.stderr.write("[Error] RumbleDB builds on top of pyspark 4, which requires Java 17 or 21.\n")
+                    sys.stderr.write(f"Your Java version: {version}\n")
+                    sys.stderr.write("**************************************************************************\n")
+                    sys.stderr.write("\n")
+                    sys.stderr.write("What should you do?\n")
+                    sys.stderr.write("\n")
+                    sys.stderr.write("If you do NOT have Java 17 or 21 installed, you can download Java 17 or 21 for example from https://adoptium.net/\n")
+                    sys.stderr.write("\n")
+                    sys.stderr.write("Quick command for macOS: brew install --cask temurin17 or brew install --cask temurin21\n")
+                    sys.stderr.write("Quick command for Ubuntu: apt-get install temurin-17-jdk or apt-get install temurin-21-jdk\n")
+                    sys.stderr.write("Quick command for Windows 11: winget install EclipseAdoptium.Temurin.17.JDK or winget install EclipseAdoptium.Temurin.21.JDK\n")
+                    sys.stderr.write("\n")
+                    sys.stderr.write(
+                        "If you DO have Java 17 or 21, but the wrong version appears above, then it means you need to set your JAVA_HOME environment variable properly to point to Java 17 or 21.\n"
+                    )
+                    sys.stderr.write("\n")
+                    sys.stderr.write("For macOS, try: export JAVA_HOME=$(/usr/libexec/java_home -v 17) or export JAVA_HOME=$(/usr/libexec/java_home -v 21)\n")
+                    sys.stderr.write("\n")
+                    sys.stderr.write("For Ubuntu, find the paths to installed versions with this command: update-alternatives --config java\n then: export JAVA_HOME=...your desired path...\n")
+                    sys.stderr.write("\n")
+                    sys.stderr.write("For Windows 11: look for the default Java path with 'where java' and/or look for alternate installed versions in Program Files. Then: setx /m JAVA_HOME \"...your desired path here...\"\n")
+                    sys.exit(43)
+            else:
+                # No parsable "java -version" output: java is absent or broken.
+                sys.stderr.write("[Error] Could not determine Java version. Please ensure Java is installed and JAVA_HOME is properly set.\n")
+                sys.exit(43)
             self._sparkbuilder = SparkSession.builder.config("spark.jars", jar_path_str)
 
     def getOrCreate(self):