66import com .databricks .sdk .core .oauth .OAuthHeaderFactory ;
77import com .databricks .sdk .core .oauth .Token ;
88import com .databricks .sdk .core .oauth .TokenSource ;
9+ import com .databricks .sdk .core .utils .Environment ;
910import com .databricks .sdk .core .utils .OSUtils ;
1011import com .databricks .sdk .support .InternalApi ;
1112import com .fasterxml .jackson .core .JsonProcessingException ;
13+ import com .fasterxml .jackson .databind .JsonNode ;
1214import com .fasterxml .jackson .databind .ObjectMapper ;
15+ import java .io .IOException ;
16+ import java .io .InputStream ;
1317import java .nio .charset .StandardCharsets ;
1418import java .util .*;
19+ import java .util .concurrent .ConcurrentHashMap ;
20+ import java .util .concurrent .TimeUnit ;
21+ import org .apache .commons .io .IOUtils ;
1522
1623@ InternalApi
1724public class DatabricksCliCredentialsProvider implements CredentialsProvider {
@@ -22,6 +29,22 @@ public class DatabricksCliCredentialsProvider implements CredentialsProvider {
2229
2330 private static final ObjectMapper MAPPER = new ObjectMapper ();
2431
32+ // ---- Version detection ----
33+
34+ // --profile support added in CLI v0.207.1: https://github.com/databricks/cli/pull/855
35+ static final DatabricksCliVersion CLI_VERSION_FOR_PROFILE = new DatabricksCliVersion (0 , 207 , 1 );
36+
37+ // 5-second cap on `databricks version` so a hung CLI (slow first-run scan, antivirus, blocked
38+ // stdin) does not wedge SDK init indefinitely.
39+ private static final long VERSION_PROBE_TIMEOUT_SECONDS = 5 ;
40+
41+ // Successful version probes keyed by cliPath. Failures are deliberately not cached, so a
42+ // transient error (timeout, AV scan) does not pin every later token source to the conservative
43+ // fallback for the rest of the process lifetime.
44+ private static final Map <String , DatabricksCliVersion > VERSION_CACHE = new ConcurrentHashMap <>();
45+
46+ // ---- Scope validation ----
47+
2548 /** Thrown when the cached CLI token's scopes don't match the SDK's configured scopes. */
2649 static class ScopeMismatchException extends DatabricksException {
2750 ScopeMismatchException (String message ) {
@@ -36,59 +59,13 @@ static class ScopeMismatchException extends DatabricksException {
3659 private static final Set <String > SCOPES_IGNORED_FOR_COMPARISON =
3760 Collections .singleton ("offline_access" );
3861
62+ // ---- Public API ----
63+
  /** Returns the auth type identifier for this provider (the {@code DATABRICKS_CLI} constant). */
  @Override
  public String authType() {
    return DATABRICKS_CLI;
  }
4368
44- /**
45- * Builds the CLI command arguments using --host (legacy path).
46- *
47- * @param cliPath Path to the databricks CLI executable
48- * @param config Configuration containing host, account ID, workspace ID, etc.
49- * @return List of command arguments
50- */
51- List <String > buildHostArgs (String cliPath , DatabricksConfig config ) {
52- List <String > cmd =
53- new ArrayList <>(Arrays .asList (cliPath , "auth" , "token" , "--host" , config .getHost ()));
54- if (config .getClientType () == ClientType .ACCOUNT ) {
55- cmd .add ("--account-id" );
56- cmd .add (config .getAccountId ());
57- }
58- return cmd ;
59- }
60-
61- private CliTokenSource getDatabricksCliTokenSource (DatabricksConfig config ) {
62- String cliPath = config .getDatabricksCliPath ();
63- if (cliPath == null ) {
64- cliPath = OSUtils .get (config .getEnv ()).getDatabricksCliPath ();
65- }
66- if (cliPath == null ) {
67- LOG .debug ("Databricks CLI could not be found" );
68- return null ;
69- }
70-
71- List <String > cmd ;
72- List <String > fallbackCmd = null ;
73-
74- if (config .getProfile () != null ) {
75- // When profile is set, use --profile as the primary command.
76- // The profile contains the full config (host, account_id, etc.).
77- cmd =
78- new ArrayList <>(
79- Arrays .asList (cliPath , "auth" , "token" , "--profile" , config .getProfile ()));
80- // Build a --host fallback for older CLIs that don't support --profile.
81- if (config .getHost () != null ) {
82- fallbackCmd = buildHostArgs (cliPath , config );
83- }
84- } else {
85- cmd = buildHostArgs (cliPath , config );
86- }
87-
88- return new CliTokenSource (
89- cmd , "token_type" , "access_token" , "expiry" , config .getEnv (), fallbackCmd );
90- }
91-
9269 @ Override
9370 public OAuthHeaderFactory configure (DatabricksConfig config ) {
9471 String host = config .getHost ();
@@ -151,6 +128,187 @@ public Token getToken() {
151128 }
152129 }
153130
131+ // ---- Token source construction ----
132+
133+ private CliTokenSource getDatabricksCliTokenSource (DatabricksConfig config ) {
134+ String cliPath = config .getDatabricksCliPath ();
135+ if (cliPath == null ) {
136+ cliPath = OSUtils .get (config .getEnv ()).getDatabricksCliPath ();
137+ }
138+ if (cliPath == null ) {
139+ LOG .debug ("Databricks CLI could not be found" );
140+ return null ;
141+ }
142+
143+ List <String > cmd = resolveCliCommand (cliPath , config );
144+ return new CliTokenSource (cmd , "token_type" , "access_token" , "expiry" , config .getEnv ());
145+ }
146+
147+ /**
148+ * Detects the installed CLI version and builds the {@code auth token} command. Falls back to the
149+ * most conservative command when version detection fails.
150+ */
151+ List <String > resolveCliCommand (String cliPath , DatabricksConfig config ) {
152+ DatabricksCliVersion version = getCliVersion (cliPath , config .getEnv ());
153+ if (version .isDefaultDevBuild ()) {
154+ // A default-marker dev build has no injected version, so every feature gate fails.
155+ // Surface an informational hint so users know why their feature flags aren't taking effect.
156+ LOG .info (
157+ "Databricks CLI {} is a development build; feature detection will use conservative "
158+ + "fallbacks. Rebuild the CLI with an explicit version to enable capability-based "
159+ + "flag selection." ,
160+ version );
161+ }
162+ return buildCliCommand (cliPath , config , version );
163+ }
164+
165+ /**
166+ * Builds the {@code auth token} command for the given CLI version.
167+ *
168+ * <p>Falls back to {@code --host} when {@code --profile} is either not configured or not
169+ * supported by the installed CLI.
170+ */
171+ List <String > buildCliCommand (
172+ String cliPath , DatabricksConfig config , DatabricksCliVersion version ) {
173+ if (config .getProfile () == null ) {
174+ return buildHostArgs (cliPath , config );
175+ }
176+
177+ // Flag --profile is a global CLI flag and is recognized for all commands even the ones that
178+ // do not support it. Only use --profile in CLI versions known to support it in `auth token`.
179+ if (!version .atLeast (CLI_VERSION_FOR_PROFILE )) {
180+ if (version .equals (DatabricksCliVersion .UNKNOWN ) || version .isDefaultDevBuild ()) {
181+ // We didn't actually prove the CLI lacks --profile; we just failed to confirm it.
182+ LOG .warn (
183+ "Could not confirm --profile support for Databricks CLI {} (requires >= {}). "
184+ + "Falling back to --host." ,
185+ version ,
186+ CLI_VERSION_FOR_PROFILE );
187+ } else {
188+ LOG .warn (
189+ "Databricks CLI {} does not support --profile (requires >= {}). Falling back to --host." ,
190+ version ,
191+ CLI_VERSION_FOR_PROFILE );
192+ }
193+ return buildHostArgs (cliPath , config );
194+ }
195+
196+ return new ArrayList <>(
197+ Arrays .asList (cliPath , "auth" , "token" , "--profile" , config .getProfile ()));
198+ }
199+
200+ /**
201+ * Builds the CLI command arguments using --host (legacy path).
202+ *
203+ * @param cliPath Path to the databricks CLI executable
204+ * @param config Configuration containing host, account ID, workspace ID, etc.
205+ * @return List of command arguments
206+ */
207+ List <String > buildHostArgs (String cliPath , DatabricksConfig config ) {
208+ List <String > cmd =
209+ new ArrayList <>(Arrays .asList (cliPath , "auth" , "token" , "--host" , config .getHost ()));
210+ if (config .getClientType () == ClientType .ACCOUNT ) {
211+ cmd .add ("--account-id" );
212+ cmd .add (config .getAccountId ());
213+ }
214+ return cmd ;
215+ }
216+
217+ // ---- Version detection ----
218+
219+ /**
220+ * Returns the CLI version, catching subprocess failures so the caller can proceed with the
221+ * conservative fallback. Successful results are cached per {@code cliPath} for the process
222+ * lifetime; failures are not cached and will be retried on the next call.
223+ */
224+ DatabricksCliVersion getCliVersion (String cliPath , Environment env ) {
225+ DatabricksCliVersion cached = VERSION_CACHE .get (cliPath );
226+ if (cached != null ) {
227+ return cached ;
228+ }
229+
230+ try {
231+ DatabricksCliVersion version = probeCliVersion (cliPath , env );
232+ VERSION_CACHE .put (cliPath , version );
233+ return version ;
234+ } catch (Exception e ) {
235+ LOG .warn (
236+ "Failed to detect Databricks CLI version: {}. Falling back to conservative flag set." ,
237+ e .getMessage ());
238+ return DatabricksCliVersion .UNKNOWN ;
239+ }
240+ }
241+
242+ /**
243+ * Runs {@code databricks version --output json} and returns the parsed {@link
244+ * DatabricksCliVersion}.
245+ */
246+ DatabricksCliVersion probeCliVersion (String cliPath , Environment env ) throws IOException {
247+ List <String > versionArgs = Arrays .asList (cliPath , "version" , "--output" , "json" );
248+ List <String > cmd = OSUtils .get (env ).getCliExecutableCommand (versionArgs );
249+
250+ ProcessBuilder pb = new ProcessBuilder (cmd );
251+ pb .environment ().putAll (env .getEnv ());
252+ Process process = pb .start ();
253+
254+ try {
255+ if (!process .waitFor (VERSION_PROBE_TIMEOUT_SECONDS , TimeUnit .SECONDS )) {
256+ process .destroyForcibly ();
257+ throw new IOException (
258+ "timed out after "
259+ + VERSION_PROBE_TIMEOUT_SECONDS
260+ + "s waiting for `databricks version`" );
261+ }
262+ } catch (InterruptedException e ) {
263+ Thread .currentThread ().interrupt ();
264+ throw new IOException ("interrupted waiting for `databricks version`" , e );
265+ }
266+
267+ if (process .exitValue () != 0 ) {
268+ String stderr = readStream (process .getErrorStream ());
269+ throw new IOException (
270+ "`databricks version` exited with code " + process .exitValue () + ": " + stderr );
271+ }
272+
273+ return parseCliVersion (readStream (process .getInputStream ()));
274+ }
275+
276+ /**
277+ * Parses the JSON output of {@code databricks version --output json}.
278+ *
279+ * <p>Takes Major/Minor/Patch from the JSON's pre-parsed numeric fields. The Prerelease field and
280+ * the Version string are intentionally ignored: for our feature-gate purposes the base triple is
281+ * sufficient, and the (0, 0, 0) case already identifies the default dev build (a CLI built
282+ * without version metadata leaves these fields at their zero defaults).
283+ *
284+ * <p>Returns {@link DatabricksCliVersion#UNKNOWN} on failure so that an unparseable version
285+ * disables every feature gate.
286+ */
287+ static DatabricksCliVersion parseCliVersion (String output ) {
288+ try {
289+ JsonNode node = MAPPER .readTree (output );
290+ JsonNode major = node .get ("Major" );
291+ JsonNode minor = node .get ("Minor" );
292+ JsonNode patch = node .get ("Patch" );
293+ if (major == null || minor == null || patch == null ) {
294+ LOG .debug (
295+ "Failed to parse Databricks CLI version: missing Major/Minor/Patch in {}" , output );
296+ return DatabricksCliVersion .UNKNOWN ;
297+ }
298+ return new DatabricksCliVersion (major .asInt (), minor .asInt (), patch .asInt ());
299+ } catch (JsonProcessingException e ) {
300+ LOG .debug (
301+ "Failed to parse Databricks CLI version from output: {} ({})" , output , e .getMessage ());
302+ return DatabricksCliVersion .UNKNOWN ;
303+ }
304+ }
305+
306+ private static String readStream (InputStream stream ) throws IOException {
307+ return new String (IOUtils .toByteArray (stream ), StandardCharsets .UTF_8 );
308+ }
309+
310+ // ---- Scope validation ----
311+
154312 /**
155313 * Validate that the token's scopes match the requested scopes from the config.
156314 *
0 commit comments