Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"file_override_var": "from-overrides-file"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
bundle:
  # $UNIQUE_NAME is substituted by `envsubst` in the test script before deploy.
  name: test-bundle-$UNIQUE_NAME

variables:
  # Single unambiguous match: a remote value of "main" maps back to this var.
  my_catalog:
    default: main
  # Both resolve to the same value ("raw_data") to test ambiguity:
  # when multiple variables match, resolution is skipped and the value
  # stays hardcoded.
  landing_schema:
    default: raw_data
  curated_schema:
    default: raw_data
  # Variable with a value set only in the target below (no default here).
  target_env:
    description: "Environment name"
  # Variable set via .databricks/bundle/<target>/variable-overrides.json.
  file_override_var:
    description: "Set from variable-overrides.json"
  # Complex (non-scalar) variable to verify resolution does not panic on it.
  cluster_config:
    type: complex
    default:
      node_type_id: Standard_DS3_v2
      num_workers: 2

resources:
  jobs:
    my_job:
      parameters:
        - name: catalog
          default: ${var.my_catalog}
        - name: env
          default: ${var.target_env}
        - name: file_val
          default: ${var.file_override_var}
        # Both use variables that resolve to the same value ("raw_data").
        # Tests disambiguation: original reference is preserved on Replace.
        - name: landing
          default: ${var.landing_schema}
        - name: curated
          default: ${var.curated_schema}
      tasks:
        - task_key: main
          notebook_task:
            # {{workspace_user_name}} is a placeholder expanded by the test
            # harness, not by YAML.
            notebook_path: /Users/{{workspace_user_name}}/notebook

targets:
  default:
    mode: development
    variables:
      # Target-level value for a variable that has no default.
      target_env: production

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

47 changes: 47 additions & 0 deletions acceptance/bundle/config-remote-sync/resolve_variables/output.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

=== Add parameters remotely
=== Detect and save changes
Detected changes in 1 resource(s):

Resource: resources.jobs.my_job
parameters[name='data_catalog']: add
parameters[name='deploy_env']: add
parameters[name='file_sourced']: add
parameters[name='region']: add
parameters[name='some_schema']: add



=== Configuration changes

>>> diff.py databricks.yml.backup databricks.yml
--- databricks.yml.backup
+++ databricks.yml
@@ -39,4 +39,14 @@
- name: curated
default: ${var.curated_schema}
+ - default: ${var.my_catalog}
+ name: data_catalog
+ - default: ${var.target_env}
+ name: deploy_env
+ - default: ${var.file_override_var}
+ name: file_sourced
+ - default: us-west-2
+ name: region
+ - default: raw_data
+ name: some_schema
tasks:
- task_key: main

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.my_job

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
35 changes: 35 additions & 0 deletions acceptance/bundle/config-remote-sync/resolve_variables/script
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
#!/bin/bash
# Acceptance test: parameters added remotely with values that equal bundle
# variable values should be written back as ${var.*} references by
# `bundle config-remote-sync --save` (unless the match is ambiguous or absent).

# Render the bundle config, substituting $UNIQUE_NAME from the environment.
envsubst < databricks.yml.tmpl > databricks.yml

# Always tear down the deployed bundle, even if a later step fails.
cleanup() {
    trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

$CLI bundle deploy
job_id="$(read_id.py my_job)"

title "Add parameters remotely"
# Quote "$job_id" to avoid word-splitting/globbing of the expansion.
# Inside the heredoc, \${...} is escaped so the shell does not expand it;
# the literal text is documentation for the Python statements below.
edit_resource.py jobs "$job_id" <<EOF
# "main" matches exactly one variable (my_catalog) -> should be restored to \${var.my_catalog}
r["parameters"].append({"name": "data_catalog", "default": "main"})
# "raw_data" matches two variables (landing_schema, curated_schema) -> ambiguous, stays hardcoded
r["parameters"].append({"name": "some_schema", "default": "raw_data"})
# "us-west-2" matches no variable -> stays hardcoded
r["parameters"].append({"name": "region", "default": "us-west-2"})
# "production" matches target_env (set in target, not default) -> should be restored to \${var.target_env}
r["parameters"].append({"name": "deploy_env", "default": "production"})
# "from-overrides-file" matches file_override_var (set via variable-overrides.json) -> should be restored
r["parameters"].append({"name": "file_sourced", "default": "from-overrides-file"})
EOF

title "Detect and save changes"
echo
# Keep a pristine copy so the config diff below shows only what --save wrote.
cp databricks.yml databricks.yml.backup
$CLI bundle config-remote-sync --save

title "Configuration changes"
echo
trace diff.py databricks.yml.backup databricks.yml
rm databricks.yml.backup
11 changes: 11 additions & 0 deletions acceptance/bundle/config-remote-sync/resolve_variables/test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Harness settings for this acceptance test.
# Runs against a real cloud workspace and requires Unity Catalog.
Cloud = true
RequiresUnityCatalog = true

# NOTE(review): HTTP request recording is disabled for this test — presumably
# intentional for a cloud-only run; confirm against the harness conventions.
RecordRequests = false
# Locally generated files excluded from output comparison/cleanup checks.
Ignore = [".databricks", "databricks.yml", "databricks.yml.backup"]

[Env]
# Feature flag enabling the experimental YAML sync path under test.
DATABRICKS_BUNDLE_ENABLE_EXPERIMENTAL_YAML_SYNC = "true"

[EnvMatrix]
# Run the test once per deployment engine.
DATABRICKS_BUNDLE_ENGINE = ["direct", "terraform"]
Loading
Loading