Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 2 additions & 4 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,11 +1,9 @@
# Changelog

## 1.8.8 (2025-10-15)
## 1.9.1 (2025-10-15)

- Added `--output-file` argument
- Added performance enhancement query strategy

## 1.8.7 (2025-10-14)

- Added tab completion
- Added enhanced logging decorators

Expand Down
9 changes: 9 additions & 0 deletions examples/databricks/serverless/outputs/deployment.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{
"stack_name": "stackql-serverless",
"stack_env": "prd",
"databricks_workspace_name": "stackql-serverless-prd-workspace",
"databricks_workspace_id": "4014389171618363",
"databricks_deployment_name": "dbc-5a3a87f7-6914",
"databricks_workspace_status": "RUNNING",
"databricks_workspace_url": "https://dbc-5a3a87f7-6914.cloud.databricks.com"
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,9 @@ SELECT
SELECT
'{{ workspace_name }}' AS databricks_workspace_name,
workspace_id AS databricks_workspace_id,
deployment_name AS databricks_deployment_name
deployment_name AS databricks_deployment_name,
workspace_status AS databricks_workspace_status,
'https://' || deployment_name || '.cloud.databricks.com' AS databricks_workspace_url
FROM databricks_account.provisioning.workspaces
WHERE account_id = '{{ databricks_account_id }}'
AND workspace_name = '{{ workspace_name }}'
Expand Down
11 changes: 9 additions & 2 deletions examples/databricks/serverless/stackql_manifest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -381,7 +381,9 @@ resources:
exports:
- databricks_workspace_name
- databricks_workspace_id
- databricks_deployment_name
- databricks_deployment_name
- databricks_workspace_status
- databricks_workspace_url

- name: databricks_account/workspace_group
props:
Expand Down Expand Up @@ -489,4 +491,9 @@ resources:
securable_type = 'external_location' AND
deployment_name = '{{ databricks_deployment_name }}';


exports:
- databricks_workspace_name
- databricks_workspace_id
- databricks_deployment_name
- databricks_workspace_status
- databricks_workspace_url
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

setup(
name='stackql-deploy',
version='1.8.8',
version='1.9.1',
description='Model driven resource provisioning and deployment framework using StackQL.',
long_description=readme,
long_description_content_type='text/x-rst',
Expand Down
2 changes: 1 addition & 1 deletion stackql_deploy/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = '1.8.8'
__version__ = '1.9.1'
12 changes: 8 additions & 4 deletions stackql_deploy/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,10 +162,12 @@ def setup_command_context(
@click.argument('stack_env')
@add_common_options
@add_stackql_kwarg_options
@click.option('--output-file', default=None,
help='File path to write deployment outputs as JSON.')
@click.pass_context
def build(ctx, stack_dir, stack_env, log_level, env_file,
env, dry_run, show_queries, on_failure,
custom_registry, download_dir ):
custom_registry, download_dir, output_file):
"""Create or update resources."""

from .cmd.build import StackQLProvisioner
Expand All @@ -184,7 +186,7 @@ def build(ctx, stack_dir, stack_env, log_level, env_file,
f"to environment: [{stack_env}]")
print_unicode_box(message, BorderColor.YELLOW)

provisioner.run(dry_run, show_queries, on_failure)
provisioner.run(dry_run, show_queries, on_failure, output_file)
click.echo("🎯 dry-run build complete" if dry_run
else "🚀 build complete")

Expand Down Expand Up @@ -232,9 +234,11 @@ def teardown(ctx, stack_dir, stack_env, log_level, env_file,
@click.argument('stack_env')
@add_common_options
@add_stackql_kwarg_options
@click.option('--output-file', default=None,
help='File path to write deployment outputs as JSON.')
@click.pass_context
def test(ctx, stack_dir, stack_env, log_level, env_file,
env, dry_run, show_queries, on_failure, custom_registry, download_dir):
env, dry_run, show_queries, on_failure, custom_registry, download_dir, output_file):
"""Run test queries for the stack."""

from .cmd.test import StackQLTestRunner
Expand All @@ -253,7 +257,7 @@ def test(ctx, stack_dir, stack_env, log_level, env_file,
f"in environment: [{stack_env}]")
print_unicode_box(message, BorderColor.YELLOW)

test_runner.run(dry_run, show_queries, on_failure)
test_runner.run(dry_run, show_queries, on_failure, output_file)
click.echo(f"🔍 tests complete (dry run: {dry_run})")

#
Expand Down
66 changes: 66 additions & 0 deletions stackql_deploy/cmd/base.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
# cmd/base.py
import os
import json
from ..lib.utils import (
perform_retries,
run_stackql_command,
Expand Down Expand Up @@ -468,3 +470,67 @@ def run_command(self, command_query, command_retries, command_retry_delay, dry_r
)
else:
self.logger.info("command query not configured, skipping command...")

def process_stack_exports(self, dry_run, output_file=None):
    """Write root-level manifest exports to a JSON file.

    Collects the variable names listed in the manifest's top-level
    ``exports`` section from ``self.global_context`` and serializes them,
    together with the automatically-included ``stack_name`` and
    ``stack_env``, to ``output_file``.

    Args:
        dry_run: if True, only log what would be exported; nothing is written.
        output_file: destination path for the JSON file; if falsy, exports
            are skipped entirely (no-op).

    Exits via ``catch_error_and_exit`` when a listed variable is missing
    from the context or when the file cannot be written.
    """
    if not output_file:
        return

    self.logger.info("📦 processing stack exports...")

    manifest_exports = self.manifest.get('exports', [])

    # stack_name and stack_env are always added automatically; drop them
    # from the manifest list up front so they are neither exported twice
    # nor double-counted in the dry-run summary.
    export_names = [v for v in manifest_exports if v not in ('stack_name', 'stack_env')]

    if dry_run:
        total_vars = len(export_names) + 2  # +2 for automatic stack_name and stack_env
        self.logger.info(
            f"📁 dry run: would export {total_vars} variables to {output_file} "
            f"(including automatic stack_name and stack_env)"
        )
        return

    # Always include stack_name and stack_env automatically
    export_data = {
        'stack_name': self.stack_name,
        'stack_env': self.stack_env,
    }
    missing_vars = []

    for var_name in export_names:
        if var_name not in self.global_context:
            missing_vars.append(var_name)
            continue
        value = self.global_context[var_name]
        # Values that were serialized to JSON strings earlier in the run
        # ('[...]' or '{...}') are parsed back to their original type;
        # anything that fails to parse stays a plain string.
        if isinstance(value, str) and value[:1] in ('[', '{'):
            try:
                value = json.loads(value)
            except (json.JSONDecodeError, ValueError):
                pass
        export_data[var_name] = value

    if missing_vars:
        # A listed export that never materialized is a manifest/runtime
        # mismatch the user must fix; fail the deployment explicitly.
        catch_error_and_exit(
            f"exports failed: variables not found in context: {missing_vars}",
            self.logger
        )

    # Ensure destination directory exists (exist_ok handles concurrent creation)
    dest_dir = os.path.dirname(output_file)
    if dest_dir:
        os.makedirs(dest_dir, exist_ok=True)

    # Write JSON file
    try:
        with open(output_file, 'w') as f:
            json.dump(export_data, f, indent=2)
        self.logger.info(f"✅ exported {len(export_data)} variables to {output_file}")
    except Exception as e:
        catch_error_and_exit(f"failed to write exports file {output_file}: {e}", self.logger)
5 changes: 4 additions & 1 deletion stackql_deploy/cmd/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ def process_script_resource(self, resource, dry_run, full_context):
except Exception as e:
catch_error_and_exit(f"script failed: {e}", self.logger)

def run(self, dry_run, show_queries, on_failure):
def run(self, dry_run, show_queries, on_failure, output_file=None):

start_time = datetime.datetime.now()

Expand Down Expand Up @@ -409,5 +409,8 @@ def run(self, dry_run, show_queries, on_failure):
elif type == 'query':
self.logger.info(f"✅ successfully exported variables for query in {resource['name']}")

# Process stack-level exports after all resources are deployed
self.process_stack_exports(dry_run, output_file)

elapsed_time = datetime.datetime.now() - start_time
self.logger.info(f"deployment completed in {elapsed_time}")
6 changes: 5 additions & 1 deletion stackql_deploy/cmd/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from .base import StackQLBase

class StackQLTestRunner(StackQLBase):
def run(self, dry_run, show_queries, on_failure):
def run(self, dry_run, show_queries, on_failure, output_file=None):

start_time = datetime.datetime.now()

Expand Down Expand Up @@ -146,3 +146,7 @@ def run(self, dry_run, show_queries, on_failure):

elapsed_time = datetime.datetime.now() - start_time
self.logger.info(f"deployment completed in {elapsed_time}")

# Process stack-level exports if specified
if output_file:
self.process_stack_exports(dry_run, output_file)
30 changes: 30 additions & 0 deletions website/docs/cli-reference/build.md
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ Command used to create or update resources in a StackQL environment.
|<span class="nowrap">`-e`</span> <span class="nowrap">`--env`</span>|Set additional environment variables (can be used multiple times) | `--env DB_USER=admin` |
|<span class="nowrap">`--dry-run`</span>|Perform a dry run of the operation. No changes will be made | |
|<span class="nowrap">`--show-queries`</span>|Display the queries executed in the output logs | |
|<span class="nowrap">`--output-file`</span>|Export deployment variables to a JSON file after successful deployment | `--output-file ./outputs/deploy.json` |
|<span class="nowrap">`--download-dir`</span>|Custom download directory for StackQL | `/etc/stackql` |
|<span class="nowrap">`--custom-registry`</span>|Custom StackQL provider registry URL | `https://myreg` |

Expand Down Expand Up @@ -91,3 +92,32 @@ Use a custom environment file `.env.prod` to supply environment variables to a s
stackql build gcp-stack prod \
--env-file .env.prod
```

### Export deployment variables to a file

Deploy a stack and export key deployment variables to a JSON file for use in CI/CD workflows or downstream processes:

```bash
stackql-deploy build databricks-stack prod \
--output-file ./outputs/deployment.json \
-e DATABRICKS_ACCOUNT_ID=12345678-1234-1234-1234-123456789012
```

This will create a JSON file containing the exported variables defined in the `exports` section of your `stackql_manifest.yml`:

```json
{
"stack_name": "my-databricks-workspace",
"stack_env": "prod",
"workspace_name": "my-databricks-workspace-prod",
"workspace_id": "123456789012345",
"deployment_name": "dbc-ab123456-789a",
"workspace_status": "RUNNING"
}
```

:::tip

`stack_name` and `stack_env` are automatically included in all exports and do not need to be listed in the manifest.

:::
46 changes: 46 additions & 0 deletions website/docs/github-actions.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ The following inputs can be configured for the `stackql-deploy` GitHub Action:
| `stack_env` | The environment to deploy or test (e.g., `dev`, `prod`) | `dev` |
| `env_vars` | (Optional) Environment variables or secrets to import into a stack | `GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo` |
| `env_file` | (Optional) Environment variables sourced from a file | `.env.prod` |
| `output_file` | (Optional) File path to export deployment variables as JSON | `./outputs/deployment.json` |
| `show_queries` | (Optional) Show the queries executed in the output logs | `true` |
| `log_level` | (Optional) Set the logging level (`INFO` or `DEBUG`, defaults to `INFO`) | `DEBUG` |
| `dry_run` | (Optional) Perform a dry run of the operation | `true` |
Expand Down Expand Up @@ -115,3 +116,48 @@ jobs:
stack_env: 'sit'
env_vars: 'GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo'
```

### Deploy and Export Variables

This example shows how to deploy a stack and export deployment variables to a JSON file for use in subsequent workflow steps:

```yaml
jobs:
deploy-and-process:
name: Deploy Stack and Process Outputs
runs-on: ubuntu-latest
env:
DATABRICKS_CLIENT_ID: ${{ secrets.DATABRICKS_CLIENT_ID }}
DATABRICKS_CLIENT_SECRET: ${{ secrets.DATABRICKS_CLIENT_SECRET }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

steps:
- name: Checkout
uses: actions/checkout@v4

- name: Deploy Databricks Stack
uses: stackql/setup-deploy@v1.0.1
with:
command: 'build'
stack_dir: 'examples/databricks/serverless'
stack_env: 'prod'
output_file: './deployment-outputs.json'
env_vars: |
DATABRICKS_ACCOUNT_ID=${{ secrets.DATABRICKS_ACCOUNT_ID }}
AWS_REGION=us-east-1
AWS_ACCOUNT_ID=${{ secrets.AWS_ACCOUNT_ID }}

- name: Parse Deployment Outputs
id: parse_outputs
run: |
echo "workspace_name=$(jq -r '.databricks_workspace_name' ./deployment-outputs.json)" >> $GITHUB_OUTPUT
echo "workspace_id=$(jq -r '.databricks_workspace_id' ./deployment-outputs.json)" >> $GITHUB_OUTPUT
echo "workspace_status=$(jq -r '.databricks_workspace_status' ./deployment-outputs.json)" >> $GITHUB_OUTPUT

- name: Use Exported Variables
run: |
echo "Deployed workspace: ${{ steps.parse_outputs.outputs.workspace_name }}"
echo "Workspace ID: ${{ steps.parse_outputs.outputs.workspace_id }}"
echo "Status: ${{ steps.parse_outputs.outputs.workspace_status }}"
```
12 changes: 12 additions & 0 deletions website/docs/manifest-file.md
Original file line number Diff line number Diff line change
Expand Up @@ -171,6 +171,12 @@ the fields within the __`stackql_manifest.yml`__ file are described in further d

***

### <span className="docFieldHeading">`exports`</span>

<ManifestFields.Exports />

***

### <span className="docFieldHeading">`version`</span>

<ManifestFields.Version />
Expand Down Expand Up @@ -406,6 +412,12 @@ resources:
- {dest_range: "10.200.0.0/24", next_hop_ip: "10.240.0.20"}
- {dest_range: "10.200.1.0/24", next_hop_ip: "10.240.0.21"}
- {dest_range: "10.200.2.0/24", next_hop_ip: "10.240.0.22"}
exports:
- vpc_name
- vpc_link
- subnet_name
- address
- target_pool_link
```

</File>
23 changes: 23 additions & 0 deletions website/docs/manifest_fields/exports.mdx
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
**Type**: `array of strings` (optional)

**Description**: List of variable names to export to a JSON file after deployment completion. Variables must exist in the deployment context (from globals or resource exports). Use with the `--output-file` CLI argument to specify the destination file.

**Usage**: Use this to extract key deployment outputs for use in CI/CD pipelines, downstream processes, or for record-keeping.

**Example**:

```yaml
exports:
- databricks_workspace_name
- databricks_workspace_id
- aws_iam_role_arn
- deployment_timestamp
```

**Notes**:
- `stack_name` and `stack_env` are automatically included in exports and do not need to be listed
- Variables are exported exactly as they exist in the deployment context
- Complex objects and arrays are preserved as JSON structures
- If a listed variable doesn't exist in the context, deployment will fail
- Requires `--output-file` CLI argument to be specified, otherwise exports are skipped
- Exported JSON file contains a flat object with variable names as keys
1 change: 1 addition & 0 deletions website/docs/manifest_fields/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,4 +22,5 @@ export { default as ResourcePropDescription } from "./resources/props/descriptio
export { default as ResourcePropValue } from "./resources/props/value.mdx";
export { default as ResourcePropValues } from "./resources/props/values.mdx";
export { default as ResourcePropMerge } from "./resources/props/merge.mdx";
export { default as Exports } from "./exports.mdx";
export { default as Version } from "./version.mdx";
Loading