From 5298817e207b5dfad7c25f347016eb3b57700f8a Mon Sep 17 00:00:00 2001
From: Jeffrey Aven
Date: Thu, 16 Oct 2025 08:10:21 +1100
Subject: [PATCH] v1.9.1

---
 CHANGELOG.md                                  |  6 +-
 .../serverless/outputs/deployment.json        |  9 +++
 .../databricks_account/workspace.iql          |  4 +-
 .../serverless/stackql_manifest.yml           | 11 +++-
 setup.py                                      |  2 +-
 stackql_deploy/__init__.py                    |  2 +-
 stackql_deploy/cli.py                         | 12 ++--
 stackql_deploy/cmd/base.py                    | 66 +++++++++++++++++++
 stackql_deploy/cmd/build.py                   |  5 +-
 stackql_deploy/cmd/test.py                    |  6 +-
 website/docs/cli-reference/build.md           | 30 +++++++++
 website/docs/github-actions.md                | 46 +++++++++++++
 website/docs/manifest-file.md                 | 12 ++++
 website/docs/manifest_fields/exports.mdx      | 23 +++++++
 website/docs/manifest_fields/index.js         |  1 +
 15 files changed, 220 insertions(+), 15 deletions(-)
 create mode 100644 examples/databricks/serverless/outputs/deployment.json
 create mode 100644 website/docs/manifest_fields/exports.mdx

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 18ca06c..58f0745 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,11 +1,9 @@
 # Changelog
 
-## 1.8.8 (2025-10-15)
+## 1.9.1 (2025-10-15)
 
+- added `--output-file` argument
 - Added performance enhancement query strategy
-
-## 1.8.7 (2025-10-14)
-
 - Added tab completion
 - Added enhanced logging decorators
 
diff --git a/examples/databricks/serverless/outputs/deployment.json b/examples/databricks/serverless/outputs/deployment.json
new file mode 100644
index 0000000..70b16e7
--- /dev/null
+++ b/examples/databricks/serverless/outputs/deployment.json
@@ -0,0 +1,9 @@
+{
+  "stack_name": "stackql-serverless",
+  "stack_env": "prd",
+  "databricks_workspace_name": "stackql-serverless-prd-workspace",
+  "databricks_workspace_id": "4014389171618363",
+  "databricks_deployment_name": "dbc-5a3a87f7-6914",
+  "databricks_workspace_status": "RUNNING",
+  "databricks_workspace_url": "https://dbc-5a3a87f7-6914.cloud.databricks.com"
+}
\ No newline at end of file
diff --git a/examples/databricks/serverless/resources/databricks_account/workspace.iql b/examples/databricks/serverless/resources/databricks_account/workspace.iql
index 292b2b0..1a7efc1 100644
--- a/examples/databricks/serverless/resources/databricks_account/workspace.iql
+++ b/examples/databricks/serverless/resources/databricks_account/workspace.iql
@@ -25,7 +25,9 @@ SELECT
 SELECT
 '{{ workspace_name }}' AS databricks_workspace_name,
 workspace_id AS databricks_workspace_id,
-deployment_name AS databricks_deployment_name
+deployment_name AS databricks_deployment_name,
+workspace_status AS databricks_workspace_status,
+'https://' || deployment_name || '.cloud.databricks.com' AS databricks_workspace_url
 FROM databricks_account.provisioning.workspaces
 WHERE account_id = '{{ databricks_account_id }}'
 AND workspace_name = '{{ workspace_name }}'
diff --git a/examples/databricks/serverless/stackql_manifest.yml b/examples/databricks/serverless/stackql_manifest.yml
index c20030c..b9f540e 100644
--- a/examples/databricks/serverless/stackql_manifest.yml
+++ b/examples/databricks/serverless/stackql_manifest.yml
@@ -381,7 +381,9 @@ resources:
     exports:
       - databricks_workspace_name
       - databricks_workspace_id
-      - databricks_deployment_name
+      - databricks_deployment_name
+      - databricks_workspace_status
+      - databricks_workspace_url
 
   - name: databricks_account/workspace_group
     props:
@@ -489,4 +491,9 @@ resources:
         securable_type = 'external_location'
         AND deployment_name = '{{ databricks_deployment_name }}';
 
-
+exports:
+  - databricks_workspace_name
+  - databricks_workspace_id
+  - databricks_deployment_name
+  - databricks_workspace_status
+  - databricks_workspace_url
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 20a3ad3..bd6ff6f 100644
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@
 
 setup(
     name='stackql-deploy',
-    version='1.8.8',
+    version='1.9.1',
     description='Model driven resource provisioning and deployment framework using StackQL.',
     long_description=readme,
     long_description_content_type='text/x-rst',
diff --git a/stackql_deploy/__init__.py b/stackql_deploy/__init__.py
index 8108e4d..487d853 100644
--- a/stackql_deploy/__init__.py
+++ b/stackql_deploy/__init__.py
@@ -1 +1 @@
-__version__ = '1.8.8'
+__version__ = '1.9.1'
diff --git a/stackql_deploy/cli.py b/stackql_deploy/cli.py
index 60b3109..d8c0016 100644
--- a/stackql_deploy/cli.py
+++ b/stackql_deploy/cli.py
@@ -162,10 +162,12 @@ def setup_command_context(
 @click.argument('stack_env')
 @add_common_options
 @add_stackql_kwarg_options
+@click.option('--output-file', default=None,
+              help='File path to write deployment outputs as JSON.')
 @click.pass_context
 def build(ctx, stack_dir, stack_env, log_level, env_file,
           env, dry_run, show_queries, on_failure,
-          custom_registry, download_dir ):
+          custom_registry, download_dir, output_file):
     """Create or update resources."""
     from .cmd.build import StackQLProvisioner
 
@@ -184,7 +186,7 @@ def build(ctx, stack_dir, stack_env, log_level, env_file,
                f"to environment: [{stack_env}]")
     print_unicode_box(message, BorderColor.YELLOW)
 
-    provisioner.run(dry_run, show_queries, on_failure)
+    provisioner.run(dry_run, show_queries, on_failure, output_file)
 
     click.echo("🎯 dry-run build complete" if dry_run else "🚀 build complete")
 
@@ -232,9 +234,11 @@ def teardown(ctx, stack_dir, stack_env, log_level, env_file,
 @click.argument('stack_env')
 @add_common_options
 @add_stackql_kwarg_options
+@click.option('--output-file', default=None,
+              help='File path to write deployment outputs as JSON.')
 @click.pass_context
 def test(ctx, stack_dir, stack_env, log_level, env_file,
-         env, dry_run, show_queries, on_failure, custom_registry, download_dir):
+         env, dry_run, show_queries, on_failure, custom_registry, download_dir, output_file):
     """Run test queries for the stack."""
     from .cmd.test import StackQLTestRunner
 
@@ -253,7 +257,7 @@ def test(ctx, stack_dir, stack_env, log_level, env_file,
                f"in environment: [{stack_env}]")
     print_unicode_box(message, BorderColor.YELLOW)
 
-    test_runner.run(dry_run, show_queries, on_failure)
+    test_runner.run(dry_run, show_queries, on_failure, output_file)
 
     click.echo(f"🔍 tests complete (dry run: {dry_run})")
 #
diff --git a/stackql_deploy/cmd/base.py b/stackql_deploy/cmd/base.py
index 038a196..65c2e75 100644
--- a/stackql_deploy/cmd/base.py
+++ b/stackql_deploy/cmd/base.py
@@ -1,4 +1,6 @@
 # cmd/base.py
+import os
+import json
 from ..lib.utils import (
     perform_retries,
     run_stackql_command,
@@ -468,3 +470,67 @@ def run_command(self, command_query, command_retries, command_retry_delay, dry_r
             )
         else:
             self.logger.info("command query not configured, skipping command...")
+
+    def process_stack_exports(self, dry_run, output_file=None):
+        """
+        Process root-level exports from manifest and write to JSON file
+        """
+        if not output_file:
+            return
+
+        self.logger.info("📦 processing stack exports...")
+
+        manifest_exports = self.manifest.get('exports', [])
+
+        if dry_run:
+            total_vars = len(manifest_exports) + 2  # +2 for stack_name and stack_env
+            self.logger.info(
+                f"📁 dry run: would export {total_vars} variables to {output_file} "
+                f"(including automatic stack_name and stack_env)"
+            )
+            return
+
+        # Collect data from global context
+        export_data = {}
+        missing_vars = []
+
+        # Always include stack_name and stack_env automatically
+        export_data['stack_name'] = self.stack_name
+        export_data['stack_env'] = self.stack_env
+
+        for var_name in manifest_exports:
+            # Skip stack_name and stack_env if they're explicitly listed (already added above)
+            if var_name in ('stack_name', 'stack_env'):
+                continue
+
+            if var_name in self.global_context:
+                value = self.global_context[var_name]
+                # Parse JSON strings back to their original type if they were serialized
+                try:
+                    if isinstance(value, str) and (value.startswith('[') or value.startswith('{')):
+                        value = json.loads(value)
+                except (json.JSONDecodeError, ValueError):
+                    # Keep as string if not valid JSON
+                    pass
+                export_data[var_name] = value
+            else:
+                missing_vars.append(var_name)
+
+        if missing_vars:
+            catch_error_and_exit(
+                f"exports failed: variables not found in context: {missing_vars}",
+                self.logger
+            )
+
+        # Ensure destination directory exists
+        dest_dir = os.path.dirname(output_file)
+        if dest_dir and not os.path.exists(dest_dir):
+            os.makedirs(dest_dir, exist_ok=True)
+
+        # Write JSON file
+        try:
+            with open(output_file, 'w') as f:
+                json.dump(export_data, f, indent=2)
+            self.logger.info(f"✅ exported {len(export_data)} variables to {output_file}")
+        except Exception as e:
+            catch_error_and_exit(f"failed to write exports file {output_file}: {e}", self.logger)
diff --git a/stackql_deploy/cmd/build.py b/stackql_deploy/cmd/build.py
index f562d09..dbd1b0c 100644
--- a/stackql_deploy/cmd/build.py
+++ b/stackql_deploy/cmd/build.py
@@ -35,7 +35,7 @@ def process_script_resource(self, resource, dry_run, full_context):
         except Exception as e:
             catch_error_and_exit(f"script failed: {e}", self.logger)
 
-    def run(self, dry_run, show_queries, on_failure):
+    def run(self, dry_run, show_queries, on_failure, output_file=None):
 
         start_time = datetime.datetime.now()
 
@@ -409,5 +409,8 @@ def run(self, dry_run, show_queries, on_failure):
                 elif type == 'query':
                     self.logger.info(f"✅ successfully exported variables for query in {resource['name']}")
 
+        # Process stack-level exports after all resources are deployed
+        self.process_stack_exports(dry_run, output_file)
+
         elapsed_time = datetime.datetime.now() - start_time
         self.logger.info(f"deployment completed in {elapsed_time}")
diff --git a/stackql_deploy/cmd/test.py b/stackql_deploy/cmd/test.py
index 18f4bde..7f61e93 100644
--- a/stackql_deploy/cmd/test.py
+++ b/stackql_deploy/cmd/test.py
@@ -11,7 +11,7 @@
 from .base import StackQLBase
 
 class StackQLTestRunner(StackQLBase):
-    def run(self, dry_run, show_queries, on_failure):
+    def run(self, dry_run, show_queries, on_failure, output_file=None):
 
         start_time = datetime.datetime.now()
 
@@ -146,3 +146,7 @@ def run(self, dry_run, show_queries, on_failure):
 
         elapsed_time = datetime.datetime.now() - start_time
         self.logger.info(f"deployment completed in {elapsed_time}")
+
+        # Process stack-level exports if specified
+        if output_file:
+            self.process_stack_exports(dry_run, output_file)
diff --git a/website/docs/cli-reference/build.md b/website/docs/cli-reference/build.md
index d56b4df..f568d66 100644
--- a/website/docs/cli-reference/build.md
+++ b/website/docs/cli-reference/build.md
@@ -52,6 +52,7 @@ Command used to create or update resources in a StackQL environment.
 |`-e` `--env`|Set additional environment variables (can be used multiple times) | `--env DB_USER=admin` |
 |`--dry-run`|Perform a dry run of the operation. No changes will be made | |
 |`--show-queries`|Display the queries executed in the output logs | |
+|`--output-file`|Export deployment variables to a JSON file after successful deployment | `--output-file ./outputs/deploy.json` |
 |`--download-dir`|Custom download directory for StackQL | `/etc/stackql` |
 |`--custom-registry`|Custom StackQL provider registry URL | `https://myreg` |
 
@@ -91,3 +92,32 @@ Use a custom environment file `.env.prod` to supply environment variables to a s
 stackql build gcp-stack prod \
 --env-file .env.prod
 ```
+
+### Export deployment variables to a file
+
+Deploy a stack and export key deployment variables to a JSON file for use in CI/CD workflows or downstream processes:
+
+```bash
+stackql-deploy build databricks-stack prod \
+--output-file ./outputs/deployment.json \
+-e DATABRICKS_ACCOUNT_ID=12345678-1234-1234-1234-123456789012
+```
+
+This will create a JSON file containing the exported variables defined in the `exports` section of your `stackql_manifest.yml`:
+
+```json
+{
+  "stack_name": "my-databricks-workspace",
+  "stack_env": "prod",
+  "workspace_name": "my-databricks-workspace-prod",
+  "workspace_id": "123456789012345",
+  "deployment_name": "dbc-ab123456-789a",
+  "workspace_status": "RUNNING"
+}
+```
+
+:::tip
+
+`stack_name` and `stack_env` are automatically included in all exports and do not need to be listed in the manifest.
+
+:::
diff --git a/website/docs/github-actions.md b/website/docs/github-actions.md
index cc6fbdd..8586d39 100644
--- a/website/docs/github-actions.md
+++ b/website/docs/github-actions.md
@@ -58,6 +58,7 @@ The following inputs can be configured for the `stackql-deploy` GitHub Action:
 | `stack_env` | The environment to deploy or test (e.g., `dev`, `prod`) | `dev` |
 | `env_vars` | (Optional) Environment variables or secrets to import into a stack | `GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo` |
 | `env_file` | (Optional) Environment variables sourced from a file | `.env.prod` |
+| `output_file` | (Optional) File path to export deployment variables as JSON | `./outputs/deployment.json` |
 | `show_queries` | (Optional) Show the queries executed in the output logs | `true` |
 | `log_level` | (Optional) Set the logging level (`INFO` or `DEBUG`, defaults to `INFO`) | `DEBUG` |
 | `dry_run` | (Optional) Perform a dry run of the operation | `true` |
@@ -115,3 +116,48 @@ jobs:
           stack_env: 'sit'
           env_vars: 'GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo'
 ```
+
+### Deploy and Export Variables
+
+This example shows how to deploy a stack and export deployment variables to a JSON file for use in subsequent workflow steps:
+
+```yaml
+jobs:
+  deploy-and-process:
+    name: Deploy Stack and Process Outputs
+    runs-on: ubuntu-latest
+    env:
+      DATABRICKS_CLIENT_ID: ${{ secrets.DATABRICKS_CLIENT_ID }}
+      DATABRICKS_CLIENT_SECRET: ${{ secrets.DATABRICKS_CLIENT_SECRET }}
+      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Deploy Databricks Stack
+        uses: stackql/setup-deploy@v1.0.1
+        with:
+          command: 'build'
+          stack_dir: 'examples/databricks/serverless'
+          stack_env: 'prod'
+          output_file: './deployment-outputs.json'
+          env_vars: |
+            DATABRICKS_ACCOUNT_ID=${{ secrets.DATABRICKS_ACCOUNT_ID }}
+            AWS_REGION=us-east-1
+            AWS_ACCOUNT_ID=${{ secrets.AWS_ACCOUNT_ID }}
+
+      - name: Parse Deployment Outputs
+        id: parse_outputs
+        run: |
+          echo "workspace_name=$(jq -r '.databricks_workspace_name' ./deployment-outputs.json)" >> $GITHUB_OUTPUT
+          echo "workspace_id=$(jq -r '.databricks_workspace_id' ./deployment-outputs.json)" >> $GITHUB_OUTPUT
+          echo "workspace_status=$(jq -r '.databricks_workspace_status' ./deployment-outputs.json)" >> $GITHUB_OUTPUT
+
+      - name: Use Exported Variables
+        run: |
+          echo "Deployed workspace: ${{ steps.parse_outputs.outputs.workspace_name }}"
+          echo "Workspace ID: ${{ steps.parse_outputs.outputs.workspace_id }}"
+          echo "Status: ${{ steps.parse_outputs.outputs.workspace_status }}"
+```
diff --git a/website/docs/manifest-file.md b/website/docs/manifest-file.md
index 89256e0..1dea06c 100644
--- a/website/docs/manifest-file.md
+++ b/website/docs/manifest-file.md
@@ -171,6 +171,12 @@ the fields within the __`stackql_manifest.yml`__ file are described in further d
 
 ***
 
+### `exports`
+
+<Exports />
+
+***
+
 ### `version`
 
 <Version />
@@ -406,6 +412,12 @@ resources:
       - {dest_range: "10.200.0.0/24", next_hop_ip: "10.240.0.20"}
      - {dest_range: "10.200.1.0/24", next_hop_ip: "10.240.0.21"}
      - {dest_range: "10.200.2.0/24", next_hop_ip: "10.240.0.22"}
+exports:
+  - vpc_name
+  - vpc_link
+  - subnet_name
+  - address
+  - target_pool_link
 ```
 
 
diff --git a/website/docs/manifest_fields/exports.mdx b/website/docs/manifest_fields/exports.mdx
new file mode 100644
index 0000000..75c523c
--- /dev/null
+++ b/website/docs/manifest_fields/exports.mdx
@@ -0,0 +1,23 @@
+**Type**: `array of strings` (optional)
+
+**Description**: List of variable names to export to a JSON file after deployment completion. Variables must exist in the deployment context (from globals or resource exports). Use with the `--output-file` CLI argument to specify the destination file.
+
+**Usage**: Use this to extract key deployment outputs for use in CI/CD pipelines, downstream processes, or for record-keeping.
+
+**Example**:
+
+```yaml
+exports:
+  - databricks_workspace_name
+  - databricks_workspace_id
+  - aws_iam_role_arn
+  - deployment_timestamp
+```
+
+**Notes**:
+- `stack_name` and `stack_env` are automatically included in exports and do not need to be listed
+- Variables are exported exactly as they exist in the deployment context
+- Complex objects and arrays are preserved as JSON structures
+- If a listed variable doesn't exist in the context, deployment will fail
+- Requires `--output-file` CLI argument to be specified, otherwise exports are skipped
+- Exported JSON file contains a flat object with variable names as keys
\ No newline at end of file
diff --git a/website/docs/manifest_fields/index.js b/website/docs/manifest_fields/index.js
index d00069d..9592759 100644
--- a/website/docs/manifest_fields/index.js
+++ b/website/docs/manifest_fields/index.js
@@ -22,4 +22,5 @@ export { default as ResourcePropDescription } from "./resources/props/descriptio
 export { default as ResourcePropValue } from "./resources/props/value.mdx";
 export { default as ResourcePropValues } from "./resources/props/values.mdx";
 export { default as ResourcePropMerge } from "./resources/props/merge.mdx";
+export { default as Exports } from "./exports.mdx";
 export { default as Version } from "./version.mdx";