15 changes: 15 additions & 0 deletions acceptance/bundle/config-remote-sync/cli_defaults/output.txt
@@ -66,3 +66,18 @@ Resource: resources.pipelines.pipeline1
path: /Users/{{workspace_user_name}}/notebook
+ channel: PREVIEW
+ edition: CORE

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.job1
delete resources.jobs.job2
delete resources.pipelines.pipeline1

This action will result in the deletion of the following Lakeflow Spark Declarative Pipelines along with the
Streaming Tables (STs) and Materialized Views (MVs) managed by them:
delete resources.pipelines.pipeline1

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
5 changes: 5 additions & 0 deletions acceptance/bundle/config-remote-sync/cli_defaults/script
@@ -2,6 +2,11 @@

envsubst < databricks.yml.tmpl > databricks.yml

cleanup() {
trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

$CLI bundle deploy
job1_id="$(read_id.py job1)"
job2_id="$(read_id.py job2)"
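For reference, the shape each test script takes after this change is roughly the following (a sketch assembled from the hunks in this PR; trace, $CLI, and read_id.py are helpers provided by the acceptance-test harness, and the job names are just the ones used in cli_defaults):

    envsubst < databricks.yml.tmpl > databricks.yml

    # Register the teardown before deploying: the EXIT trap fires on every
    # exit path, so the bundle is destroyed even when a later step fails.
    cleanup() {
        trace $CLI bundle destroy --auto-approve
    }
    trap cleanup EXIT

    $CLI bundle deploy
    job1_id="$(read_id.py job1)"
    job2_id="$(read_id.py job2)"

The destroy output appended to each output.txt above is what --auto-approve prints while deleting the deployed resources and the bundle's workspace root.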
@@ -62,3 +62,12 @@ Resource: resources.jobs.my_job
+ max_concurrent_runs: 5
timeout_seconds: 3600
environments:

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.my_job

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
5 changes: 5 additions & 0 deletions acceptance/bundle/config-remote-sync/config_edits/script
@@ -2,6 +2,11 @@

envsubst < databricks.yml.tmpl > databricks.yml

cleanup() {
trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

touch dummy.whl
$CLI bundle deploy
job_id="$(read_id.py my_job)"
@@ -20,3 +20,12 @@ Resource: resources.jobs.test_job
max_concurrent_runs: replace



>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.test_job

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
5 changes: 5 additions & 0 deletions acceptance/bundle/config-remote-sync/flushed_cache/script
@@ -2,6 +2,11 @@

envsubst < databricks.yml.tmpl > databricks.yml

cleanup() {
trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

title "Deploy bundle"
echo
$CLI bundle deploy
@@ -37,3 +37,12 @@ Resource: resources.jobs.my_job
-
# Tags for categorization
tags:

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.my_job

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
@@ -2,6 +2,11 @@

envsubst < databricks.yml.tmpl > databricks.yml

cleanup() {
trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

touch dummy.whl
$CLI bundle deploy
job_id="$(read_id.py my_job)"
9 changes: 9 additions & 0 deletions acceptance/bundle/config-remote-sync/job_fields/output.txt
@@ -60,3 +60,12 @@ Resource: resources.jobs.my_job
+ team: data
targets:
default:

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.my_job

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
5 changes: 5 additions & 0 deletions acceptance/bundle/config-remote-sync/job_fields/script
@@ -2,6 +2,11 @@

envsubst < databricks.yml.tmpl > databricks.yml

cleanup() {
trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

touch dummy.whl
$CLI bundle deploy
job_id="$(read_id.py my_job)"
@@ -114,3 +114,13 @@ Resource: resources.jobs.rename_task_job
+ - task_key: b_task_renamed
notebook_task:
notebook_path: /Users/{{workspace_user_name}}/c_task

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.my_job
delete resources.jobs.rename_task_job

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
@@ -2,6 +2,11 @@

envsubst < databricks.yml.tmpl > databricks.yml

cleanup() {
trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

touch dummy.whl
$CLI bundle deploy
job_id="$(read_id.py my_job)"
@@ -42,3 +42,17 @@ Resource: resources.pipelines.my_pipeline
pipeline_id: ${resources.pipelines.my_pipeline.id}
- full_refresh: false
+ full_refresh: true

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.my_job
delete resources.pipelines.my_pipeline

This action will result in the deletion of the following Lakeflow Spark Declarative Pipelines along with the
Streaming Tables (STs) and Materialized Views (MVs) managed by them:
delete resources.pipelines.my_pipeline

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
@@ -2,6 +2,11 @@

envsubst < databricks.yml.tmpl > databricks.yml

cleanup() {
trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

touch dummy.whl
$CLI bundle deploy
job_id="$(read_id.py my_job)"
10 changes: 10 additions & 0 deletions acceptance/bundle/config-remote-sync/multiple_files/output.txt
@@ -92,3 +92,13 @@ Resource: resources.jobs.job_two
notebook_task:
notebook_path: /Users/{{workspace_user_name}}/extra
+ task_key: extra_task

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.job_one
delete resources.jobs.job_two

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/dev

Deleting files...
Destroy complete!
5 changes: 5 additions & 0 deletions acceptance/bundle/config-remote-sync/multiple_files/script
@@ -4,6 +4,11 @@ envsubst < databricks.yml.tmpl > databricks.yml
envsubst < resources/job1.yml.tmpl > resources/job1.yml
envsubst < resources/job2.yml.tmpl > resources/job2.yml

cleanup() {
trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

touch dummy.whl
$CLI bundle deploy

@@ -49,3 +49,13 @@ Resource: resources.jobs.job_two
num_workers: 1
+ tags:
+ team: ml

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.job_one
delete resources.jobs.job_two

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
@@ -2,6 +2,11 @@

envsubst < databricks.yml.tmpl > databricks.yml

cleanup() {
trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

touch dummy.whl
$CLI bundle deploy

9 changes: 9 additions & 0 deletions acceptance/bundle/config-remote-sync/output_json/output.txt
@@ -26,3 +26,12 @@ Deployment complete!
}
}
}

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.test_job

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
5 changes: 5 additions & 0 deletions acceptance/bundle/config-remote-sync/output_json/script
@@ -2,6 +2,11 @@

envsubst < databricks.yml.tmpl > databricks.yml

cleanup() {
trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

touch dummy.whl
$CLI bundle deploy
job_id="$(read_id.py test_job)"
@@ -16,3 +16,12 @@ No changes detected.
"files": null,
"changes": {}
}

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.test_job

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
@@ -2,6 +2,11 @@

envsubst < databricks.yml.tmpl > databricks.yml

cleanup() {
trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

touch dummy.whl
$CLI bundle deploy

13 changes: 13 additions & 0 deletions acceptance/bundle/config-remote-sync/pipeline_fields/output.txt
@@ -55,3 +55,16 @@ Resource: resources.pipelines.my_pipeline
+ foo: bar
targets:
default:

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.pipelines.my_pipeline

This action will result in the deletion of the following Lakeflow Spark Declarative Pipelines along with the
Streaming Tables (STs) and Materialized Views (MVs) managed by them:
delete resources.pipelines.my_pipeline

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
5 changes: 5 additions & 0 deletions acceptance/bundle/config-remote-sync/pipeline_fields/script
@@ -2,6 +2,11 @@

envsubst < databricks.yml.tmpl > databricks.yml

cleanup() {
trace $CLI bundle destroy --auto-approve
}
trap cleanup EXIT

$CLI bundle deploy
pipeline_id="$(read_id.py my_pipeline)"

@@ -40,3 +40,12 @@ Resource: resources.jobs.my_job
- env: dev
+ env: staging
+ owner: data-team

>>> [CLI] bundle destroy --auto-approve -t dev
The following resources will be deleted:
delete resources.jobs.my_job

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/dev

Deleting files...
Destroy complete!
5 changes: 5 additions & 0 deletions acceptance/bundle/config-remote-sync/target_override/script
@@ -2,6 +2,11 @@

envsubst < databricks.yml.tmpl > databricks.yml

cleanup() {
trace $CLI bundle destroy --auto-approve -t dev
}
trap cleanup EXIT

touch dummy.whl
$CLI bundle deploy -t dev
job_id="$(read_id.py my_job)"
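A note on this last case: the cleanup for target_override passes the same -t dev flag as the deploy, since each target has its own workspace state root (note the .../dev path in the destroy output above, versus .../default elsewhere). Sketch, under the same harness assumptions as the earlier example:

    cleanup() {
        # Destroy the same target that was deployed; without -t dev the
        # CLI would resolve the default target's separate state instead.
        trace $CLI bundle destroy --auto-approve -t dev
    }
    trap cleanup EXIT

    $CLI bundle deploy -t dev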