diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 912d730c62..3cb3b7da03 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -2d7aaedaf5d20f82b17cc1de48543b88e609f464 \ No newline at end of file +69a174b6c47c5e1039a5f14271440c10e33998ce \ No newline at end of file diff --git a/.github/workflows/tagging.yml b/.github/workflows/tagging.yml index 15c8060dd9..33316d7e53 100644 --- a/.github/workflows/tagging.yml +++ b/.github/workflows/tagging.yml @@ -2,10 +2,14 @@ name: tagging on: + # Manual dispatch. workflow_dispatch: - # Enable for automatic tagging - #schedule: - # - cron: '0 0 * * TUE' + # No inputs are required for the manual dispatch. + + # Runs at 8:00 UTC on Tuesday, Wednesday, and Thursday. To enable automated + # tagging for a repository, simply add it to the if block of the tag job. + schedule: + - cron: '0 8 * * TUE,WED,THU' # Ensure that only a single instance of the workflow is running at a time. concurrency: @@ -13,6 +17,16 @@ concurrency: jobs: tag: + # Only run the tag job if the trigger is manual (workflow_dispatch) or + # the repository has been approved for automated releases. + # + # To disable release for a repository, simply exclude it from the if + # condition. + if: >- + github.event_name == 'workflow_dispatch' || + github.repository == 'databricks/databricks-sdk-go' || + github.repository == 'databricks/databricks-sdk-py' || + github.repository == 'databricks/databricks-sdk-java' environment: "release-is" runs-on: group: databricks-deco-testing-runner-group diff --git a/acceptance/auth/credentials/unified-host/output.txt b/acceptance/auth/credentials/unified-host/output.txt index 89aa4c891c..b78a016851 100644 --- a/acceptance/auth/credentials/unified-host/output.txt +++ b/acceptance/auth/credentials/unified-host/output.txt @@ -6,6 +6,6 @@ } === Without workspace_id (should error) -Error: WorkspaceId must be set when using WorkspaceClient with unified host +Error: WorkspaceID must be set when using WorkspaceClient with unified host Exit code: 1 diff --git a/acceptance/bundle/refschema/out.fields.txt b/acceptance/bundle/refschema/out.fields.txt index 12e8da5992..5ede9061b5 100644 --- a/acceptance/bundle/refschema/out.fields.txt +++ b/acceptance/bundle/refschema/out.fields.txt @@ -691,6 +691,7 @@ resources.jobs.*.environments []jobs.JobEnvironment ALL resources.jobs.*.environments[*] jobs.JobEnvironment ALL resources.jobs.*.environments[*].environment_key string ALL resources.jobs.*.environments[*].spec *compute.Environment ALL +resources.jobs.*.environments[*].spec.base_environment string ALL resources.jobs.*.environments[*].spec.client string ALL resources.jobs.*.environments[*].spec.dependencies []string ALL resources.jobs.*.environments[*].spec.dependencies[*] string ALL @@ -877,6 +878,8 @@ resources.jobs.*.tasks[*].condition_task.op jobs.ConditionTaskOp ALL resources.jobs.*.tasks[*].condition_task.right string ALL resources.jobs.*.tasks[*].dashboard_task *jobs.DashboardTask ALL resources.jobs.*.tasks[*].dashboard_task.dashboard_id string ALL +resources.jobs.*.tasks[*].dashboard_task.filters map[string]string ALL +resources.jobs.*.tasks[*].dashboard_task.filters.* string ALL resources.jobs.*.tasks[*].dashboard_task.subscription *jobs.Subscription ALL resources.jobs.*.tasks[*].dashboard_task.subscription.custom_subject string ALL resources.jobs.*.tasks[*].dashboard_task.subscription.paused bool ALL @@ -939,6 +942,8 @@ resources.jobs.*.tasks[*].for_each_task.task.condition_task.op jobs.ConditionTas 
resources.jobs.*.tasks[*].for_each_task.task.condition_task.right string ALL resources.jobs.*.tasks[*].for_each_task.task.dashboard_task *jobs.DashboardTask ALL resources.jobs.*.tasks[*].for_each_task.task.dashboard_task.dashboard_id string ALL +resources.jobs.*.tasks[*].for_each_task.task.dashboard_task.filters map[string]string ALL +resources.jobs.*.tasks[*].for_each_task.task.dashboard_task.filters.* string ALL resources.jobs.*.tasks[*].for_each_task.task.dashboard_task.subscription *jobs.Subscription ALL resources.jobs.*.tasks[*].for_each_task.task.dashboard_task.subscription.custom_subject string ALL resources.jobs.*.tasks[*].for_each_task.task.dashboard_task.subscription.paused bool ALL diff --git a/bundle/config/workspace.go b/bundle/config/workspace.go index dcdebf9781..3d6633b7e9 100644 --- a/bundle/config/workspace.go +++ b/bundle/config/workspace.go @@ -124,7 +124,7 @@ func (w *Workspace) Config() *config.Config { // Unified host Experimental_IsUnifiedHost: w.ExperimentalIsUnifiedHost, - WorkspaceId: w.WorkspaceId, + WorkspaceID: w.WorkspaceId, } for k := range config.ConfigAttributes { diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index 6464e26cac..51d27d2f89 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -556,12 +556,6 @@ github.com/databricks/cli/bundle/config/resources.Catalog: "connection_name": "description": |- PLACEHOLDER - "grants": - "description": |- - PLACEHOLDER - "lifecycle": - "description": |- - PLACEHOLDER "name": "description": |- PLACEHOLDER diff --git a/bundle/internal/schema/annotations_openapi.yml b/bundle/internal/schema/annotations_openapi.yml index b47cfe8cbf..231b18d8fd 100644 --- a/bundle/internal/schema/annotations_openapi.yml +++ b/bundle/internal/schema/annotations_openapi.yml @@ -2120,6 +2120,10 @@ github.com/databricks/databricks-sdk-go/service/compute.Environment: "description": |- The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal environment spec, only pip dependencies are supported. + "base_environment": + "description": |- + The `base_environment` key refers to an `env.yaml` file that specifies an environment version and a collection of dependencies required for the environment setup. + This `env.yaml` file may itself include a `base_environment` reference pointing to another `env_1.yaml` file. However, when used as a base environment, `env_1.yaml` (or further nested references) will not be processed or included in the final environment, meaning that the resolution of `base_environment` references is not recursive. "client": "description": |- Use `environment_version` instead. @@ -2876,6 +2880,16 @@ github.com/databricks/databricks-sdk-go/service/jobs.DashboardTask: "dashboard_id": "description": |- The identifier of the dashboard to refresh. + "filters": + "description": |- + Dashboard task parameters. Used to apply dashboard filter values during dashboard task execution. Parameter values get applied to any dashboard filters that have a matching URL identifier as the parameter key. + The parameter value format is dependent on the filter type: + - For text and single-select filters, provide a single value (e.g. `"value"`) + - For date and datetime filters, provide the value in ISO 8601 format (e.g. `"2000-01-01T00:00:00"`) + - For multi-select filters, provide a JSON array of values (e.g. 
`"[\"value1\",\"value2\"]"`) + - For range and date range filters, provide a JSON object with `start` and `end` (e.g. `"{\"start\":\"1\",\"end\":\"10\"}"`) + "x-databricks-preview": |- + PRIVATE "subscription": "description": |- Optional: subscription configuration for sending the dashboard snapshot. diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml index 737e698f66..2618fc7967 100644 --- a/bundle/internal/schema/annotations_openapi_overrides.yml +++ b/bundle/internal/schema/annotations_openapi_overrides.yml @@ -84,6 +84,13 @@ github.com/databricks/cli/bundle/config/resources.AppPermissionLevel: CAN_MANAGE - |- CAN_USE +github.com/databricks/cli/bundle/config/resources.Catalog: + "grants": + "description": |- + PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.Cluster: "_": "markdown_description": |- diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 0a25ac5a95..c41296d46f 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -4535,6 +4535,10 @@ "type": "object", "description": "The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines.\nIn this minimal environment spec, only pip dependencies are supported.", "properties": { + "base_environment": { + "description": "The `base_environment` key refers to an `env.yaml` file that specifies an environment version and a collection of dependencies required for the environment setup.\nThis `env.yaml` file may itself include a `base_environment` reference pointing to another `env_1.yaml` file. However, when used as a base environment, `env_1.yaml` (or further nested references) will not be processed or included in the final environment, meaning that the resolution of `base_environment` references is not recursive.", + "$ref": "#/$defs/string" + }, "client": { "description": "Use `environment_version` instead.", "$ref": "#/$defs/string", @@ -5605,6 +5609,12 @@ "dashboard_id": { "$ref": "#/$defs/string" }, + "filters": { + "description": "Dashboard task parameters. Used to apply dashboard filter values during dashboard task execution. Parameter values get applied to any dashboard filters that have a matching URL identifier as the parameter key.\nThe parameter value format is dependent on the filter type:\n- For text and single-select filters, provide a single value (e.g. `\"value\"`)\n- For date and datetime filters, provide the value in ISO 8601 format (e.g. `\"2000-01-01T00:00:00\"`)\n- For multi-select filters, provide a JSON array of values (e.g. `\"[\\\"value1\\\",\\\"value2\\\"]\"`)\n- For range and date range filters, provide a JSON object with `start` and `end` (e.g. 
`\"{\\\"start\\\":\\\"1\\\",\\\"end\\\":\\\"10\\\"}\"`)", + "$ref": "#/$defs/map/string", + "x-databricks-preview": "PRIVATE", + "doNotSuggest": true + }, "subscription": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Subscription" }, diff --git a/bundle/schema/jsonschema_for_docs.json b/bundle/schema/jsonschema_for_docs.json index 42fe18b664..5d6ca3b1cd 100644 --- a/bundle/schema/jsonschema_for_docs.json +++ b/bundle/schema/jsonschema_for_docs.json @@ -4018,6 +4018,10 @@ "type": "object", "description": "The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines.\nIn this minimal environment spec, only pip dependencies are supported.", "properties": { + "base_environment": { + "description": "The `base_environment` key refers to an `env.yaml` file that specifies an environment version and a collection of dependencies required for the environment setup.\nThis `env.yaml` file may itself include a `base_environment` reference pointing to another `env_1.yaml` file. However, when used as a base environment, `env_1.yaml` (or further nested references) will not be processed or included in the final environment, meaning that the resolution of `base_environment` references is not recursive.", + "$ref": "#/$defs/string" + }, "client": { "description": "Use `environment_version` instead.", "$ref": "#/$defs/string", @@ -4802,6 +4806,12 @@ "$ref": "#/$defs/string", "x-since-version": "v0.248.0" }, + "filters": { + "description": "Dashboard task parameters. Used to apply dashboard filter values during dashboard task execution. Parameter values get applied to any dashboard filters that have a matching URL identifier as the parameter key.\nThe parameter value format is dependent on the filter type:\n- For text and single-select filters, provide a single value (e.g. `\"value\"`)\n- For date and datetime filters, provide the value in ISO 8601 format (e.g. `\"2000-01-01T00:00:00\"`)\n- For multi-select filters, provide a JSON array of values (e.g. `\"[\\\"value1\\\",\\\"value2\\\"]\"`)\n- For range and date range filters, provide a JSON object with `start` and `end` (e.g. 
`\"{\\\"start\\\":\\\"1\\\",\\\"end\\\":\\\"10\\\"}\"`)", "$ref": "#/$defs/map/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true + }, "subscription": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Subscription", "x-since-version": "v0.248.0" diff --git a/cmd/auth/login.go b/cmd/auth/login.go index 70aeef9707..fd48a6ec0a 100644 --- a/cmd/auth/login.go +++ b/cmd/auth/login.go @@ -196,7 +196,7 @@ depends on the existing profiles you have set in your configuration file w, err := databricks.NewWorkspaceClient(&databricks.Config{ Host: authArguments.Host, AccountID: authArguments.AccountID, - WorkspaceId: authArguments.WorkspaceId, + WorkspaceID: authArguments.WorkspaceId, Experimental_IsUnifiedHost: authArguments.IsUnifiedHost, Credentials: config.NewTokenSourceStrategy("login-token", authconv.AuthTokenSource(persistentAuth)), }) @@ -230,7 +230,7 @@ depends on the existing profiles you have set in your configuration file Host: authArguments.Host, AuthType: authTypeDatabricksCLI, AccountID: authArguments.AccountID, - WorkspaceId: authArguments.WorkspaceId, + WorkspaceID: authArguments.WorkspaceId, Experimental_IsUnifiedHost: authArguments.IsUnifiedHost, ClusterID: clusterID, ConfigFile: os.Getenv("DATABRICKS_CONFIG_FILE"), @@ -291,7 +291,7 @@ func setHostAndAccountId(ctx context.Context, existingProfile *profile.Profile, cfg := &config.Config{ Host: authArguments.Host, AccountID: authArguments.AccountID, - WorkspaceId: authArguments.WorkspaceId, + WorkspaceID: authArguments.WorkspaceId, Experimental_IsUnifiedHost: authArguments.IsUnifiedHost, } diff --git a/cmd/workspace/genie/genie.go b/cmd/workspace/genie/genie.go index 5431508396..f77b5d1bea 100755 --- a/cmd/workspace/genie/genie.go +++ b/cmd/workspace/genie/genie.go @@ -521,13 +521,29 @@ func newGenerateDownloadFullQueryResult() *cobra.Command { cmd.Short = `Generate full query result download.` cmd.Long = `Generate full query result download. - Initiates a new SQL execution and returns a download_id that you can use to - track the progress of the download. The query result is stored in an external - link and can be retrieved using the [Get Download Full Query - Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks - strongly recommends that you protect the URLs that are returned by the - EXTERNAL_LINKS disposition. See [Execute - Statement](:method:statementexecution/executestatement) for more details. + Initiates a new SQL execution and returns a download_id and + download_id_signature that you can use to track the progress of the + download. The query result is stored in an external link and can be retrieved + using the [Get Download Full Query + Result](:method:genie/getdownloadfullqueryresult) API. Both download_id and + download_id_signature must be provided when calling the Get endpoint. + + ---- + + ### **Warning: Databricks strongly recommends that you protect the URLs that + are returned by the EXTERNAL_LINKS disposition.** + + When you use the EXTERNAL_LINKS disposition, a short-lived, presigned URL is + generated, which can be used to download the results directly from cloud storage. As a + short-lived credential is embedded in this URL, you should protect the URL. + + Because these presigned URLs are already generated with embedded temporary access credentials, you must not set + an Authorization header in the download requests. + + See [Execute Statement](:method:statementexecution/executestatement) for more + details.
+ + ---- Arguments: SPACE_ID: Genie space ID @@ -592,15 +608,29 @@ func newGetDownloadFullQueryResult() *cobra.Command { cmd.Long = `Get download full query result. After [Generating a Full Query Result - Download](:method:genie/getdownloadfullqueryresult) and successfully receiving - a download_id, use this API to poll the download progress. When the download - is complete, the API returns one or more external links to the query result - files. Warning: Databricks strongly recommends that you protect the URLs that - are returned by the EXTERNAL_LINKS disposition. You must not set an - Authorization header in download requests. When using the EXTERNAL_LINKS - disposition, Databricks returns presigned URLs that grant temporary access to - data. See [Execute Statement](:method:statementexecution/executestatement) for - more details. + Download](:method:genie/generatedownloadfullqueryresult) and successfully + receiving a download_id and download_id_signature, use this API to poll + the download progress. Both download_id and download_id_signature are + required to call this endpoint. When the download is complete, the API returns + the result in the EXTERNAL_LINKS disposition, containing one or more + external links to the query result files. + + ---- + + ### **Warning: Databricks strongly recommends that you protect the URLs that + are returned by the EXTERNAL_LINKS disposition.** + + When you use the EXTERNAL_LINKS disposition, a short-lived, presigned URL is + generated, which can be used to download the results directly from cloud storage. As a + short-lived credential is embedded in this URL, you should protect the URL. + + Because these presigned URLs are already generated with embedded temporary access credentials, you must not set + an Authorization header in the download requests. + + See [Execute Statement](:method:statementexecution/executestatement) for more + details. + + ---- Arguments: SPACE_ID: Genie space ID diff --git a/cmd/workspace/policies/policies.go b/cmd/workspace/policies/policies.go index 6efae75b38..35e8d0cfab 100755 --- a/cmd/workspace/policies/policies.go +++ b/cmd/workspace/policies/policies.go @@ -125,7 +125,7 @@ func newCreatePolicy() *cobra.Command { TABLE, VOLUME, ] - POLICY_TYPE: Type of the policy. Required on create and ignored on update. + POLICY_TYPE: Type of the policy. Required on create. Supported values: [POLICY_TYPE_COLUMN_MASK, POLICY_TYPE_ROW_FILTER]` cmd.Annotations = make(map[string]string) @@ -467,7 +467,7 @@ func newUpdatePolicy() *cobra.Command { TABLE, VOLUME, ] - POLICY_TYPE: Type of the policy. Required on create and ignored on update. + POLICY_TYPE: Type of the policy. Required on create. Supported values: [POLICY_TYPE_COLUMN_MASK, POLICY_TYPE_ROW_FILTER]` cmd.Annotations = make(map[string]string) diff --git a/cmd/workspace/warehouses/warehouses.go b/cmd/workspace/warehouses/warehouses.go index 881e46a74e..c498a390ff 100755 --- a/cmd/workspace/warehouses/warehouses.go +++ b/cmd/workspace/warehouses/warehouses.go @@ -200,9 +200,6 @@ func newCreateDefaultWarehouseOverride() *cobra.Command { TYPE: The type of override behavior. Supported values: [CUSTOM, LAST_SELECTED]` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.PreRunE = root.MustWorkspaceClient @@ -624,9 +618,6 @@ func newGetDefaultWarehouseOverride() *cobra.Command { default_warehouse_override_id can be a numeric user ID or the literal string "me" for the current user.` - // This command is being previewed; hide from help output.
- cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.PreRunE = root.MustWorkspaceClient @@ -624,9 +618,6 @@ func newGetDefaultWarehouseOverride() *cobra.Command { default_warehouse_override_id can be a numeric user ID or the literal string "me" for the current user.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.PreRunE = root.MustWorkspaceClient @@ -928,9 +919,6 @@ func newListDefaultWarehouseOverrides() *cobra.Command { Lists all default warehouse overrides in the workspace. Only workspace administrators can list all overrides.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -1349,9 +1337,6 @@ func newUpdateDefaultWarehouseOverride() *cobra.Command { TYPE: The type of override behavior. Supported values: [CUSTOM, LAST_SELECTED]` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { diff --git a/go.mod b/go.mod index fa924108f4..aab380e3af 100644 --- a/go.mod +++ b/go.mod @@ -12,7 +12,7 @@ require ( github.com/charmbracelet/bubbletea v1.3.10 // MIT github.com/charmbracelet/huh v0.8.0 github.com/charmbracelet/lipgloss v1.1.0 // MIT - github.com/databricks/databricks-sdk-go v0.104.0 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.106.0 // Apache 2.0 github.com/fatih/color v1.18.0 // MIT github.com/google/uuid v1.6.0 // BSD-3-Clause github.com/gorilla/mux v1.8.1 // BSD 3-Clause diff --git a/go.sum b/go.sum index c1114bbc4b..30bb93a4bd 100644 --- a/go.sum +++ b/go.sum @@ -69,8 +69,8 @@ github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s= github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE= github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= -github.com/databricks/databricks-sdk-go v0.104.0 h1:8V8JAebwuQjaMszA1e3F+BtQ816oSUQm2yU8mmbRc28= -github.com/databricks/databricks-sdk-go v0.104.0/go.mod h1:hWoHnHbNLjPKiTm5K/7bcIv3J3Pkgo5x9pPzh8K3RVE= +github.com/databricks/databricks-sdk-go v0.106.0 h1:hSignqC1MWuC3w3VsXZpkOki5yfRCufZOESv79XMGxo= +github.com/databricks/databricks-sdk-go v0.106.0/go.mod h1:hWoHnHbNLjPKiTm5K/7bcIv3J3Pkgo5x9pPzh8K3RVE= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= diff --git a/libs/auth/arguments.go b/libs/auth/arguments.go index 6d1bf84434..57f8daa42a 100644 --- a/libs/auth/arguments.go +++ b/libs/auth/arguments.go @@ -21,7 +21,7 @@ func (a AuthArguments) ToOAuthArgument() (u2m.OAuthArgument, error) { cfg := &config.Config{ Host: a.Host, AccountID: a.AccountID, - WorkspaceId: a.WorkspaceId, + WorkspaceID: a.WorkspaceId, Experimental_IsUnifiedHost: a.IsUnifiedHost, } host := cfg.CanonicalHostName() diff --git a/python/databricks/bundles/jobs/_models/dashboard_task.py b/python/databricks/bundles/jobs/_models/dashboard_task.py index 6284ca36d3..98e171359c 100644 --- a/python/databricks/bundles/jobs/_models/dashboard_task.py +++ b/python/databricks/bundles/jobs/_models/dashboard_task.py @@ -1,9 +1,9 
@@ -from dataclasses import dataclass +from dataclasses import dataclass, field from typing import TYPE_CHECKING, TypedDict from databricks.bundles.core._transform import _transform from databricks.bundles.core._transform_to_json import _transform_to_json_value -from databricks.bundles.core._variable import VariableOrOptional +from databricks.bundles.core._variable import VariableOrDict, VariableOrOptional from databricks.bundles.jobs._models.subscription import ( Subscription, SubscriptionParam, @@ -21,6 +21,18 @@ class DashboardTask: dashboard_id: VariableOrOptional[str] = None + filters: VariableOrDict[str] = field(default_factory=dict) + """ + :meta private: [EXPERIMENTAL] + + Dashboard task parameters. Used to apply dashboard filter values during dashboard task execution. Parameter values get applied to any dashboard filters that have a matching URL identifier as the parameter key. + The parameter value format is dependent on the filter type: + - For text and single-select filters, provide a single value (e.g. `"value"`) + - For date and datetime filters, provide the value in ISO 8601 format (e.g. `"2000-01-01T00:00:00"`) + - For multi-select filters, provide a JSON array of values (e.g. `"[\"value1\",\"value2\"]"`) + - For range and date range filters, provide a JSON object with `start` and `end` (e.g. `"{\"start\":\"1\",\"end\":\"10\"}"`) + """ + subscription: VariableOrOptional[Subscription] = None warehouse_id: VariableOrOptional[str] = None @@ -42,6 +54,18 @@ class DashboardTaskDict(TypedDict, total=False): dashboard_id: VariableOrOptional[str] + filters: VariableOrDict[str] + """ + :meta private: [EXPERIMENTAL] + + Dashboard task parameters. Used to apply dashboard filter values during dashboard task execution. Parameter values get applied to any dashboard filters that have a matching URL identifier as the parameter key. + The parameter value format is dependent on the filter type: + - For text and single-select filters, provide a single value (e.g. `"value"`) + - For date and datetime filters, provide the value in ISO 8601 format (e.g. `"2000-01-01T00:00:00"`) + - For multi-select filters, provide a JSON array of values (e.g. `"[\"value1\",\"value2\"]"`) + - For range and date range filters, provide a JSON object with `start` and `end` (e.g. `"{\"start\":\"1\",\"end\":\"10\"}"`) + """ + subscription: VariableOrOptional[SubscriptionParam] warehouse_id: VariableOrOptional[str] diff --git a/python/databricks/bundles/jobs/_models/environment.py b/python/databricks/bundles/jobs/_models/environment.py index b912693ef2..3bf806c7f4 100644 --- a/python/databricks/bundles/jobs/_models/environment.py +++ b/python/databricks/bundles/jobs/_models/environment.py @@ -16,6 +16,12 @@ class Environment: In this minimal environment spec, only pip dependencies are supported. """ + base_environment: VariableOrOptional[str] = None + """ + The `base_environment` key refers to an `env.yaml` file that specifies an environment version and a collection of dependencies required for the environment setup. + This `env.yaml` file may itself include a `base_environment` reference pointing to another `env_1.yaml` file. However, when used as a base environment, `env_1.yaml` (or further nested references) will not be processed or included in the final environment, meaning that the resolution of `base_environment` references is not recursive. + """ + client: VariableOrOptional[str] = None """ [DEPRECATED] Use `environment_version` instead. 
@@ -46,6 +52,12 @@ def as_dict(self) -> "EnvironmentDict": class EnvironmentDict(TypedDict, total=False): """""" + base_environment: VariableOrOptional[str] + """ + The `base_environment` key refers to an `env.yaml` file that specifies an environment version and a collection of dependencies required for the environment setup. + This `env.yaml` file may itself include a `base_environment` reference pointing to another `env_1.yaml` file. However, when used as a base environment, `env_1.yaml` (or further nested references) will not be processed or included in the final environment, meaning that the resolution of `base_environment` references is not recursive. + """ + client: VariableOrOptional[str] """ [DEPRECATED] Use `environment_version` instead.
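
For illustration, a minimal Go sketch of the unified-host configuration exercised by the `cmd/auth/login.go` and `libs/auth/arguments.go` hunks above, assuming `WorkspaceID` is a plain string field and `Experimental_IsUnifiedHost` is a boolean toggle; the host URL and workspace ID values are hypothetical placeholders:

```go
package main

import (
	"fmt"
	"log"

	"github.com/databricks/databricks-sdk-go"
)

func main() {
	// Sketch only: the field names mirror the login.go hunks in this diff; the
	// string type of WorkspaceID is an assumption.
	w, err := databricks.NewWorkspaceClient(&databricks.Config{
		Host:                       "https://example-unified-host.databricks.com",
		WorkspaceID:                "1234567890123456",
		Experimental_IsUnifiedHost: true,
	})
	if err != nil {
		// Leaving WorkspaceID unset on a unified host is the case the acceptance
		// test above expects to fail with "WorkspaceID must be set when using
		// WorkspaceClient with unified host".
		log.Fatal(err)
	}
	fmt.Println(w.Config.Host)
}
```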
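The new `base_environment` field on `compute.Environment` can be sketched in Go as well. The `string` type comes from the refschema output above (`resources.jobs.*.environments[*].spec.base_environment string`); the Go field name `BaseEnvironment` is assumed from the SDK's usual json-tag naming, and the file path and dependency below are hypothetical:

```go
package main

import (
	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/databricks/databricks-sdk-go/service/jobs"
)

// exampleEnvironments sketches a serverless job environment that layers pip
// dependencies on top of a base environment file.
func exampleEnvironments() []jobs.JobEnvironment {
	return []jobs.JobEnvironment{
		{
			EnvironmentKey: "default",
			Spec: &compute.Environment{
				// Points at an env.yaml defining the environment version and
				// dependencies; nested base_environment references inside that
				// file are not resolved recursively, per the description above.
				BaseEnvironment: "./env.yaml",
				Dependencies:    []string{"requests==2.32.3"},
			},
		},
	}
}

func main() { _ = exampleEnvironments() }
```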
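Similarly, the dashboard task `filters` map (typed `map[string]string` in the refschema output above) could look like the sketch below, assuming the usual generated Go field names (`DashboardTask`, `DashboardId`, `Filters`); the task key, dashboard ID, and filter identifiers are hypothetical:

```go
package main

import "github.com/databricks/databricks-sdk-go/service/jobs"

// exampleDashboardTask sketches a job task that refreshes a dashboard and
// applies filter values keyed by the filters' URL identifiers.
func exampleDashboardTask() jobs.Task {
	return jobs.Task{
		TaskKey: "refresh_dashboard",
		DashboardTask: &jobs.DashboardTask{
			DashboardId: "0123456789abcdef", // hypothetical dashboard ID
			Filters: map[string]string{
				// Single value for a text / single-select filter.
				"region": "US",
				// JSON object with start and end for a range filter, matching
				// the value format described in the schema annotations above.
				"order_total": `{"start":"1","end":"10"}`,
			},
		},
	}
}

func main() { _ = exampleDashboardTask() }
```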