diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 15c31f0e40..6854c7f284 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-8b2cd47cbac64b32e120601110a5fc70b8189ba4
\ No newline at end of file
+e1ea3f5ba0bc5b53be94f56535a67ba701a52a52
\ No newline at end of file
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 491e5b29dc..bfdd637938 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -11,5 +11,6 @@
 * Pass SYSTEM_ACCESSTOKEN from env to the Terraform provider ([#4135](https://github.com/databricks/cli/pull/4135)
 
 ### Dependency updates
+* Upgrade Go SDK to 0.94.0 ([#4148](https://github.com/databricks/cli/pull/4148))
 
 ### API Changes
diff --git a/acceptance/bundle/run/inline-script/databricks-cli/profile-is-passed/from_flag/out.requests.txt b/acceptance/bundle/run/inline-script/databricks-cli/profile-is-passed/from_flag/out.requests.txt
index bbad66cf8b..c0bd5f7127 100644
--- a/acceptance/bundle/run/inline-script/databricks-cli/profile-is-passed/from_flag/out.requests.txt
+++ b/acceptance/bundle/run/inline-script/databricks-cli/profile-is-passed/from_flag/out.requests.txt
@@ -2,6 +2,16 @@
   "method": "GET",
   "path": "/oidc/.well-known/oauth-authorization-server"
 }
+{
+  "headers": {
+    "Authorization": [
+      "Basic [ENCODED_AUTH]"
+    ]
+  },
+  "method": "POST",
+  "path": "/oidc/v1/token",
+  "raw_body": "grant_type=client_credentials\u0026scope=all-apis"
+}
 {
   "method": "GET",
   "path": "/oidc/.well-known/oauth-authorization-server"
diff --git a/acceptance/bundle/run/inline-script/databricks-cli/target-is-passed/from_flag/out.requests.txt b/acceptance/bundle/run/inline-script/databricks-cli/target-is-passed/from_flag/out.requests.txt
index 7bcd00c05c..2d8de7ca18 100644
--- a/acceptance/bundle/run/inline-script/databricks-cli/target-is-passed/from_flag/out.requests.txt
+++ b/acceptance/bundle/run/inline-script/databricks-cli/target-is-passed/from_flag/out.requests.txt
@@ -2,6 +2,16 @@
   "method": "GET",
   "path": "/oidc/.well-known/oauth-authorization-server"
 }
+{
+  "headers": {
+    "Authorization": [
+      "Basic [ENCODED_AUTH]"
+    ]
+  },
+  "method": "POST",
+  "path": "/oidc/v1/token",
+  "raw_body": "grant_type=client_credentials\u0026scope=all-apis"
+}
 {
   "method": "GET",
   "path": "/oidc/.well-known/oauth-authorization-server"
diff --git a/bundle/internal/schema/annotations_openapi.yml b/bundle/internal/schema/annotations_openapi.yml
index 038d8959be..c75e8007c7 100644
--- a/bundle/internal/schema/annotations_openapi.yml
+++ b/bundle/internal/schema/annotations_openapi.yml
@@ -556,6 +556,7 @@ github.com/databricks/cli/bundle/config/resources.Job:
   "performance_target":
     "description": |-
       The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run.
+      The performance target does not apply to tasks that run on Serverless GPU compute.
 
       * `STANDARD`: Enables cost-efficient execution of serverless workloads.
       * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.
diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json
index bf8dc42ac0..624b1686a0 100644
--- a/bundle/schema/jsonschema.json
+++ b/bundle/schema/jsonschema.json
@@ -793,7 +793,7 @@
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobParameterDefinition"
         },
         "performance_target": {
-          "description": "The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run.\n\n* `STANDARD`: Enables cost-efficient execution of serverless workloads.\n* `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.",
+          "description": "The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run.\nThe performance target does not apply to tasks that run on Serverless GPU compute.\n\n* `STANDARD`: Enables cost-efficient execution of serverless workloads.\n* `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PerformanceTarget"
         },
         "permissions": {
diff --git a/cmd/workspace/alerts-legacy/alerts-legacy.go b/cmd/workspace/alerts-legacy/alerts-legacy.go
index c929e2b811..ee517a7d86 100755
--- a/cmd/workspace/alerts-legacy/alerts-legacy.go
+++ b/cmd/workspace/alerts-legacy/alerts-legacy.go
@@ -27,8 +27,8 @@ func New() *cobra.Command {
   the condition was met. Alerts can be scheduled using the sql_task type of
   the Jobs API, e.g. :method:jobs/create.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please see
-  the latest version. [Learn more]
+  **Warning**: This API is deprecated. Please see the latest version of the
+  Databricks SQL API. [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`,
 		GroupID: "sql",
@@ -78,8 +78,8 @@ func newCreate() *cobra.Command {
   query, evaluates a condition of its result, and notifies users or
   notification destinations if the condition was met.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
-  :method:alerts/create instead. [Learn more]
+  **Warning**: This API is deprecated. Please use :method:alerts/create instead.
+  [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
@@ -146,8 +146,8 @@ func newDelete() *cobra.Command {
   restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to
   the trash.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
-  :method:alerts/delete instead. [Learn more]
+  **Warning**: This API is deprecated. Please use :method:alerts/delete instead.
+  [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
@@ -204,8 +204,8 @@ func newGet() *cobra.Command {
 
   Gets an alert.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
-  :method:alerts/get instead. [Learn more]
+  **Warning**: This API is deprecated. Please use :method:alerts/get instead.
+  [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
@@ -259,8 +259,8 @@ func newList() *cobra.Command {
 
   Gets a list of alerts.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
-  :method:alerts/list instead. [Learn more]
+  **Warning**: This API is deprecated. Please use :method:alerts/list instead.
+  [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
@@ -314,8 +314,8 @@ func newUpdate() *cobra.Command {
 
   Updates an alert.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
-  :method:alerts/update instead. [Learn more]
+  **Warning**: This API is deprecated. Please use :method:alerts/update instead.
+  [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
diff --git a/cmd/workspace/data-sources/data-sources.go b/cmd/workspace/data-sources/data-sources.go
index 3d1700c17c..920c01c5c0 100755
--- a/cmd/workspace/data-sources/data-sources.go
+++ b/cmd/workspace/data-sources/data-sources.go
@@ -28,8 +28,8 @@ func New() *cobra.Command {
   grep to search the response from this API for the name of your SQL warehouse
   as it appears in Databricks SQL.
 
-  **Note**: A new version of the Databricks SQL API is now available. [Learn
-  more]
+  **Warning**: This API is deprecated. Please see the latest version of the
+  Databricks SQL API. [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`,
 		GroupID: "sql",
@@ -66,8 +66,8 @@ func newList() *cobra.Command {
   fields that appear in this API response are enumerated for clarity. However,
   you need only a SQL warehouse's id to create new queries against it.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
-  :method:warehouses/list instead. [Learn more]
+  **Warning**: This API is deprecated. Please use :method:warehouses/list
+  instead. [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
diff --git a/cmd/workspace/feature-engineering/feature-engineering.go b/cmd/workspace/feature-engineering/feature-engineering.go
index 46b2797c1e..621fd66bb1 100755
--- a/cmd/workspace/feature-engineering/feature-engineering.go
+++ b/cmd/workspace/feature-engineering/feature-engineering.go
@@ -77,8 +77,9 @@ func newCreateFeature() *cobra.Command {
 	cmd.Flags().StringVar(&createFeatureReq.Feature.Description, "description", createFeatureReq.Feature.Description, `The description of the feature.`)
 	cmd.Flags().StringVar(&createFeatureReq.Feature.FilterCondition, "filter-condition", createFeatureReq.Feature.FilterCondition, `The filter condition applied to the source data before aggregation.`)
 	// TODO: complex arg: lineage_context
+	// TODO: complex arg: time_window
 
-	cmd.Use = "create-feature FULL_NAME SOURCE INPUTS FUNCTION TIME_WINDOW"
+	cmd.Use = "create-feature FULL_NAME SOURCE INPUTS FUNCTION"
 	cmd.Short = `Create a feature.`
 	cmd.Long = `Create a feature.
 
@@ -88,8 +89,7 @@ func newCreateFeature() *cobra.Command {
     FULL_NAME: The full three-part name (catalog, schema, name) of the feature.
     SOURCE: The data source of the feature.
     INPUTS: The input columns from which the feature is computed.
-    FUNCTION: The function by which the feature is computed.
-    TIME_WINDOW: The time window in which the feature is computed.`
+    FUNCTION: The function by which the feature is computed.`
 
 	cmd.Annotations = make(map[string]string)
 
@@ -97,11 +97,11 @@ func newCreateFeature() *cobra.Command {
 		if cmd.Flags().Changed("json") {
 			err := root.ExactArgs(0)(cmd, args)
 			if err != nil {
-				return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'full_name', 'source', 'inputs', 'function', 'time_window' in your JSON input")
+				return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'full_name', 'source', 'inputs', 'function' in your JSON input")
 			}
 			return nil
 		}
-		check := root.ExactArgs(5)
+		check := root.ExactArgs(4)
 		return check(cmd, args)
 	}
 
@@ -145,13 +145,6 @@ func newCreateFeature() *cobra.Command {
 			return fmt.Errorf("invalid FUNCTION: %s", args[3])
 		}
 
-	}
-	if !cmd.Flags().Changed("json") {
-		_, err = fmt.Sscan(args[4], &createFeatureReq.Feature.TimeWindow)
-		if err != nil {
-			return fmt.Errorf("invalid TIME_WINDOW: %s", args[4])
-		}
-
 	}
 
 	response, err := w.FeatureEngineering.CreateFeature(ctx, createFeatureReq)
@@ -296,6 +289,7 @@ func newCreateMaterializedFeature() *cobra.Command {
 	cmd.Flags().Var(&createMaterializedFeatureJson, "json", `either inline JSON string or @path/to/file.json with request body`)
 
+	cmd.Flags().StringVar(&createMaterializedFeatureReq.MaterializedFeature.CronSchedule, "cron-schedule", createMaterializedFeatureReq.MaterializedFeature.CronSchedule, `The quartz cron expression that defines the schedule of the materialization pipeline.`)
 	// TODO: complex arg: offline_store_config
 	// TODO: complex arg: online_store_config
 	cmd.Flags().Var(&createMaterializedFeatureReq.MaterializedFeature.PipelineScheduleState, "pipeline-schedule-state", `The schedule state of the materialization pipeline. Supported values: [ACTIVE, PAUSED, SNAPSHOT]`)
 
@@ -860,8 +854,9 @@ func newUpdateFeature() *cobra.Command {
 	cmd.Flags().StringVar(&updateFeatureReq.Feature.Description, "description", updateFeatureReq.Feature.Description, `The description of the feature.`)
 	cmd.Flags().StringVar(&updateFeatureReq.Feature.FilterCondition, "filter-condition", updateFeatureReq.Feature.FilterCondition, `The filter condition applied to the source data before aggregation.`)
 	// TODO: complex arg: lineage_context
+	// TODO: complex arg: time_window
 
-	cmd.Use = "update-feature FULL_NAME UPDATE_MASK SOURCE INPUTS FUNCTION TIME_WINDOW"
+	cmd.Use = "update-feature FULL_NAME UPDATE_MASK SOURCE INPUTS FUNCTION"
 	cmd.Short = `Update a feature's description (all other fields are immutable).`
 	cmd.Long = `Update a feature's description (all other fields are immutable).
 
@@ -872,8 +867,7 @@ func newUpdateFeature() *cobra.Command {
     UPDATE_MASK: The list of fields to update.
     SOURCE: The data source of the feature.
     INPUTS: The input columns from which the feature is computed.
-    FUNCTION: The function by which the feature is computed.
-    TIME_WINDOW: The time window in which the feature is computed.`
+    FUNCTION: The function by which the feature is computed.`
 
 	cmd.Annotations = make(map[string]string)
 
@@ -881,11 +875,11 @@ func newUpdateFeature() *cobra.Command {
 		if cmd.Flags().Changed("json") {
 			err := root.ExactArgs(2)(cmd, args)
 			if err != nil {
-				return fmt.Errorf("when --json flag is specified, provide only FULL_NAME, UPDATE_MASK as positional arguments. Provide 'full_name', 'source', 'inputs', 'function', 'time_window' in your JSON input")
+				return fmt.Errorf("when --json flag is specified, provide only FULL_NAME, UPDATE_MASK as positional arguments. Provide 'full_name', 'source', 'inputs', 'function' in your JSON input")
 			}
 			return nil
 		}
-		check := root.ExactArgs(6)
+		check := root.ExactArgs(5)
 		return check(cmd, args)
 	}
 
@@ -928,13 +922,6 @@ func newUpdateFeature() *cobra.Command {
 			return fmt.Errorf("invalid FUNCTION: %s", args[4])
 		}
 
-	}
-	if !cmd.Flags().Changed("json") {
-		_, err = fmt.Sscan(args[5], &updateFeatureReq.Feature.TimeWindow)
-		if err != nil {
-			return fmt.Errorf("invalid TIME_WINDOW: %s", args[5])
-		}
-
 	}
 
 	response, err := w.FeatureEngineering.UpdateFeature(ctx, updateFeatureReq)
@@ -1082,6 +1069,7 @@ func newUpdateMaterializedFeature() *cobra.Command {
 	cmd.Flags().Var(&updateMaterializedFeatureJson, "json", `either inline JSON string or @path/to/file.json with request body`)
 
+	cmd.Flags().StringVar(&updateMaterializedFeatureReq.MaterializedFeature.CronSchedule, "cron-schedule", updateMaterializedFeatureReq.MaterializedFeature.CronSchedule, `The quartz cron expression that defines the schedule of the materialization pipeline.`)
 	// TODO: complex arg: offline_store_config
 	// TODO: complex arg: online_store_config
 	cmd.Flags().Var(&updateMaterializedFeatureReq.MaterializedFeature.PipelineScheduleState, "pipeline-schedule-state", `The schedule state of the materialization pipeline. Supported values: [ACTIVE, PAUSED, SNAPSHOT]`)
 
diff --git a/cmd/workspace/pipelines/pipelines.go b/cmd/workspace/pipelines/pipelines.go
index b45364b348..6ee81e29b0 100755
--- a/cmd/workspace/pipelines/pipelines.go
+++ b/cmd/workspace/pipelines/pipelines.go
@@ -42,6 +42,7 @@ func New() *cobra.Command {
 	}
 
 	// Add methods
+	cmd.AddCommand(newClone())
 	cmd.AddCommand(newCreate())
 	cmd.AddCommand(newDelete())
 	cmd.AddCommand(newGet())
@@ -65,6 +66,81 @@ func New() *cobra.Command {
 	return cmd
 }
 
+// start clone command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var cloneOverrides []func(
+	*cobra.Command,
+	*pipelines.ClonePipelineRequest,
+)
+
+func newClone() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var cloneReq pipelines.ClonePipelineRequest
+	var cloneJson flags.JsonFlag
+
+	cmd.Flags().Var(&cloneJson, "json", `either inline JSON string or @path/to/file.json with request body`)
+
+	cmd.Use = "clone PIPELINE_ID"
+	cmd.Short = `Clone a pipeline.`
+	cmd.Long = `Clone a pipeline.
+
+  Creates a new pipeline using Unity Catalog from a pipeline using Hive
+  Metastore. This method returns the ID of the newly created clone.
+  Additionally, this method starts an update for the newly created pipeline.
+
+  Arguments:
+    PIPELINE_ID: Source pipeline to clone from`
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := cmdctx.WorkspaceClient(ctx)
+
+		if cmd.Flags().Changed("json") {
+			diags := cloneJson.Unmarshal(&cloneReq)
+			if diags.HasError() {
+				return diags.Error()
+			}
+			if len(diags) > 0 {
+				err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags)
+				if err != nil {
+					return err
+				}
+			}
+		} else {
+			return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
+		}
+		cloneReq.PipelineId = args[0]
+
+		response, err := w.Pipelines.Clone(ctx, cloneReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range cloneOverrides {
+		fn(cmd, &cloneReq)
+	}
+
+	return cmd
+}
+
 // start create command
 
 // Slice with functions to override default command behavior.
diff --git a/cmd/workspace/queries-legacy/queries-legacy.go b/cmd/workspace/queries-legacy/queries-legacy.go
index 01389aadfb..07686bbf02 100755
--- a/cmd/workspace/queries-legacy/queries-legacy.go
+++ b/cmd/workspace/queries-legacy/queries-legacy.go
@@ -24,8 +24,8 @@ func New() *cobra.Command {
   tags, parameters, and visualizations. Queries can be scheduled using the
   sql_task type of the Jobs API, e.g. :method:jobs/create.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please see
-  the latest version. [Learn more]
+  **Warning**: This API is deprecated. Please see the latest version of the
+  Databricks SQL API. [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`,
 		GroupID: "sql",
@@ -88,8 +88,8 @@ func newCreate() *cobra.Command {
 
   **Note**: You cannot add a visualization until you create the query.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
-  :method:queries/create instead. [Learn more]
+  **Warning**: This API is deprecated. Please use :method:queries/create
+  instead. [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
@@ -159,8 +159,8 @@ func newDelete() *cobra.Command {
   searches and list views, and they cannot be used for alerts. The trash is
   deleted after 30 days.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
-  :method:queries/delete instead. [Learn more]
+  **Warning**: This API is deprecated. Please use :method:queries/delete
+  instead. [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
@@ -218,8 +218,8 @@ func newGet() *cobra.Command {
   Retrieve a query object definition along with contextual permissions
   information about the currently authenticated user.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
-  :method:queries/get instead. [Learn more]
+  **Warning**: This API is deprecated. Please use :method:queries/get instead.
+  [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
@@ -285,8 +285,8 @@ func newList() *cobra.Command {
   **Warning**: Calling this API concurrently 10 or more times could result in
   throttling, service degradation, or a temporary ban.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
-  :method:queries/list instead. [Learn more]
+  **Warning**: This API is deprecated. Please use :method:queries/list instead.
+  [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
@@ -339,8 +339,8 @@ func newRestore() *cobra.Command {
   Restore a query that has been moved to the trash. A restored query appears in
   list views and searches. You can use restored queries for alerts.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please see
-  the latest version. [Learn more]
+  **Warning**: This API is deprecated. Please see the latest version. [Learn
+  more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
@@ -410,8 +410,8 @@ func newUpdate() *cobra.Command {
 
   **Note**: You cannot undo this operation.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
-  :method:queries/update instead. [Learn more]
+  **Warning**: This API is deprecated. Please use :method:queries/update
+  instead. [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
diff --git a/cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go b/cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go
index 09d271237a..f455097c50 100755
--- a/cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go
+++ b/cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go
@@ -25,8 +25,8 @@ func New() *cobra.Command {
   vizualisations from existing queries within the Databricks Workspace. Data
   structures may change over time.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please see
-  the latest version. [Learn more]
+  **Warning**: This API is deprecated. Please see the latest version of the
+  Databricks SQL API. [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`,
 		GroupID: "sql",
@@ -75,7 +75,7 @@ func newCreate() *cobra.Command {
 
   Creates visualization in the query.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
+  **Warning**: This API is deprecated. Please use
   :method:queryvisualizations/create instead. [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
@@ -141,7 +141,7 @@ func newDelete() *cobra.Command {
 
   Removes a visualization from the query.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
+  **Warning**: This API is deprecated. Please use
   :method:queryvisualizations/delete instead. [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
@@ -214,7 +214,7 @@ func newUpdate() *cobra.Command {
 
   Updates visualization in the query.
 
-  **Note**: A new version of the Databricks SQL API is now available. Please use
+  **Warning**: This API is deprecated. Please use
   :method:queryvisualizations/update instead. [Learn more]
 
   [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
 
diff --git a/go.mod b/go.mod
index e796aeaeff..0bbb386248 100644
--- a/go.mod
+++ b/go.mod
@@ -9,7 +9,7 @@ require (
 	github.com/BurntSushi/toml v1.5.0 // MIT
 	github.com/Masterminds/semver/v3 v3.4.0 // MIT
 	github.com/briandowns/spinner v1.23.1 // Apache 2.0
-	github.com/databricks/databricks-sdk-go v0.93.0 // Apache 2.0
+	github.com/databricks/databricks-sdk-go v0.94.0 // Apache 2.0
 	github.com/fatih/color v1.18.0 // MIT
 	github.com/google/uuid v1.6.0 // BSD-3-Clause
 	github.com/gorilla/mux v1.8.1 // BSD 3-Clause
diff --git a/go.sum b/go.sum
index 7935e45a2e..dcfd165701 100644
--- a/go.sum
+++ b/go.sum
@@ -29,8 +29,8 @@ github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZ
 github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
 github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
 github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
-github.com/databricks/databricks-sdk-go v0.93.0 h1:ov+n+pZBI8ZpKscZiPf9OxuwVVm9eftaZyoGENdO754=
-github.com/databricks/databricks-sdk-go v0.93.0/go.mod h1:hWoHnHbNLjPKiTm5K/7bcIv3J3Pkgo5x9pPzh8K3RVE=
+github.com/databricks/databricks-sdk-go v0.94.0 h1:+ITzL/O6/8FkHBJ4oWj1o8PkZ9GPozqydpXJRjsdxzw=
+github.com/databricks/databricks-sdk-go v0.94.0/go.mod h1:hWoHnHbNLjPKiTm5K/7bcIv3J3Pkgo5x9pPzh8K3RVE=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
diff --git a/python/databricks/bundles/jobs/_models/job.py b/python/databricks/bundles/jobs/_models/job.py
index 88b6ee17a7..e836d4c9a8 100644
--- a/python/databricks/bundles/jobs/_models/job.py
+++ b/python/databricks/bundles/jobs/_models/job.py
@@ -149,6 +149,7 @@ class Job(Resource):
     performance_target: VariableOrOptional[PerformanceTarget] = None
     """
     The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run.
+    The performance target does not apply to tasks that run on Serverless GPU compute.
 
     * `STANDARD`: Enables cost-efficient execution of serverless workloads.
     * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.
@@ -293,6 +294,7 @@ class JobDict(TypedDict, total=False):
     performance_target: VariableOrOptional[PerformanceTargetParam]
     """
     The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run.
+    The performance target does not apply to tasks that run on Serverless GPU compute.
 
     * `STANDARD`: Enables cost-efficient execution of serverless workloads.
    * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.
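For context on the new `pipelines clone` command wired up above: the generated RunE handler only fills `ClonePipelineRequest.PipelineId` from the positional argument and forwards everything else from `--json` to `w.Pipelines.Clone`. Below is a minimal sketch of equivalent Go SDK usage against the upgraded SDK; the `Clone` call, request type, and `PipelineId` field come from the generated code in this diff, while the placeholder pipeline ID is hypothetical and any additional request fields (normally supplied via `--json`) are deliberately omitted rather than guessed.

```go
package main

import (
	"context"
	"log"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/pipelines"
)

func main() {
	ctx := context.Background()

	// Workspace client resolved from the environment / .databrickscfg,
	// analogous to the CLI's root.MustWorkspaceClient pre-run hook.
	w, err := databricks.NewWorkspaceClient()
	if err != nil {
		log.Fatal(err)
	}

	// PipelineId mirrors the PIPELINE_ID positional argument of
	// `databricks pipelines clone`; other ClonePipelineRequest fields
	// would normally arrive through the --json flag.
	resp, err := w.Pipelines.Clone(ctx, pipelines.ClonePipelineRequest{
		PipelineId: "1234-example-pipeline-id", // hypothetical placeholder
	})
	if err != nil {
		log.Fatal(err)
	}

	// Per the command help text above, the call returns the ID of the new
	// Unity Catalog pipeline and starts an initial update for it.
	log.Printf("clone response: %+v", resp)
}
```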