From f0f8699c55d1bd32c3ca520a0c89af6506f34420 Mon Sep 17 00:00:00 2001 From: Lennart Kats Date: Tue, 23 Dec 2025 15:02:08 +0100 Subject: [PATCH 1/5] Add interactive warehouse picker for SQL templates MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add `SelectWarehouse` function with state-sorted picker (running first) - Add `GetDefaultWarehouse` function for determining default warehouse - Add `Format` field to JSON schema for custom input types - Use `format: "warehouse_path"` in default-sql and dbt-sql templates - Update apps-mcp to use shared `GetDefaultWarehouse` logic - Fix case-insensitive search in cmdio Select 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- .../dbt-sql/output/my_dbt_sql/README.md | 11 +- .../output/my_default_sql/README.md | 5 + .../apps-mcp/lib/middlewares/warehouse.go | 60 +------- libs/cmdio/io.go | 2 +- libs/databrickscfg/cfgpickers/warehouses.go | 134 +++++++++++++++++- libs/jsonschema/schema.go | 4 + libs/template/config.go | 64 +++++++-- .../dbt-sql/databricks_template_schema.json | 7 +- .../template/{{.project_name}}/README.md.tmpl | 11 +- .../databricks_template_schema.json | 7 +- .../template/{{.project_name}}/README.md.tmpl | 5 + 11 files changed, 226 insertions(+), 84 deletions(-) diff --git a/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/README.md b/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/README.md index 756a2eda4e..e8c7ec95d7 100644 --- a/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/README.md +++ b/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/README.md @@ -88,12 +88,15 @@ $ dbt test ## Production setup -Your production dbt profiles are defined in dbt_profiles/profiles.yml. -These profiles define the default catalog, schema, and any other +Your production dbt profiles are defined in `dbt_profiles/profiles.yml`. +These profiles define the default warehouse, catalog, schema, and any other target-specific settings. Read more about dbt profiles on Databricks at https://docs.databricks.com/en/workflows/jobs/how-to/use-dbt-in-workflows.html#advanced-run-dbt-with-a-custom-profile. -The target workspaces for staging and prod are defined in databricks.yml. +To change the warehouse, catalog, or schema used by deployed dbt jobs, +edit the settings in `dbt_profiles/profiles.yml`. + +The target workspaces for staging and prod are defined in `databricks.yml`. You can manually deploy based on these configurations (see below). Or you can use CI/CD to automate deployment. See https://docs.databricks.com/dev-tools/bundles/ci-cd.html for documentation @@ -120,7 +123,7 @@ For example, the default template would deploy a job called You can find that job by opening your workpace and clicking on **Workflows**. You can also deploy to your production target directly from the command-line. -The warehouse, catalog, and schema for that target are configured in databricks.yml. +The warehouse, catalog, and schema for that target are configured in `dbt_profiles/profiles.yml`. When deploying to this target, note that the default job at resources/my_dbt_sql.job.yml has a schedule set that runs every day. The schedule is paused when deploying in development mode (see https://docs.databricks.com/dev-tools/bundles/deployment-modes.html). 
diff --git a/acceptance/bundle/templates/default-sql/output/my_default_sql/README.md b/acceptance/bundle/templates/default-sql/output/my_default_sql/README.md index 67ded153f1..903869eebe 100644 --- a/acceptance/bundle/templates/default-sql/output/my_default_sql/README.md +++ b/acceptance/bundle/templates/default-sql/output/my_default_sql/README.md @@ -39,3 +39,8 @@ The 'my_default_sql' project was generated by using the default-sql template. 7. For documentation on the Databricks Asset Bundles format used for this project, and for CI/CD configuration, see https://docs.databricks.com/dev-tools/bundles/index.html. + +## Changing the warehouse, catalog, or schema + +The default SQL warehouse, catalog, and schema are configured in `databricks.yml`. +To change these settings, edit the `variables` section for each target (dev/prod). diff --git a/experimental/apps-mcp/lib/middlewares/warehouse.go b/experimental/apps-mcp/lib/middlewares/warehouse.go index 9bf1b0a071..8f16ab9135 100644 --- a/experimental/apps-mcp/lib/middlewares/warehouse.go +++ b/experimental/apps-mcp/lib/middlewares/warehouse.go @@ -2,15 +2,12 @@ package middlewares import ( "context" - "errors" "fmt" - "net/url" - "sort" "sync" "github.com/databricks/cli/experimental/apps-mcp/lib/session" + "github.com/databricks/cli/libs/databrickscfg/cfgpickers" "github.com/databricks/cli/libs/env" - "github.com/databricks/databricks-sdk-go/httpclient" "github.com/databricks/databricks-sdk-go/service/sql" ) @@ -83,13 +80,13 @@ func GetWarehouseID(ctx context.Context) (string, error) { } func getDefaultWarehouse(ctx context.Context) (*sql.EndpointInfo, error) { - // first resolve DATABRICKS_WAREHOUSE_ID env variable + w, err := GetDatabricksClient(ctx) + if err != nil { + return nil, fmt.Errorf("get databricks client: %w", err) + } + warehouseID := env.Get(ctx, "DATABRICKS_WAREHOUSE_ID") if warehouseID != "" { - w, err := GetDatabricksClient(ctx) - if err != nil { - return nil, fmt.Errorf("get databricks client: %w", err) - } warehouse, err := w.Warehouses.Get(ctx, sql.GetWarehouseRequest{ Id: warehouseID, }) @@ -103,48 +100,5 @@ func getDefaultWarehouse(ctx context.Context) (*sql.EndpointInfo, error) { }, nil } - apiClient, err := GetApiClient(ctx) - if err != nil { - return nil, err - } - - apiPath := "/api/2.0/sql/warehouses" - params := url.Values{} - params.Add("skip_cannot_use", "true") - fullPath := fmt.Sprintf("%s?%s", apiPath, params.Encode()) - - var response sql.ListWarehousesResponse - err = apiClient.Do(ctx, "GET", fullPath, httpclient.WithResponseUnmarshal(&response)) - if err != nil { - return nil, err - } - - priorities := map[sql.State]int{ - sql.StateRunning: 1, - sql.StateStarting: 2, - sql.StateStopped: 3, - sql.StateStopping: 4, - sql.StateDeleted: 99, - sql.StateDeleting: 99, - } - - warehouses := response.Warehouses - sort.Slice(warehouses, func(i, j int) bool { - return priorities[warehouses[i].State] < priorities[warehouses[j].State] - }) - - if len(warehouses) == 0 { - return nil, errNoWarehouses() - } - - firstWarehouse := warehouses[0] - if firstWarehouse.State == sql.StateDeleted || firstWarehouse.State == sql.StateDeleting { - return nil, errNoWarehouses() - } - - return &firstWarehouse, nil -} - -func errNoWarehouses() error { - return errors.New("no warehouse found. 
You can explicitly set the warehouse ID using the DATABRICKS_WAREHOUSE_ID environment variable") + return cfgpickers.GetDefaultWarehouse(ctx, w) } diff --git a/libs/cmdio/io.go b/libs/cmdio/io.go index 356c6f9ac8..d0bb9e6c2e 100644 --- a/libs/cmdio/io.go +++ b/libs/cmdio/io.go @@ -114,7 +114,7 @@ func (c *cmdIO) Select(items []Tuple, label string) (id string, err error) { StartInSearchMode: true, Searcher: func(input string, idx int) bool { lower := strings.ToLower(items[idx].Name) - return strings.Contains(lower, input) + return strings.Contains(lower, strings.ToLower(input)) }, Templates: &promptui.SelectTemplates{ Active: `{{.Name | bold}} ({{.Id|faint}})`, diff --git a/libs/databrickscfg/cfgpickers/warehouses.go b/libs/databrickscfg/cfgpickers/warehouses.go index 65b5f8c83c..49f4a84c98 100644 --- a/libs/databrickscfg/cfgpickers/warehouses.go +++ b/libs/databrickscfg/cfgpickers/warehouses.go @@ -4,14 +4,19 @@ import ( "context" "errors" "fmt" + "sort" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/config" + "github.com/databricks/databricks-sdk-go/httpclient" "github.com/databricks/databricks-sdk-go/service/sql" "github.com/fatih/color" + "github.com/manifoldco/promptui" ) -var ErrNoCompatibleWarehouses = errors.New("no compatible warehouses") +var ErrNoCompatibleWarehouses = errors.New("no compatible warehouses. You can explicitly set the warehouse ID using the DATABRICKS_WAREHOUSE_ID environment variable") type warehouseFilter func(sql.EndpointInfo) bool @@ -63,3 +68,130 @@ func AskForWarehouse(ctx context.Context, w *databricks.WorkspaceClient, filters } return cmdio.Select(ctx, names, "Choose SQL Warehouse") } + +// sortWarehousesByState sorts warehouses by state priority (running first) and filters out deleted ones. +func sortWarehousesByState(all []sql.EndpointInfo) []sql.EndpointInfo { + var warehouses []sql.EndpointInfo + for _, wh := range all { + if wh.State != sql.StateDeleted && wh.State != sql.StateDeleting { + warehouses = append(warehouses, wh) + } + } + + priorities := map[sql.State]int{ + sql.StateRunning: 1, + sql.StateStarting: 2, + sql.StateStopped: 3, + sql.StateStopping: 4, + } + sort.Slice(warehouses, func(i, j int) bool { + return priorities[warehouses[i].State] < priorities[warehouses[j].State] + }) + + return warehouses +} + +// GetDefaultWarehouse returns the default warehouse for the workspace. +// It tries the following in order: +// 1. The "default" warehouse via API (server-side convention, not yet fully rolled out) +// 2. 
The first usable warehouse sorted by state (running first) +func GetDefaultWarehouse(ctx context.Context, w *databricks.WorkspaceClient) (*sql.EndpointInfo, error) { + // Try the "default" warehouse convention first + // This is a new server-side feature that may not be available everywhere yet + warehouse, err := w.Warehouses.Get(ctx, sql.GetWarehouseRequest{Id: "default"}) + if err == nil { + return &sql.EndpointInfo{ + Id: warehouse.Id, + Name: warehouse.Name, + State: warehouse.State, + }, nil + } + var apiErr *apierr.APIError + if !errors.As(err, &apiErr) || apiErr.StatusCode >= 500 { + return nil, fmt.Errorf("get default warehouse: %w", err) + } + + warehouses, err := listUsableWarehouses(ctx, w) + if err != nil { + return nil, err + } + warehouses = sortWarehousesByState(warehouses) + if len(warehouses) == 0 { + return nil, ErrNoCompatibleWarehouses + } + return &warehouses[0], nil +} + +// listUsableWarehouses returns warehouses the user has permission to use. +// This uses the skip_cannot_use=true parameter to filter out inaccessible warehouses. +func listUsableWarehouses(ctx context.Context, w *databricks.WorkspaceClient) ([]sql.EndpointInfo, error) { + // The SDK doesn't expose skip_cannot_use parameter, so we use the raw API + clientCfg, err := config.HTTPClientConfigFromConfig(w.Config) + if err != nil { + return nil, fmt.Errorf("create HTTP client config: %w", err) + } + apiClient := httpclient.NewApiClient(clientCfg) + + var response sql.ListWarehousesResponse + err = apiClient.Do(ctx, "GET", "/api/2.0/sql/warehouses?skip_cannot_use=true", + httpclient.WithResponseUnmarshal(&response)) + if err != nil { + return nil, fmt.Errorf("list warehouses: %w", err) + } + return response.Warehouses, nil +} + +// SelectWarehouse prompts the user to select a SQL warehouse and returns the warehouse ID. +// Warehouses are sorted by state (running first) so the default selection is the best available. +// In non-interactive mode, returns the first (best) warehouse automatically. +// The description parameter is shown before the picker (if non-empty). 
+func SelectWarehouse(ctx context.Context, w *databricks.WorkspaceClient, description string) (string, error) { + all, err := w.Warehouses.ListAll(ctx, sql.ListWarehousesRequest{}) + if err != nil { + return "", fmt.Errorf("list warehouses: %w", err) + } + + warehouses := sortWarehousesByState(all) + if len(warehouses) == 0 { + return "", ErrNoCompatibleWarehouses + } + + if len(warehouses) == 1 || !cmdio.IsPromptSupported(ctx) { + return warehouses[0].Id, nil + } + + // Build options for the picker (● = running, ○ = not running) + var items []cmdio.Tuple + for i, warehouse := range warehouses { + var icon string + if warehouse.State == sql.StateRunning { + icon = color.GreenString("●") + } else { + icon = color.HiBlackString("○") + } + + name := fmt.Sprintf("%s %s", icon, warehouse.Name) + if i == 0 { + name += " [DEFAULT]" + } + items = append(items, cmdio.Tuple{Name: name, Id: warehouse.Id}) + } + + if description != "" { + cmdio.LogString(ctx, description) + } + promptui.SearchPrompt = "Search: " + warehouseId, err := cmdio.SelectOrdered(ctx, items, "warehouse\n") + if err != nil { + return "", err + } + + for _, wh := range warehouses { + if wh.Id == warehouseId { + cmdio.LogString(ctx, fmt.Sprintf("warehouse_id: %s (%s)", warehouseId, wh.Name)) + break + } + } + + return warehouseId, nil +} diff --git a/libs/jsonschema/schema.go b/libs/jsonschema/schema.go index 47a6407470..1520fb4281 100644 --- a/libs/jsonschema/schema.go +++ b/libs/jsonschema/schema.go @@ -65,6 +65,10 @@ type Schema struct { // here: https://github.com/google/re2/wiki/Syntax Pattern string `json:"pattern,omitempty"` + // Format defines a semantic format for string values (e.g., "date-time", "email", "uri"). + // For templates, custom formats like "warehouse_path" trigger special input handling. + Format string `json:"format,omitempty"` + // Extension embeds our custom JSON schema extensions. Extension diff --git a/libs/template/config.go b/libs/template/config.go index d8f9829649..13f61716e3 100644 --- a/libs/template/config.go +++ b/libs/template/config.go @@ -6,7 +6,9 @@ import ( "fmt" "io/fs" + "github.com/databricks/cli/libs/cmdctx" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/databrickscfg/cfgpickers" "github.com/databricks/cli/libs/jsonschema" "github.com/databricks/cli/libs/log" "golang.org/x/exp/maps" @@ -112,6 +114,18 @@ func (c *config) assignDefaultValues(r *renderer) error { if _, ok := c.values[name]; ok { continue } + + if property.Format != "" { + val, err := c.resolveFormat(property.Format, "") + if err != nil { + return err + } + if val != "" { + c.values[name] = val + continue + } + } + // No default value defined for the property if property.Default == nil { continue @@ -133,6 +147,22 @@ func (c *config) assignDefaultValues(r *renderer) error { return nil } +// resolveFormat returns a value for custom format types. +// Returns empty string for unknown formats (to fall back to default handling). +func (c *config) resolveFormat(format, description string) (string, error) { + switch format { + case "warehouse_path": + w := cmdctx.WorkspaceClient(c.ctx) + warehouseId, err := cfgpickers.SelectWarehouse(c.ctx, w, description) + if err != nil { + return "", err + } + return "/sql/1.0/warehouses/" + warehouseId, nil + default: + return "", nil + } +} + func (c *config) skipPrompt(p jsonschema.Property, r *renderer) (bool, error) { // Config already has a value assigned. We don't have to prompt for a user input. 
if _, ok := c.values[p.Name]; ok { @@ -170,26 +200,34 @@ func (c *config) skipPrompt(p jsonschema.Property, r *renderer) (bool, error) { func (c *config) promptOnce(property *jsonschema.Schema, name, defaultVal, description string) error { var userInput string - if property.Enum != nil { - // List options for the user to select from - options, err := property.EnumStringSlice() - if err != nil { - return err - } - userInput, err = cmdio.AskSelect(c.ctx, description, options) + var err error + + if property.Format != "" { + userInput, err = c.resolveFormat(property.Format, description) if err != nil { return err } - } else { - var err error - userInput, err = cmdio.Ask(c.ctx, description, defaultVal) - if err != nil { - return err + } + + if userInput == "" { + if property.Enum != nil { + options, err := property.EnumStringSlice() + if err != nil { + return err + } + userInput, err = cmdio.AskSelect(c.ctx, description, options) + if err != nil { + return err + } + } else { + userInput, err = cmdio.Ask(c.ctx, description, defaultVal) + if err != nil { + return err + } } } // Convert user input string back to a Go value - var err error c.values[name], err = property.ParseString(userInput) if err != nil { // Show error and retry if validation fails diff --git a/libs/template/templates/dbt-sql/databricks_template_schema.json b/libs/template/templates/dbt-sql/databricks_template_schema.json index 962913be06..34d2874f86 100644 --- a/libs/template/templates/dbt-sql/databricks_template_schema.json +++ b/libs/template/templates/dbt-sql/databricks_template_schema.json @@ -11,10 +11,9 @@ }, "http_path": { "type": "string", - "pattern": "^/sql/.\\../warehouses/[a-z0-9]+$", - "pattern_match_failure_message": "Path must be of the form /sql/1.0/warehouses/", - "description": "\nPlease provide the HTTP Path of the SQL warehouse you would like to use with dbt during development.\nYou can find this path by clicking on \"Connection details\" for your SQL warehouse.\nhttp_path [example: /sql/1.0/warehouses/abcdef1234567890]", - "order": 2 + "description": "\nSelect a SQL warehouse to use during development (you can change this in databricks.yml later).", + "order": 2, + "format": "warehouse_path" }, "default_catalog": { "type": "string", diff --git a/libs/template/templates/dbt-sql/template/{{.project_name}}/README.md.tmpl b/libs/template/templates/dbt-sql/template/{{.project_name}}/README.md.tmpl index cd4c29a76d..7d94d1890b 100644 --- a/libs/template/templates/dbt-sql/template/{{.project_name}}/README.md.tmpl +++ b/libs/template/templates/dbt-sql/template/{{.project_name}}/README.md.tmpl @@ -88,12 +88,15 @@ $ dbt test ## Production setup -Your production dbt profiles are defined in dbt_profiles/profiles.yml. -These profiles define the default catalog, schema, and any other +Your production dbt profiles are defined in `dbt_profiles/profiles.yml`. +These profiles define the default warehouse, catalog, schema, and any other target-specific settings. Read more about dbt profiles on Databricks at https://docs.databricks.com/en/workflows/jobs/how-to/use-dbt-in-workflows.html#advanced-run-dbt-with-a-custom-profile. -The target workspaces for staging and prod are defined in databricks.yml. +To change the warehouse, catalog, or schema used by deployed dbt jobs, +edit the settings in `dbt_profiles/profiles.yml`. + +The target workspaces for staging and prod are defined in `databricks.yml`. You can manually deploy based on these configurations (see below). Or you can use CI/CD to automate deployment. 
See https://docs.databricks.com/dev-tools/bundles/ci-cd.html for documentation @@ -120,7 +123,7 @@ For example, the default template would deploy a job called You can find that job by opening your workpace and clicking on **Workflows**. You can also deploy to your production target directly from the command-line. -The warehouse, catalog, and schema for that target are configured in databricks.yml. +The warehouse, catalog, and schema for that target are configured in `dbt_profiles/profiles.yml`. When deploying to this target, note that the default job at resources/{{.project_name}}.job.yml has a schedule set that runs every day. The schedule is paused when deploying in development mode (see https://docs.databricks.com/dev-tools/bundles/deployment-modes.html). diff --git a/libs/template/templates/default-sql/databricks_template_schema.json b/libs/template/templates/default-sql/databricks_template_schema.json index 113cbef642..cb73006001 100644 --- a/libs/template/templates/default-sql/databricks_template_schema.json +++ b/libs/template/templates/default-sql/databricks_template_schema.json @@ -11,10 +11,9 @@ }, "http_path": { "type": "string", - "pattern": "^/sql/.\\../warehouses/[a-z0-9]+$", - "pattern_match_failure_message": "Path must be of the form /sql/1.0/warehouses/", - "description": "\nPlease provide the HTTP Path of the SQL warehouse you would like to use during development.\nYou can find this path by clicking on \"Connection details\" for your SQL warehouse.\nhttp_path [example: /sql/1.0/warehouses/abcdef1234567890]", - "order": 2 + "description": "\nSelect a SQL warehouse to use during development (you can change this in databricks.yml later).", + "order": 2, + "format": "warehouse_path" }, "default_catalog": { "type": "string", diff --git a/libs/template/templates/default-sql/template/{{.project_name}}/README.md.tmpl b/libs/template/templates/default-sql/template/{{.project_name}}/README.md.tmpl index e5c44320d9..866e2a41ab 100644 --- a/libs/template/templates/default-sql/template/{{.project_name}}/README.md.tmpl +++ b/libs/template/templates/default-sql/template/{{.project_name}}/README.md.tmpl @@ -39,3 +39,8 @@ The '{{.project_name}}' project was generated by using the default-sql template. 7. For documentation on the Databricks Asset Bundles format used for this project, and for CI/CD configuration, see https://docs.databricks.com/dev-tools/bundles/index.html. + +## Changing the warehouse, catalog, or schema + +The default SQL warehouse, catalog, and schema are configured in `databricks.yml`. +To change these settings, edit the `variables` section for each target (dev/prod). 
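The first patch exports `GetDefaultWarehouse` and `SelectWarehouse` from `cfgpickers` so other commands can share the warehouse-resolution logic. Below is a minimal sketch of calling the non-interactive helper from a standalone program; the client construction, error handling, and program structure are illustrative only and not part of the patch.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/databricks/cli/libs/databrickscfg/cfgpickers"
	"github.com/databricks/databricks-sdk-go"
)

func main() {
	ctx := context.Background()

	// Workspace client resolved from the usual unified auth configuration
	// (profile, DATABRICKS_HOST, etc.); this setup is illustrative only.
	w, err := databricks.NewWorkspaceClient()
	if err != nil {
		log.Fatal(err)
	}

	// Per the doc comment added in this patch, this prefers the server-side
	// "default" warehouse when available and otherwise falls back to the
	// first usable warehouse sorted by state (running first).
	wh, err := cfgpickers.GetDefaultWarehouse(ctx, w)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("default warehouse: %s (%s)\n", wh.Name, wh.Id)
}
```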
From 4a84af61765e21d899da8f8e5bf06702e71ccf24 Mon Sep 17 00:00:00 2001 From: Lennart Kats Date: Tue, 23 Dec 2025 15:13:25 +0100 Subject: [PATCH 2/5] Improve SelectWarehouse: add filters, type info, and better sorting MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add filter support to SelectWarehouse (variadic warehouseFilter) - Show warehouse type (pro/classic/serverless) in gray lowercase - Make [DEFAULT] marker gray - Sort by state priority then alphabetically for default selection - Display list sorted by running state first, then alphabetically 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- libs/databrickscfg/cfgpickers/warehouses.go | 56 ++++++++++++++++++--- 1 file changed, 49 insertions(+), 7 deletions(-) diff --git a/libs/databrickscfg/cfgpickers/warehouses.go b/libs/databrickscfg/cfgpickers/warehouses.go index 49f4a84c98..45cbacf615 100644 --- a/libs/databrickscfg/cfgpickers/warehouses.go +++ b/libs/databrickscfg/cfgpickers/warehouses.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "sort" + "strings" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/databricks-sdk-go" @@ -69,7 +70,8 @@ func AskForWarehouse(ctx context.Context, w *databricks.WorkspaceClient, filters return cmdio.Select(ctx, names, "Choose SQL Warehouse") } -// sortWarehousesByState sorts warehouses by state priority (running first) and filters out deleted ones. +// sortWarehousesByState sorts warehouses by state priority (running first), then alphabetically by name. +// Deleted warehouses are filtered out. func sortWarehousesByState(all []sql.EndpointInfo) []sql.EndpointInfo { var warehouses []sql.EndpointInfo for _, wh := range all { @@ -85,7 +87,11 @@ func sortWarehousesByState(all []sql.EndpointInfo) []sql.EndpointInfo { sql.StateStopping: 4, } sort.Slice(warehouses, func(i, j int) bool { - return priorities[warehouses[i].State] < priorities[warehouses[j].State] + pi, pj := priorities[warehouses[i].State], priorities[warehouses[j].State] + if pi != pj { + return pi < pj + } + return strings.ToLower(warehouses[i].Name) < strings.ToLower(warehouses[j].Name) }) return warehouses @@ -145,13 +151,30 @@ func listUsableWarehouses(ctx context.Context, w *databricks.WorkspaceClient) ([ // Warehouses are sorted by state (running first) so the default selection is the best available. // In non-interactive mode, returns the first (best) warehouse automatically. // The description parameter is shown before the picker (if non-empty). 
-func SelectWarehouse(ctx context.Context, w *databricks.WorkspaceClient, description string) (string, error) { +func SelectWarehouse(ctx context.Context, w *databricks.WorkspaceClient, description string, filters ...warehouseFilter) (string, error) { all, err := w.Warehouses.ListAll(ctx, sql.ListWarehousesRequest{}) if err != nil { return "", fmt.Errorf("list warehouses: %w", err) } warehouses := sortWarehousesByState(all) + + // Apply filters + var filtered []sql.EndpointInfo + for _, wh := range warehouses { + skip := false + for _, filter := range filters { + if !filter(wh) { + skip = true + break + } + } + if !skip { + filtered = append(filtered, wh) + } + } + warehouses = filtered + if len(warehouses) == 0 { return "", ErrNoCompatibleWarehouses } @@ -160,9 +183,22 @@ func SelectWarehouse(ctx context.Context, w *databricks.WorkspaceClient, descrip return warehouses[0].Id, nil } + // The first warehouse (sorted by state, then alphabetically) is the default + defaultId := warehouses[0].Id + + // Sort by running state first, then alphabetically for display + sort.Slice(warehouses, func(i, j int) bool { + iRunning := warehouses[i].State == sql.StateRunning + jRunning := warehouses[j].State == sql.StateRunning + if iRunning != jRunning { + return iRunning + } + return strings.ToLower(warehouses[i].Name) < strings.ToLower(warehouses[j].Name) + }) + // Build options for the picker (● = running, ○ = not running) var items []cmdio.Tuple - for i, warehouse := range warehouses { + for _, warehouse := range warehouses { var icon string if warehouse.State == sql.StateRunning { icon = color.GreenString("●") @@ -170,9 +206,15 @@ func SelectWarehouse(ctx context.Context, w *databricks.WorkspaceClient, descrip icon = color.HiBlackString("○") } - name := fmt.Sprintf("%s %s", icon, warehouse.Name) - if i == 0 { - name += " [DEFAULT]" + // Show type info in gray + typeInfo := strings.ToLower(string(warehouse.WarehouseType)) + if warehouse.EnableServerlessCompute { + typeInfo = "serverless" + } + + name := fmt.Sprintf("%s %s %s", icon, warehouse.Name, color.HiBlackString(typeInfo)) + if warehouse.Id == defaultId { + name += color.HiBlackString(" [DEFAULT]") } items = append(items, cmdio.Tuple{Name: name, Id: warehouse.Id}) } From 076c18e870e6831b5c4a8a0a03a0ae3e08cce525 Mon Sep 17 00:00:00 2001 From: Lennart Kats Date: Tue, 23 Dec 2025 16:47:52 +0100 Subject: [PATCH 3/5] Cleanup --- .../dbt-sql/output/my_dbt_sql/README.md | 3 -- .../apps-mcp/lib/middlewares/warehouse.go | 1 + libs/databrickscfg/cfgpickers/clusters.go | 2 +- libs/databrickscfg/cfgpickers/warehouses.go | 2 +- libs/jsonschema/schema.go | 3 +- libs/template/config.go | 42 +++++++++---------- .../template/{{.project_name}}/README.md.tmpl | 3 -- 7 files changed, 24 insertions(+), 32 deletions(-) diff --git a/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/README.md b/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/README.md index e8c7ec95d7..7a36fa56fc 100644 --- a/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/README.md +++ b/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/README.md @@ -93,9 +93,6 @@ These profiles define the default warehouse, catalog, schema, and any other target-specific settings. Read more about dbt profiles on Databricks at https://docs.databricks.com/en/workflows/jobs/how-to/use-dbt-in-workflows.html#advanced-run-dbt-with-a-custom-profile. -To change the warehouse, catalog, or schema used by deployed dbt jobs, -edit the settings in `dbt_profiles/profiles.yml`. 
- The target workspaces for staging and prod are defined in `databricks.yml`. You can manually deploy based on these configurations (see below). Or you can use CI/CD to automate deployment. See diff --git a/experimental/apps-mcp/lib/middlewares/warehouse.go b/experimental/apps-mcp/lib/middlewares/warehouse.go index 8f16ab9135..dd56a65828 100644 --- a/experimental/apps-mcp/lib/middlewares/warehouse.go +++ b/experimental/apps-mcp/lib/middlewares/warehouse.go @@ -85,6 +85,7 @@ func getDefaultWarehouse(ctx context.Context) (*sql.EndpointInfo, error) { return nil, fmt.Errorf("get databricks client: %w", err) } + // first resolve DATABRICKS_WAREHOUSE_ID env variable warehouseID := env.Get(ctx, "DATABRICKS_WAREHOUSE_ID") if warehouseID != "" { warehouse, err := w.Warehouses.Get(ctx, sql.GetWarehouseRequest{ diff --git a/libs/databrickscfg/cfgpickers/clusters.go b/libs/databrickscfg/cfgpickers/clusters.go index 44e4f71fb7..d1be8158a9 100644 --- a/libs/databrickscfg/cfgpickers/clusters.go +++ b/libs/databrickscfg/cfgpickers/clusters.go @@ -194,7 +194,7 @@ func AskForCluster(ctx context.Context, w *databricks.WorkspaceClient, filters . Items: compatible, Searcher: func(input string, idx int) bool { lower := strings.ToLower(compatible[idx].ClusterName) - return strings.Contains(lower, input) + return strings.Contains(lower, strings.ToLower(input)) }, StartInSearchMode: true, Templates: &promptui.SelectTemplates{ diff --git a/libs/databrickscfg/cfgpickers/warehouses.go b/libs/databrickscfg/cfgpickers/warehouses.go index 45cbacf615..73c60dc08e 100644 --- a/libs/databrickscfg/cfgpickers/warehouses.go +++ b/libs/databrickscfg/cfgpickers/warehouses.go @@ -17,7 +17,7 @@ import ( "github.com/manifoldco/promptui" ) -var ErrNoCompatibleWarehouses = errors.New("no compatible warehouses. You can explicitly set the warehouse ID using the DATABRICKS_WAREHOUSE_ID environment variable") +var ErrNoCompatibleWarehouses = errors.New("no compatible warehouses") type warehouseFilter func(sql.EndpointInfo) bool diff --git a/libs/jsonschema/schema.go b/libs/jsonschema/schema.go index 1520fb4281..99afa2f677 100644 --- a/libs/jsonschema/schema.go +++ b/libs/jsonschema/schema.go @@ -65,8 +65,7 @@ type Schema struct { // here: https://github.com/google/re2/wiki/Syntax Pattern string `json:"pattern,omitempty"` - // Format defines a semantic format for string values (e.g., "date-time", "email", "uri"). - // For templates, custom formats like "warehouse_path" trigger special input handling. + // Format specifies custom input handling. Supported: "warehouse_path". Format string `json:"format,omitempty"` // Extension embeds our custom JSON schema extensions. 
diff --git a/libs/template/config.go b/libs/template/config.go index 13f61716e3..8e1e2ffe38 100644 --- a/libs/template/config.go +++ b/libs/template/config.go @@ -115,15 +115,14 @@ func (c *config) assignDefaultValues(r *renderer) error { continue } + // Resolve custom formats (e.g., warehouse_path auto-selects the default warehouse) if property.Format != "" { val, err := c.resolveFormat(property.Format, "") if err != nil { return err } - if val != "" { - c.values[name] = val - continue - } + c.values[name] = val + continue } // No default value defined for the property @@ -200,34 +199,33 @@ func (c *config) skipPrompt(p jsonschema.Property, r *renderer) (bool, error) { func (c *config) promptOnce(property *jsonschema.Schema, name, defaultVal, description string) error { var userInput string - var err error if property.Format != "" { + var err error userInput, err = c.resolveFormat(property.Format, description) if err != nil { return err } - } - - if userInput == "" { - if property.Enum != nil { - options, err := property.EnumStringSlice() - if err != nil { - return err - } - userInput, err = cmdio.AskSelect(c.ctx, description, options) - if err != nil { - return err - } - } else { - userInput, err = cmdio.Ask(c.ctx, description, defaultVal) - if err != nil { - return err - } + } else if property.Enum != nil { + // List options for the user to select from + options, err := property.EnumStringSlice() + if err != nil { + return err + } + userInput, err = cmdio.AskSelect(c.ctx, description, options) + if err != nil { + return err + } + } else { + var err error + userInput, err = cmdio.Ask(c.ctx, description, defaultVal) + if err != nil { + return err } } // Convert user input string back to a Go value + var err error c.values[name], err = property.ParseString(userInput) if err != nil { // Show error and retry if validation fails diff --git a/libs/template/templates/dbt-sql/template/{{.project_name}}/README.md.tmpl b/libs/template/templates/dbt-sql/template/{{.project_name}}/README.md.tmpl index 7d94d1890b..1b40e4f3df 100644 --- a/libs/template/templates/dbt-sql/template/{{.project_name}}/README.md.tmpl +++ b/libs/template/templates/dbt-sql/template/{{.project_name}}/README.md.tmpl @@ -93,9 +93,6 @@ These profiles define the default warehouse, catalog, schema, and any other target-specific settings. Read more about dbt profiles on Databricks at https://docs.databricks.com/en/workflows/jobs/how-to/use-dbt-in-workflows.html#advanced-run-dbt-with-a-custom-profile. -To change the warehouse, catalog, or schema used by deployed dbt jobs, -edit the settings in `dbt_profiles/profiles.yml`. - The target workspaces for staging and prod are defined in `databricks.yml`. You can manually deploy based on these configurations (see below). Or you can use CI/CD to automate deployment. 
See From 5bc507d22b5f354b7960bfd47138b6e4709998cf Mon Sep 17 00:00:00 2001 From: Lennart Kats Date: Tue, 23 Dec 2025 16:57:54 +0100 Subject: [PATCH 4/5] Revert apps-mcp warehouse changes (for separate PR) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- .../apps-mcp/lib/middlewares/warehouse.go | 59 ++++++++++++++++--- 1 file changed, 52 insertions(+), 7 deletions(-) diff --git a/experimental/apps-mcp/lib/middlewares/warehouse.go b/experimental/apps-mcp/lib/middlewares/warehouse.go index dd56a65828..9bf1b0a071 100644 --- a/experimental/apps-mcp/lib/middlewares/warehouse.go +++ b/experimental/apps-mcp/lib/middlewares/warehouse.go @@ -2,12 +2,15 @@ package middlewares import ( "context" + "errors" "fmt" + "net/url" + "sort" "sync" "github.com/databricks/cli/experimental/apps-mcp/lib/session" - "github.com/databricks/cli/libs/databrickscfg/cfgpickers" "github.com/databricks/cli/libs/env" + "github.com/databricks/databricks-sdk-go/httpclient" "github.com/databricks/databricks-sdk-go/service/sql" ) @@ -80,14 +83,13 @@ func GetWarehouseID(ctx context.Context) (string, error) { } func getDefaultWarehouse(ctx context.Context) (*sql.EndpointInfo, error) { - w, err := GetDatabricksClient(ctx) - if err != nil { - return nil, fmt.Errorf("get databricks client: %w", err) - } - // first resolve DATABRICKS_WAREHOUSE_ID env variable warehouseID := env.Get(ctx, "DATABRICKS_WAREHOUSE_ID") if warehouseID != "" { + w, err := GetDatabricksClient(ctx) + if err != nil { + return nil, fmt.Errorf("get databricks client: %w", err) + } warehouse, err := w.Warehouses.Get(ctx, sql.GetWarehouseRequest{ Id: warehouseID, }) @@ -101,5 +103,48 @@ func getDefaultWarehouse(ctx context.Context) (*sql.EndpointInfo, error) { }, nil } - return cfgpickers.GetDefaultWarehouse(ctx, w) + apiClient, err := GetApiClient(ctx) + if err != nil { + return nil, err + } + + apiPath := "/api/2.0/sql/warehouses" + params := url.Values{} + params.Add("skip_cannot_use", "true") + fullPath := fmt.Sprintf("%s?%s", apiPath, params.Encode()) + + var response sql.ListWarehousesResponse + err = apiClient.Do(ctx, "GET", fullPath, httpclient.WithResponseUnmarshal(&response)) + if err != nil { + return nil, err + } + + priorities := map[sql.State]int{ + sql.StateRunning: 1, + sql.StateStarting: 2, + sql.StateStopped: 3, + sql.StateStopping: 4, + sql.StateDeleted: 99, + sql.StateDeleting: 99, + } + + warehouses := response.Warehouses + sort.Slice(warehouses, func(i, j int) bool { + return priorities[warehouses[i].State] < priorities[warehouses[j].State] + }) + + if len(warehouses) == 0 { + return nil, errNoWarehouses() + } + + firstWarehouse := warehouses[0] + if firstWarehouse.State == sql.StateDeleted || firstWarehouse.State == sql.StateDeleting { + return nil, errNoWarehouses() + } + + return &firstWarehouse, nil +} + +func errNoWarehouses() error { + return errors.New("no warehouse found. 
You can explicitly set the warehouse ID using the DATABRICKS_WAREHOUSE_ID environment variable") } From 9e3669fff2a4dad4fcb74e92e03ce899f9723717 Mon Sep 17 00:00:00 2001 From: Lennart Kats Date: Tue, 23 Dec 2025 17:00:46 +0100 Subject: [PATCH 5/5] Add NEXT_CHANGELOG entry for warehouse picker MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- NEXT_CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 969164d869..8081e01251 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -8,6 +8,8 @@ ### Bundles +* Add interactive SQL warehouse picker to `default-sql` and `dbt-sql` bundle templates ([#4170](https://github.com/databricks/cli/pull/4170)) + ### Dependency updates ### API Changes
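After the second patch, `SelectWarehouse` also accepts optional filters and shows the warehouse type next to each entry. The sketch below shows how the interactive picker could be invoked from inside an existing CLI command; it assumes the command context has already been initialized by the command framework (workspace client via `cmdctx`, cmdio prompt state), as `libs/template/config.go` does in this series, and the helper name is hypothetical.

```go
package example

import (
	"context"

	"github.com/databricks/cli/libs/cmdctx"
	"github.com/databricks/cli/libs/databrickscfg/cfgpickers"
)

// pickWarehouseHTTPPath is a hypothetical helper mirroring how the templates
// in this series turn a picked warehouse into an http_path value.
func pickWarehouseHTTPPath(ctx context.Context) (string, error) {
	// Assumes ctx was set up by the CLI command framework; only call this
	// from within a command where the workspace client is available.
	w := cmdctx.WorkspaceClient(ctx)

	// Filters are variadic and optional; omitting them lists every usable,
	// non-deleted warehouse, sorted running-first and then alphabetically.
	// In non-interactive mode the first (best) warehouse is returned directly.
	id, err := cfgpickers.SelectWarehouse(ctx, w,
		"Select a SQL warehouse to use during development.")
	if err != nil {
		return "", err
	}

	// The default-sql and dbt-sql templates store the selection as an
	// HTTP path of the form /sql/1.0/warehouses/<id>.
	return "/sql/1.0/warehouses/" + id, nil
}
```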