Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
83a8f43
Extract Lakebase target resolver into shared libs/lakebase/target
simonfaltum Apr 30, 2026
bfb6320
Add experimental postgres query command (autoscaling, text output)
simonfaltum Apr 30, 2026
edbd6be
Address review feedback on PR 1
simonfaltum Apr 30, 2026
030a279
Address review feedback round 2
simonfaltum Apr 30, 2026
5e0e3dd
Provisioned targeting + JSON/CSV streaming + typed values
simonfaltum Apr 30, 2026
fdd0e1b
Address PR 2 review feedback round 1
simonfaltum Apr 30, 2026
287dd62
Address PR 2 review feedback round 2
simonfaltum Apr 30, 2026
5a27bf0
Cut blast radius: keep target package + acceptance tests inside exper…
simonfaltum Apr 30, 2026
b8d6865
Merge branch 'simonfaltum/postgres-query-pr1-scaffold' into simonfalt…
simonfaltum Apr 30, 2026
1a07988
Extract output-mode handling into experimental/libs/sqlcli
simonfaltum Apr 30, 2026
a5dff81
Address nitpicker findings: NO_COLOR-safe TTY check, dup-column colli…
simonfaltum Apr 30, 2026
e81ab27
PR 1 lint fix: drop unused provisioned helpers from internal/target
simonfaltum May 1, 2026
33ba0b8
Merge branch 'simonfaltum/postgres-query-pr1-scaffold' into simonfalt…
simonfaltum May 1, 2026
f714c23
PR 2 lint fix: re-add provisioned.go with only the helpers used here
simonfaltum May 1, 2026
51063d0
Merge branch 'main' into simonfaltum/postgres-query-pr1-scaffold
simonfaltum May 4, 2026
f704c38
Merge remote-tracking branch 'origin/main' into simonfaltum/postgres-…
simonfaltum May 4, 2026
a994941
Merge remote-tracking branch 'origin/simonfaltum/postgres-query-pr1-s…
simonfaltum May 4, 2026
219c573
Merge remote-tracking branch 'origin/simonfaltum/postgres-query-pr1-s…
simonfaltum May 4, 2026
4c44334
Fix TLS missing in postgres query connect
simonfaltum May 5, 2026
0bd69e7
Merge remote-tracking branch 'origin/simonfaltum/postgres-query-pr1-s…
simonfaltum May 5, 2026
a51dc83
Use net.JoinHostPort in pgx DSN to satisfy nosprintfhostport
simonfaltum May 5, 2026
97a771b
Merge remote-tracking branch 'origin/simonfaltum/postgres-query-pr1-s…
simonfaltum May 5, 2026
6757d4e
Show connecting status as a spinner that clears on success
simonfaltum May 5, 2026
d498b97
Merge remote-tracking branch 'origin/simonfaltum/postgres-query-pr1-s…
simonfaltum May 5, 2026
560e041
Merge remote-tracking branch 'origin/main' into simonfaltum/postgres-…
simonfaltum May 5, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
65 changes: 27 additions & 38 deletions experimental/aitools/cmd/query.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,9 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/experimental/aitools/lib/middlewares"
"github.com/databricks/cli/experimental/aitools/lib/session"
"github.com/databricks/cli/experimental/libs/sqlcli"
"github.com/databricks/cli/libs/cmdctx"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/env"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go/service/sql"
"github.com/spf13/cobra"
Expand All @@ -35,16 +34,6 @@ const (

// cancelTimeout is how long to wait for server-side cancellation.
cancelTimeout = 10 * time.Second

// staticTableThreshold is the maximum number of rows rendered as a static table.
// Beyond this, an interactive scrollable table is used.
staticTableThreshold = 30

// outputCSV is the csv output format, supported only by the query command.
outputCSV = "csv"

// envOutputFormat matches the env var name in cmd/root/io.go.
envOutputFormat = "DATABRICKS_OUTPUT_FORMAT"
)

type queryOutputMode int
Expand All @@ -55,8 +44,13 @@ const (
queryOutputModeInteractiveTable
)

func selectQueryOutputMode(outputType flags.Output, stdoutInteractive, promptSupported bool, rowCount int) queryOutputMode {
if outputType == flags.OutputJSON {
// selectQueryOutputMode picks the rendering mode for a single-query result.
// JSON is the only machine-readable option; static and interactive are
// table variants chosen by row count and TTY capabilities. Sharing only
// the threshold with sqlcli; the three-way decision is aitools-specific
// because the postgres command's renderers have a different shape.
func selectQueryOutputMode(format sqlcli.Format, stdoutInteractive, promptSupported bool, rowCount int) queryOutputMode {
if format == sqlcli.OutputJSON {
return queryOutputModeJSON
}
if !stdoutInteractive {
Expand All @@ -67,7 +61,7 @@ func selectQueryOutputMode(outputType flags.Output, stdoutInteractive, promptSup
if !promptSupported {
return queryOutputModeStaticTable
}
if rowCount <= staticTableThreshold {
if rowCount <= sqlcli.StaticTableThreshold {
return queryOutputModeStaticTable
}
return queryOutputModeInteractiveTable
Expand Down Expand Up @@ -119,24 +113,15 @@ interactive table browser. Use --output csv to export results as CSV.`,
RunE: func(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()

// Normalize case to match root --output behavior (flags.Output.Set lowercases).
outputFormat = strings.ToLower(outputFormat)

// If --output wasn't explicitly passed, check the env var.
// Invalid env values are silently ignored, matching cmd/root/io.go.
if !cmd.Flag("output").Changed {
if v, ok := env.Lookup(ctx, envOutputFormat); ok {
switch flags.Output(strings.ToLower(v)) {
case flags.OutputText, flags.OutputJSON, outputCSV:
outputFormat = strings.ToLower(v)
}
}
}

switch flags.Output(outputFormat) {
case flags.OutputText, flags.OutputJSON, outputCSV:
default:
return fmt.Errorf("unsupported output format %q, accepted values: text, json, csv", outputFormat)
// Resolve the effective format via sqlcli so the env-var
// precedence and explicit-text-on-pipe handling stays in sync
// across commands. We pass stdoutTTY=true to keep the original
// aitools behavior of not auto-falling-back to JSON here; the
// per-result render mode further down already handles the pipe
// case via selectQueryOutputMode.
format, err := sqlcli.ResolveFormat(ctx, outputFormat, cmd.Flag("output").Changed, true)
if err != nil {
return err
}

sqls, err := resolveSQLs(ctx, cmd, args, filePaths)
Expand All @@ -146,7 +131,7 @@ interactive table browser. Use --output csv to export results as CSV.`,

// Reject incompatible flag combinations before any API call so the
// user sees the real error instead of an auth/warehouse failure.
if len(sqls) > 1 && flags.Output(outputFormat) != flags.OutputJSON {
if len(sqls) > 1 && format != sqlcli.OutputJSON {
return fmt.Errorf("multiple queries require --output json (got %q); pass --output json to receive a JSON array of per-statement results", outputFormat)
}

Expand All @@ -173,7 +158,7 @@ interactive table browser. Use --output csv to export results as CSV.`,
}

// CSV bypasses the normal output mode selection.
if flags.Output(outputFormat) == outputCSV {
if format == sqlcli.OutputCSV {
if len(columns) == 0 && len(rows) == 0 {
return nil
}
Expand All @@ -190,7 +175,7 @@ interactive table browser. Use --output csv to export results as CSV.`,
stdoutInteractive := cmdio.SupportsColor(ctx, cmd.OutOrStdout())
promptSupported := cmdio.IsPromptSupported(ctx)

switch selectQueryOutputMode(flags.Output(outputFormat), stdoutInteractive, promptSupported, len(rows)) {
switch selectQueryOutputMode(format, stdoutInteractive, promptSupported, len(rows)) {
case queryOutputModeJSON:
return renderJSON(cmd.OutOrStdout(), columns, rows)
case queryOutputModeStaticTable:
Expand All @@ -206,9 +191,13 @@ interactive table browser. Use --output csv to export results as CSV.`,
cmd.Flags().IntVar(&concurrency, "concurrency", defaultBatchConcurrency, "Maximum in-flight statements when running a batch of queries")
// Local --output flag shadows the root command's persistent --output flag,
// adding csv support for this command only.
cmd.Flags().StringVarP(&outputFormat, "output", "o", string(flags.OutputText), "Output format: text, json, or csv")
cmd.Flags().StringVarP(&outputFormat, "output", "o", string(sqlcli.OutputText), "Output format: text, json, or csv")
cmd.RegisterFlagCompletionFunc("output", func(*cobra.Command, []string, string) ([]string, cobra.ShellCompDirective) {
return []string{string(flags.OutputText), string(flags.OutputJSON), string(outputCSV)}, cobra.ShellCompDirectiveNoFileComp
out := make([]string, len(sqlcli.AllFormats))
for i, f := range sqlcli.AllFormats {
out[i] = string(f)
}
return out, cobra.ShellCompDirectiveNoFileComp
})

return cmd
Expand Down
22 changes: 11 additions & 11 deletions experimental/aitools/cmd/query_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,9 @@ import (
"time"

"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/experimental/libs/sqlcli"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/env"
"github.com/databricks/cli/libs/flags"
mocksql "github.com/databricks/databricks-sdk-go/experimental/mocks/service/sql"
"github.com/databricks/databricks-sdk-go/service/sql"
"github.com/spf13/cobra"
Expand Down Expand Up @@ -271,57 +271,57 @@ func TestResolveWarehouseIDWithFlag(t *testing.T) {
func TestSelectQueryOutputMode(t *testing.T) {
tests := []struct {
name string
outputType flags.Output
format sqlcli.Format
stdoutInteractive bool
promptSupported bool
rowCount int
want queryOutputMode
}{
{
name: "json flag always returns json",
outputType: flags.OutputJSON,
format: sqlcli.OutputJSON,
stdoutInteractive: true,
promptSupported: true,
rowCount: 999,
want: queryOutputModeJSON,
},
{
name: "non interactive stdout returns json",
outputType: flags.OutputText,
format: sqlcli.OutputText,
stdoutInteractive: false,
promptSupported: true,
rowCount: 5,
want: queryOutputModeJSON,
},
{
name: "missing stdin interactivity falls back to static table",
outputType: flags.OutputText,
format: sqlcli.OutputText,
stdoutInteractive: true,
promptSupported: false,
rowCount: staticTableThreshold + 10,
rowCount: sqlcli.StaticTableThreshold + 10,
want: queryOutputModeStaticTable,
},
{
name: "small results use static table",
outputType: flags.OutputText,
format: sqlcli.OutputText,
stdoutInteractive: true,
promptSupported: true,
rowCount: staticTableThreshold,
rowCount: sqlcli.StaticTableThreshold,
want: queryOutputModeStaticTable,
},
{
name: "large results use interactive table",
outputType: flags.OutputText,
format: sqlcli.OutputText,
stdoutInteractive: true,
promptSupported: true,
rowCount: staticTableThreshold + 1,
rowCount: sqlcli.StaticTableThreshold + 1,
want: queryOutputModeInteractiveTable,
},
}

for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
got := selectQueryOutputMode(tc.outputType, tc.stdoutInteractive, tc.promptSupported, tc.rowCount)
got := selectQueryOutputMode(tc.format, tc.stdoutInteractive, tc.promptSupported, tc.rowCount)
assert.Equal(t, tc.want, got)
})
}
Expand Down
93 changes: 93 additions & 0 deletions experimental/libs/sqlcli/output.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
// Package sqlcli holds patterns shared by experimental SQL-running commands
// (currently `experimental aitools tools query` and `experimental postgres
// query`). The package lives under experimental/libs/ rather than libs/ so
// the commands depending on it inherit experimental-stability guarantees:
// when both consumers graduate, this package can be promoted alongside
// (or its API stabilised first).
package sqlcli

import (
"context"
"fmt"
"slices"
"strings"

"github.com/databricks/cli/libs/env"
)

// EnvOutputFormat matches the env var name in cmd/root/io.go.
// Reading it lets pipelines set DATABRICKS_OUTPUT_FORMAT once for all
// commands.
const EnvOutputFormat = "DATABRICKS_OUTPUT_FORMAT"

// StaticTableThreshold is the row count above which interactive callers may
// hand off to libs/tableview's scrollable viewer. Smaller results stay in a
// static tabwriter table so they pipe to scripts unchanged.
const StaticTableThreshold = 30

// Format is the user-selectable output shape. Using a string typedef instead
// of an int enum keeps the help text and DATABRICKS_OUTPUT_FORMAT env var
// values self-describing.
type Format string

const (
	// OutputText selects human-readable text output (the flag default).
	OutputText Format = "text"
	// OutputJSON selects machine-readable JSON output.
	OutputJSON Format = "json"
	// OutputCSV selects comma-separated-value output.
	OutputCSV Format = "csv"
)

// AllFormats is the canonical order shown in completions / help. Sharing
// the slice avoids drift between consumers when a new format is added.
var AllFormats = []Format{OutputText, OutputJSON, OutputCSV}

// ResolveFormat picks the effective output format. Precedence:
//
//  1. The local --output flag if it was explicitly set.
//  2. DATABRICKS_OUTPUT_FORMAT env var if set to a known value (invalid
//     values are silently ignored, matching cmd/root/io.go and aitools).
//  3. The flag default (whatever the caller passes as flagValue).
//
// Then the auto-selection rule applies: a *defaulted* text mode on a non-TTY
// stdout falls back to JSON, so scripts piping the output get machine-
// readable output by default. An *explicit* --output text (flag or env) is
// honoured even on a pipe; per AGENTS.md we don't silently override flags
// the user set.
//
// flagSet is true if the user explicitly passed --output on the CLI.
// stdoutTTY is true if stdout is a terminal.
func ResolveFormat(ctx context.Context, flagValue string, flagSet, stdoutTTY bool) (Format, error) {
	selected := Format(strings.ToLower(flagValue))
	explicit := flagSet

	// The env var only participates when the flag was left at its default.
	// Unknown env values fall through to the flag default without error.
	if !explicit {
		if raw, ok := env.Lookup(ctx, EnvOutputFormat); ok {
			if fromEnv := Format(strings.ToLower(raw)); IsKnown(fromEnv) {
				selected = fromEnv
				explicit = true
			}
		}
	}

	switch {
	case !IsKnown(selected):
		// Report the value as the user typed it, not the lowercased form.
		return "", fmt.Errorf("unsupported output format %q; expected one of: %s", flagValue, joinFormats(AllFormats))
	case selected == OutputText && !explicit && !stdoutTTY:
		// Defaulted text on a pipe: prefer machine-readable JSON.
		return OutputJSON, nil
	default:
		return selected, nil
	}
}

// IsKnown reports whether f is one of the formats in AllFormats.
func IsKnown(f Format) bool {
	return slices.Index(AllFormats, f) >= 0
}

// joinFormats renders formats as a comma-separated list for error messages.
func joinFormats(formats []Format) string {
	var b strings.Builder
	for i, f := range formats {
		if i > 0 {
			b.WriteString(", ")
		}
		b.WriteString(string(f))
	}
	return b.String()
}
Loading
Loading