Merge branch 'main' into fix/always-workspace-prefix
andrewnester authored Sep 5, 2024
2 parents d70bc24 + ceefa80 commit 87c6b4d
Showing 57 changed files with 874 additions and 233 deletions.
1 change: 1 addition & 0 deletions .codegen/service.go.tmpl
@@ -154,6 +154,7 @@ func new{{.PascalName}}() *cobra.Command {
"provider-exchanges delete-listing-from-exchange"
"provider-exchanges list-exchanges-for-listing"
"provider-exchanges list-listings-for-exchange"
"storage-credentials get"
-}}
{{- $fullCommandName := (print $serviceName " " .KebabName) -}}
{{- $noPrompt := or .IsCrudCreate (in $excludeFromPrompts $fullCommandName) }}
52 changes: 52 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,57 @@
# Version changelog

## [Release] Release v0.228.0

CLI:
* Do not error if we cannot prompt for a profile in `auth login` ([#1745](https://github.com/databricks/cli/pull/1745)).

Bundles:

As of this release, the CLI will show a prompt if there are configuration changes that lead to DLT pipeline recreation.
Users can skip the prompt by specifying the `--auto-approve` flag.
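
The prompt plus `--auto-approve` combination follows a common CLI pattern. The Go sketch below is illustrative only (it is not the CLI's actual implementation; the command, flag wiring, and prompt text are assumptions) and shows how such a gate is typically built with cobra:

```go
package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"

	"github.com/spf13/cobra"
)

// confirm reads a single line from stdin and treats "y"/"Y" as approval.
func confirm(prompt string) bool {
	fmt.Printf("%s [y/N]: ", prompt)
	answer, _ := bufio.NewReader(os.Stdin).ReadString('\n')
	return strings.EqualFold(strings.TrimSpace(answer), "y")
}

func main() {
	cmd := &cobra.Command{
		Use: "deploy",
		RunE: func(cmd *cobra.Command, args []string) error {
			autoApprove, _ := cmd.Flags().GetBool("auto-approve")
			recreationDetected := true // stand-in for real plan analysis

			// Prompt only for destructive changes, unless --auto-approve is set.
			if recreationDetected && !autoApprove {
				if !confirm("This action will recreate the pipeline. Continue?") {
					return fmt.Errorf("deployment cancelled")
				}
			}
			fmt.Println("deploying...")
			return nil
		},
	}
	cmd.Flags().Bool("auto-approve", false, "Skip interactive approvals")
	if err := cmd.Execute(); err != nil {
		os.Exit(1)
	}
}
```
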

* Pass along to Terraform process ([#1734](https://github.com/databricks/cli/pull/1734)).
* Add prompt when a pipeline recreation happens ([#1672](https://github.com/databricks/cli/pull/1672)).
* Use materialized views in the default-sql template ([#1709](https://github.com/databricks/cli/pull/1709)).
* Update templates to latest LTS DBR ([#1715](https://github.com/databricks/cli/pull/1715)).
* Make lock optional in the JSON schema ([#1738](https://github.com/databricks/cli/pull/1738)).
* Do not suppress normalisation diagnostics for resolving variables ([#1740](https://github.com/databricks/cli/pull/1740)).
* Include a permissions section in all templates ([#1713](https://github.com/databricks/cli/pull/1713)).
* Fixed complex variables not being correctly merged from include files ([#1746](https://github.com/databricks/cli/pull/1746)).
* Fixed variable override in target with full variable syntax ([#1749](https://github.com/databricks/cli/pull/1749)).

Internal:
* Consider serverless clusters as compatible for Python wheel tasks ([#1733](https://github.com/databricks/cli/pull/1733)).
* PythonMutator: explain missing package error ([#1736](https://github.com/databricks/cli/pull/1736)).
* Add `dyn.Time` to box a timestamp with its original string value ([#1732](https://github.com/databricks/cli/pull/1732)).
* Fix streaming of stdout, stdin, stderr in cobra test runner ([#1742](https://github.com/databricks/cli/pull/1742)).

Dependency updates:
* Bump github.com/Masterminds/semver/v3 from 3.2.1 to 3.3.0 ([#1741](https://github.com/databricks/cli/pull/1741)).

## [Release] Release v0.227.1

CLI:
* Disable prompt for storage-credentials get command ([#1723](https://github.com/databricks/cli/pull/1723)).

Bundles:
* Do not treat empty path as a local path ([#1717](https://github.com/databricks/cli/pull/1717)).
* Correctly mark PyPI package name specs with multiple specifiers as remote libraries ([#1725](https://github.com/databricks/cli/pull/1725)).
* Improve error handling for /Volumes paths in mode: development ([#1716](https://github.com/databricks/cli/pull/1716)).

Internal:
* Ignore CLI version check on development builds of the CLI ([#1714](https://github.com/databricks/cli/pull/1714)).

API Changes:
* Added `databricks resource-quotas` command group.
* Added `databricks policy-compliance-for-clusters` command group.
* Added `databricks policy-compliance-for-jobs` command group.

OpenAPI commit 3eae49b444cac5a0118a3503e5b7ecef7f96527a (2024-08-21)
Dependency updates:
* Bump github.com/databricks/databricks-sdk-go from 0.44.0 to 0.45.0 ([#1719](https://github.com/databricks/cli/pull/1719)).
* Revert hc-install version to 0.7.0 ([#1711](https://github.com/databricks/cli/pull/1711)).

## [Release] Release v0.227.0

CLI:
7 changes: 1 addition & 6 deletions bundle/artifacts/expand_globs.go
@@ -33,12 +33,7 @@ func createGlobError(v dyn.Value, p dyn.Path, message string) diag.Diagnostic {
Severity: diag.Error,
Summary: fmt.Sprintf("%s: %s", source, message),
Locations: []dyn.Location{v.Location()},

Paths: []dyn.Path{
// Hack to clone the path. This path copy is mutable.
// To be addressed in a later PR.
p.Append(),
},
Paths: []dyn.Path{p},
}
}

2 changes: 1 addition & 1 deletion bundle/config/deployment.go
@@ -6,5 +6,5 @@ type Deployment struct {
FailOnActiveRuns bool `json:"fail_on_active_runs,omitempty"`

// Lock configures locking behavior on deployment.
Lock Lock `json:"lock"`
Lock Lock `json:"lock,omitempty"`
}
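
Whether `lock` is required in the generated JSON schema hinges on the `omitempty` tag. The standalone Go sketch below is not the CLI's schema generator; it only illustrates, with a hypothetical reflection helper, how a generator can read that tag to mark fields as optional:

```go
package main

import (
	"fmt"
	"reflect"
	"strings"
)

type Lock struct{}

type Deployment struct {
	FailOnActiveRuns bool `json:"fail_on_active_runs,omitempty"`
	Lock             Lock `json:"lock,omitempty"`
}

// requiredFields returns the JSON names of fields whose tag lacks "omitempty".
func requiredFields(t reflect.Type) []string {
	var required []string
	for i := 0; i < t.NumField(); i++ {
		parts := strings.Split(t.Field(i).Tag.Get("json"), ",")
		optional := false
		for _, p := range parts[1:] {
			if p == "omitempty" {
				optional = true
			}
		}
		if !optional && parts[0] != "" && parts[0] != "-" {
			required = append(required, parts[0])
		}
	}
	return required
}

func main() {
	// With ",omitempty" added to Lock, no field of Deployment is required.
	fmt.Println(requiredFields(reflect.TypeOf(Deployment{}))) // []
}
```
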
35 changes: 24 additions & 11 deletions bundle/config/mutator/process_target_mode.go
@@ -64,6 +64,7 @@ func transformDevelopmentMode(ctx context.Context, b *bundle.Bundle) {
}

func validateDevelopmentMode(b *bundle.Bundle) diag.Diagnostics {
var diags diag.Diagnostics
p := b.Config.Presets
u := b.Config.Workspace.CurrentUser

@@ -74,44 +75,56 @@ func validateDevelopmentMode(b *bundle.Bundle) diag.Diagnostics {
// status to UNPAUSED at the level of an individual object, which was
// historically allowed.)
if p.TriggerPauseStatus == config.Unpaused {
return diag.Diagnostics{{
diags = diags.Append(diag.Diagnostic{
Severity: diag.Error,
Summary: "target with 'mode: development' cannot set trigger pause status to UNPAUSED by default",
Locations: []dyn.Location{b.Config.GetLocation("presets.trigger_pause_status")},
}}
})
}

// Make sure this development copy has unique names and paths to avoid conflicts
if path := findNonUserPath(b); path != "" {
return diag.Errorf("%s must start with '~/' or contain the current username when using 'mode: development'", path)
if path == "artifact_path" && strings.HasPrefix(b.Config.Workspace.ArtifactPath, "/Volumes") {
// For Volumes paths we recommend including the current username as a substring
diags = diags.Extend(diag.Errorf("%s should contain the current username or ${workspace.current_user.short_name} to ensure uniqueness when using 'mode: development'", path))
} else {
// For non-Volumes paths recommend simply putting things in the home folder
diags = diags.Extend(diag.Errorf("%s must start with '~/' or contain the current username to ensure uniqueness when using 'mode: development'", path))
}
}
if p.NamePrefix != "" && !strings.Contains(p.NamePrefix, u.ShortName) && !strings.Contains(p.NamePrefix, u.UserName) {
// Resources such as pipelines require a unique name, e.g. '[dev steve] my_pipeline'.
// For this reason we require the name prefix to contain the current username;
// it's a pitfall for users if they don't include it and later find out that
// only a single user can do development deployments.
return diag.Diagnostics{{
diags = diags.Append(diag.Diagnostic{
Severity: diag.Error,
Summary: "prefix should contain the current username or ${workspace.current_user.short_name} to ensure uniqueness when using 'mode: development'",
Locations: []dyn.Location{b.Config.GetLocation("presets.name_prefix")},
}}
})
}
return nil
return diags
}

// findNonUserPath finds the first workspace path such as root_path that doesn't
// contain the current username or current user's shortname.
func findNonUserPath(b *bundle.Bundle) string {
username := b.Config.Workspace.CurrentUser.UserName
containsName := func(path string) bool {
username := b.Config.Workspace.CurrentUser.UserName
shortname := b.Config.Workspace.CurrentUser.ShortName
return strings.Contains(path, username) || strings.Contains(path, shortname)
}

if b.Config.Workspace.RootPath != "" && !strings.Contains(b.Config.Workspace.RootPath, username) {
if b.Config.Workspace.RootPath != "" && !containsName(b.Config.Workspace.RootPath) {
return "root_path"
}
if b.Config.Workspace.StatePath != "" && !strings.Contains(b.Config.Workspace.StatePath, username) {
if b.Config.Workspace.StatePath != "" && !containsName(b.Config.Workspace.StatePath) {
return "state_path"
}
if b.Config.Workspace.FilePath != "" && !strings.Contains(b.Config.Workspace.FilePath, username) {
if b.Config.Workspace.FilePath != "" && !containsName(b.Config.Workspace.FilePath) {
return "file_path"
}
if b.Config.Workspace.ArtifactPath != "" && !strings.Contains(b.Config.Workspace.ArtifactPath, username) {
if b.Config.Workspace.ArtifactPath != "" && !containsName(b.Config.Workspace.ArtifactPath) {
return "artifact_path"
}
return ""
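
For reference, the check that `findNonUserPath` now applies can be exercised in isolation. The sketch below uses hypothetical usernames and paths and is not part of the CLI:

```go
package main

import (
	"fmt"
	"strings"
)

// containsName mirrors the closure used above: a path is considered
// user-scoped if it contains either the full username or the short name.
func containsName(path, username, shortname string) bool {
	return strings.Contains(path, username) || strings.Contains(path, shortname)
}

func main() {
	username := "lennart@company.com" // hypothetical values for illustration
	shortname := "lennart"

	for _, p := range []string{
		"/Users/lennart@company.com/.bundle/x/state", // ok: contains username
		"/Volumes/catalog/schema/lennart/libs",       // ok: contains short name
		"/Shared/.bundle/x/y/state",                  // flagged: no user reference
	} {
		fmt.Printf("%-46s user-scoped=%v\n", p, containsName(p, username, shortname))
	}
}
```
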
12 changes: 11 additions & 1 deletion bundle/config/mutator/process_target_mode_test.go
@@ -230,10 +230,20 @@ func TestValidateDevelopmentMode(t *testing.T) {
diags := validateDevelopmentMode(b)
require.NoError(t, diags.Error())

// Test with /Volumes path
b = mockBundle(config.Development)
b.Config.Workspace.ArtifactPath = "/Volumes/catalog/schema/lennart/libs"
diags = validateDevelopmentMode(b)
require.NoError(t, diags.Error())
b.Config.Workspace.ArtifactPath = "/Volumes/catalog/schema/libs"
diags = validateDevelopmentMode(b)
require.ErrorContains(t, diags.Error(), "artifact_path should contain the current username or ${workspace.current_user.short_name} to ensure uniqueness when using 'mode: development'")

// Test with a bundle that has a non-user path
b = mockBundle(config.Development)
b.Config.Workspace.RootPath = "/Shared/.bundle/x/y/state"
diags = validateDevelopmentMode(b)
require.ErrorContains(t, diags.Error(), "root_path")
require.ErrorContains(t, diags.Error(), "root_path must start with '~/' or contain the current username to ensure uniqueness when using 'mode: development'")

// Test with a bundle that has an unpaused trigger pause status
b = mockBundle(config.Development)
84 changes: 66 additions & 18 deletions bundle/config/mutator/python/python_mutator.go
@@ -1,15 +1,21 @@
package python

import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"os"
"path/filepath"

"github.com/databricks/cli/libs/python"
"github.com/databricks/databricks-sdk-go/logger"
"github.com/fatih/color"

"strings"

"github.com/databricks/cli/libs/python"

"github.com/databricks/cli/bundle/env"

@@ -169,7 +175,11 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, r
return dyn.InvalidValue, diag.Errorf("failed to write input file: %s", err)
}

stderrWriter := newLogWriter(ctx, "stderr: ")
stderrBuf := bytes.Buffer{}
stderrWriter := io.MultiWriter(
newLogWriter(ctx, "stderr: "),
&stderrBuf,
)
stdoutWriter := newLogWriter(ctx, "stdout: ")

_, processErr := process.Background(
@@ -197,23 +207,54 @@
// process can fail without reporting errors in diagnostics file or creating it, for instance,
// venv doesn't have PyDABs library installed
if processErr != nil {
return dyn.InvalidValue, diag.Errorf("python mutator process failed: %sw, use --debug to enable logging", processErr)
diagnostic := diag.Diagnostic{
Severity: diag.Error,
Summary: fmt.Sprintf("python mutator process failed: %q, use --debug to enable logging", processErr),
Detail: explainProcessErr(stderrBuf.String()),
}

return dyn.InvalidValue, diag.Diagnostics{diagnostic}
}

// or we can fail to read diagnostics file, that should always be created
if pythonDiagnosticsErr != nil {
return dyn.InvalidValue, diag.Errorf("failed to load diagnostics: %s", pythonDiagnosticsErr)
}

output, err := loadOutputFile(rootPath, outputPath)
if err != nil {
return dyn.InvalidValue, diag.Errorf("failed to load Python mutator output: %s", err)
}
output, outputDiags := loadOutputFile(rootPath, outputPath)
pythonDiagnostics = pythonDiagnostics.Extend(outputDiags)

// we pass through pythonDiagnostics because it contains warnings
return output, pythonDiagnostics
}

const installExplanation = `If using Python wheels, ensure that 'databricks-pydabs' is included in the dependencies,
and that the wheel is installed in the Python environment:
$ .venv/bin/pip install -e .
If using a virtual environment, ensure it is specified as the venv_path property in databricks.yml,
or activate the environment before running CLI commands:
experimental:
pydabs:
venv_path: .venv
`

// explainProcessErr provides additional explanation for common errors.
// It's meant to be best effort, and not all errors are covered.
// Output should be used only for error reporting.
func explainProcessErr(stderr string) string {
// implemented in cpython/Lib/runpy.py and portable across Python 3.x, including pypy
if strings.Contains(stderr, "Error while finding module specification for 'databricks.bundles.build'") {
summary := color.CyanString("Explanation: ") + "'databricks-pydabs' library is not installed in the Python environment.\n"

return stderr + "\n" + summary + "\n" + installExplanation
}

return stderr
}

func writeInputFile(inputPath string, input dyn.Value) error {
// we need to marshal dyn.Value instead of bundle.Config to JSON to support
// non-string fields assigned with bundle variables
@@ -225,10 +266,10 @@ func writeInputFile(inputPath string, input dyn.Value) error {
return os.WriteFile(inputPath, rootConfigJson, 0600)
}

func loadOutputFile(rootPath string, outputPath string) (dyn.Value, error) {
func loadOutputFile(rootPath string, outputPath string) (dyn.Value, diag.Diagnostics) {
outputFile, err := os.Open(outputPath)
if err != nil {
return dyn.InvalidValue, fmt.Errorf("failed to open output file: %w", err)
return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to open output file: %w", err))
}

defer outputFile.Close()
@@ -243,27 +284,34 @@ func loadOutputFile(rootPath string, outputPath string) (dyn.Value, error) {
// for that, we pass virtualPath instead of outputPath as file location
virtualPath, err := filepath.Abs(filepath.Join(rootPath, "__generated_by_pydabs__.yml"))
if err != nil {
return dyn.InvalidValue, fmt.Errorf("failed to get absolute path: %w", err)
return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to get absolute path: %w", err))
}

generated, err := yamlloader.LoadYAML(virtualPath, outputFile)
if err != nil {
return dyn.InvalidValue, fmt.Errorf("failed to parse output file: %w", err)
return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to parse output file: %w", err))
}

normalized, diagnostic := convert.Normalize(config.Root{}, generated)
if diagnostic.Error() != nil {
return dyn.InvalidValue, fmt.Errorf("failed to normalize output: %w", diagnostic.Error())
}
return strictNormalize(config.Root{}, generated)
}

func strictNormalize(dst any, generated dyn.Value) (dyn.Value, diag.Diagnostics) {
normalized, diags := convert.Normalize(dst, generated)

// warnings shouldn't happen because output should be already normalized
// when it happens, it's a bug in the mutator, and should be treated as an error

for _, d := range diagnostic.Filter(diag.Warning) {
return dyn.InvalidValue, fmt.Errorf("failed to normalize output: %s", d.Summary)
strictDiags := diag.Diagnostics{}

for _, d := range diags {
if d.Severity == diag.Warning {
d.Severity = diag.Error
}

strictDiags = strictDiags.Append(d)
}

return normalized, nil
return normalized, strictDiags
}

// loadDiagnosticsFile loads diagnostics from a file.
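
The stderr handling in `runPythonMutator` above combines streaming with capture so that the captured text can be attached to the returned error. A minimal standalone sketch of that pattern follows (the invoked command and matched message are placeholders, not the CLI's actual invocation):

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"os"
	"os/exec"
	"strings"
)

func main() {
	// Capture stderr in a buffer while still streaming it to the terminal,
	// so the captured text can later enrich an error message.
	var stderrBuf bytes.Buffer
	stderrWriter := io.MultiWriter(os.Stderr, &stderrBuf)

	// Placeholder command: importing a module that may not be installed.
	cmd := exec.Command("python3", "-m", "databricks.bundles.build")
	cmd.Stderr = stderrWriter

	if err := cmd.Run(); err != nil {
		msg := stderrBuf.String()
		// Best-effort explanation for a known failure mode, mirroring the
		// approach of explainProcessErr above.
		if strings.Contains(msg, "Error while finding module specification") {
			msg += "\nExplanation: the required package is not installed in this environment.\n"
		}
		fmt.Printf("process failed: %v\n%s", err, msg)
	}
}
```
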
