
Merge branch 'main' into bundle-diags
pietern authored Mar 25, 2024
2 parents 2180212 + 9cf3dbe commit b009c0a
Showing 53 changed files with 733 additions and 135 deletions.
45 changes: 45 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,50 @@
# Version changelog

## 0.216.0

CLI:
* Propagate correct `User-Agent` for CLI during OAuth flow ([#1264](https://github.com/databricks/cli/pull/1264)).
* Add usage string when command fails with incorrect arguments ([#1276](https://github.com/databricks/cli/pull/1276)).

Bundles:
* Include `dyn.Path` as argument to the visit callback function ([#1260](https://github.com/databricks/cli/pull/1260)).
* Inline logic to set a value in `dyn.SetByPath` ([#1261](https://github.com/databricks/cli/pull/1261)).
* Add assertions for the `dyn.Path` argument to the visit callback ([#1265](https://github.com/databricks/cli/pull/1265)).
* Add `dyn.MapByPattern` to map a function to values with matching paths ([#1266](https://github.com/databricks/cli/pull/1266)).
* Filter current user from resource permissions ([#1262](https://github.com/databricks/cli/pull/1262)).
* Retain location annotation when expanding globs for pipeline libraries ([#1274](https://github.com/databricks/cli/pull/1274)).
* Added deployment state for bundles ([#1267](https://github.com/databricks/cli/pull/1267)).
* Do CheckRunningResource only after terraform.Write ([#1292](https://github.com/databricks/cli/pull/1292)).
* Rewrite relative paths using `dyn.Location` of the underlying value ([#1273](https://github.com/databricks/cli/pull/1273)).
* Push deployment state right after files upload ([#1293](https://github.com/databricks/cli/pull/1293)).
* Make `Append` function to `dyn.Path` return independent slice ([#1295](https://github.com/databricks/cli/pull/1295)).
* Move bundle tests into bundle/tests ([#1299](https://github.com/databricks/cli/pull/1299)).
* Upgrade Terraform provider to 1.38.0 ([#1308](https://github.com/databricks/cli/pull/1308)).

Internal:
* Add integration test for mlops-stacks initialization ([#1155](https://github.com/databricks/cli/pull/1155)).
* Update actions/setup-python to v5 ([#1290](https://github.com/databricks/cli/pull/1290)).
* Update codecov/codecov-action to v4 ([#1291](https://github.com/databricks/cli/pull/1291)).

API Changes:
* Changed `databricks catalogs list` command.
* Changed `databricks online-tables create` command.
* Changed `databricks lakeview publish` command.
* Added `databricks lakeview create` command.
* Added `databricks lakeview get` command.
* Added `databricks lakeview get-published` command.
* Added `databricks lakeview trash` command.
* Added `databricks lakeview update` command.
* Moved settings related commands to `databricks settings` and `databricks account settings`.

OpenAPI commit 93763b0d7ae908520c229c786fff28b8fd623261 (2024-03-20)

Dependency updates:
* Bump golang.org/x/oauth2 from 0.17.0 to 0.18.0 ([#1270](https://github.com/databricks/cli/pull/1270)).
* Bump golang.org/x/mod from 0.15.0 to 0.16.0 ([#1271](https://github.com/databricks/cli/pull/1271)).
* Update Go SDK to v0.35.0 ([#1300](https://github.com/databricks/cli/pull/1300)).
* Update Go SDK to v0.36.0 ([#1304](https://github.com/databricks/cli/pull/1304)).

## 0.215.0

CLI:
2 changes: 1 addition & 1 deletion bundle/internal/tf/codegen/schema/version.go
@@ -1,3 +1,3 @@
package schema

const ProviderVersion = "1.37.0"
const ProviderVersion = "1.38.0"
2 changes: 1 addition & 1 deletion bundle/internal/tf/schema/root.go
@@ -25,7 +25,7 @@ func NewRoot() *Root {
"required_providers": map[string]interface{}{
"databricks": map[string]interface{}{
"source": "databricks/databricks",
"version": "1.37.0",
"version": "1.38.0",
},
},
},
2 changes: 1 addition & 1 deletion internal/init_test.go
@@ -138,7 +138,7 @@ func TestAccBundleInitHelpers(t *testing.T) {
},
{
funcName: "{{is_service_principal}}",
-expected: strconv.FormatBool(auth.IsServicePrincipal(me.Id)),
+expected: strconv.FormatBool(auth.IsServicePrincipal(me.UserName)),
},
{
funcName: "{{smallest_node_type}}",
8 changes: 4 additions & 4 deletions libs/auth/service_principal.go
@@ -4,12 +4,12 @@ import (
"github.com/google/uuid"
)

-// Determines whether a given user id is a service principal.
-// This function uses a heuristic: if the user id is a UUID, then we assume
+// Determines whether a given user name is a service principal.
+// This function uses a heuristic: if the user name is a UUID, then we assume
// it's a service principal. Unfortunately, the service principal listing API is too
// slow for our purposes. And the "users" and "service principals get" APIs
// only allow access by workspace admins.
-func IsServicePrincipal(userId string) bool {
-_, err := uuid.Parse(userId)
+func IsServicePrincipal(userName string) bool {
+_, err := uuid.Parse(userName)
return err == nil
}
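
The rename above does not change the heuristic itself. As a rough, self-contained sketch of that heuristic (the package layout, user name, and UUID below are made up for illustration):

```go
package main

import (
	"fmt"

	"github.com/google/uuid"
)

// isServicePrincipal mirrors libs/auth.IsServicePrincipal: a user name that
// parses as a UUID is assumed to belong to a service principal.
func isServicePrincipal(userName string) bool {
	_, err := uuid.Parse(userName)
	return err == nil
}

func main() {
	fmt.Println(isServicePrincipal("7d2b4a2e-8c1f-4f24-9d6a-0c2f5b9e1a33")) // true: parses as a UUID
	fmt.Println(isServicePrincipal("jane.doe@example.com"))                 // false: a regular user name
}
```

Against a real workspace the caller would use auth.IsServicePrincipal(me.UserName), as the updated test in internal/init_test.go does.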
2 changes: 1 addition & 1 deletion libs/dyn/convert/end_to_end_test.go
@@ -4,7 +4,7 @@ import (
"testing"

"github.com/databricks/cli/libs/dyn"
"github.com/stretchr/testify/assert"
assert "github.com/databricks/cli/libs/dyn/dynassert"
"github.com/stretchr/testify/require"
)

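
This test file (and the other *_test.go files below) swaps github.com/stretchr/testify/assert for the repository's dynassert package, imported under the alias assert so the existing assert.* call sites compile unchanged. A minimal sketch of that aliased-import pattern, assuming dynassert exposes a testify-style Equal helper:

```go
package example_test

import (
	"testing"

	// The alias keeps call sites identical to the testify version.
	assert "github.com/databricks/cli/libs/dyn/dynassert"
)

func TestAliasedAssert(t *testing.T) {
	assert.Equal(t, "expected", "expected")
}
```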
33 changes: 27 additions & 6 deletions libs/dyn/convert/from_typed.go
@@ -71,17 +71,28 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value) (dyn.Value, error) {
return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind())
}

-out := make(map[string]dyn.Value)
+refm, _ := ref.AsMap()
+out := dyn.NewMapping()
info := getStructInfo(src.Type())
for k, v := range info.FieldValues(src) {
+pair, ok := refm.GetPairByString(k)
+refk := pair.Key
+refv := pair.Value
+
+// Use nil reference if there is no reference for this key
+if !ok {
+refk = dyn.V(k)
+refv = dyn.NilValue
+}
+
// Convert the field taking into account the reference value (may be equal to config.NilValue).
-nv, err := fromTyped(v.Interface(), ref.Get(k))
+nv, err := fromTyped(v.Interface(), refv)
if err != nil {
return dyn.InvalidValue, err
}

if nv != dyn.NilValue {
-out[k] = nv
+out.Set(refk, nv)
}
}

@@ -101,21 +112,31 @@ func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) {
return dyn.NilValue, nil
}

-out := make(map[string]dyn.Value)
+refm, _ := ref.AsMap()
+out := dyn.NewMapping()
iter := src.MapRange()
for iter.Next() {
k := iter.Key().String()
v := iter.Value()
+pair, ok := refm.GetPairByString(k)
+refk := pair.Key
+refv := pair.Value
+
+// Use nil reference if there is no reference for this key
+if !ok {
+refk = dyn.V(k)
+refv = dyn.NilValue
+}

// Convert entry taking into account the reference value (may be equal to dyn.NilValue).
-nv, err := fromTyped(v.Interface(), ref.Get(k), includeZeroValues)
+nv, err := fromTyped(v.Interface(), refv, includeZeroValues)
if err != nil {
return dyn.InvalidValue, err
}

// Every entry is represented, even if it is a nil.
// Otherwise, a map with zero-valued structs would yield a nil as well.
-out[k] = nv
+out.Set(refk, nv)
}

return dyn.NewValue(out, ref.Location()), nil
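
Both functions above now build a dyn.Mapping rather than a plain map[string]dyn.Value, so each key is itself a dyn.Value and can carry its own location. A rough sketch of the Mapping API as it is used in this diff (the keys and values are invented examples):

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
)

func main() {
	// Build a mapping the way fromTypedStruct/fromTypedMap now do.
	out := dyn.NewMapping()
	out.Set(dyn.V("catalog"), dyn.V("main"))
	out.Set(dyn.V("target"), dyn.V("dev"))

	// Look up an entry by its string key, as GetPairByString is used above.
	if pair, ok := out.GetPairByString("catalog"); ok {
		fmt.Println(pair.Key.MustString(), "=", pair.Value.MustString())
	}

	// Iterate over key/value pairs, mirroring the loops in normalize.go and to_typed.go below.
	for _, pair := range out.Pairs() {
		fmt.Printf("%s: %s\n", pair.Key.MustString(), pair.Value.MustString())
	}
}
```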
2 changes: 1 addition & 1 deletion libs/dyn/convert/from_typed_test.go
@@ -4,7 +4,7 @@ import (
"testing"

"github.com/databricks/cli/libs/dyn"
"github.com/stretchr/testify/assert"
assert "github.com/databricks/cli/libs/dyn/dynassert"
"github.com/stretchr/testify/require"
)

35 changes: 20 additions & 15 deletions libs/dyn/convert/normalize.go
@@ -74,30 +74,32 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen

switch src.Kind() {
case dyn.KindMap:
-out := make(map[string]dyn.Value)
+out := dyn.NewMapping()
info := getStructInfo(typ)
-for k, v := range src.MustMap() {
-index, ok := info.Fields[k]
+for _, pair := range src.MustMap().Pairs() {
+pk := pair.Key
+pv := pair.Value
+index, ok := info.Fields[pk.MustString()]
if !ok {
diags = diags.Append(diag.Diagnostic{
Severity: diag.Warning,
-Summary: fmt.Sprintf("unknown field: %s", k),
-Location: src.Location(),
+Summary: fmt.Sprintf("unknown field: %s", pk.MustString()),
+Location: pk.Location(),
})
continue
}

// Normalize the value according to the field type.
-v, err := n.normalizeType(typ.FieldByIndex(index).Type, v, seen)
+nv, err := n.normalizeType(typ.FieldByIndex(index).Type, pv, seen)
if err != nil {
diags = diags.Extend(err)
// Skip the element if it cannot be normalized.
-if !v.IsValid() {
+if !nv.IsValid() {
continue
}
}

-out[k] = v
+out.Set(pk, nv)
}

// Return the normalized value if missing fields are not included.
@@ -107,7 +109,7 @@

// Populate missing fields with their zero values.
for k, index := range info.Fields {
-if _, ok := out[k]; ok {
+if _, ok := out.GetByString(k); ok {
continue
}

@@ -143,7 +145,7 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen
continue
}
if v.IsValid() {
-out[k] = v
+out.Set(dyn.V(k), v)
}
}

@@ -160,19 +162,22 @@ func (n normalizeOptions) normalizeMap(typ reflect.Type, src dyn.Value, seen []r

switch src.Kind() {
case dyn.KindMap:
-out := make(map[string]dyn.Value)
-for k, v := range src.MustMap() {
+out := dyn.NewMapping()
+for _, pair := range src.MustMap().Pairs() {
+pk := pair.Key
+pv := pair.Value
+
// Normalize the value according to the map element type.
-v, err := n.normalizeType(typ.Elem(), v, seen)
+nv, err := n.normalizeType(typ.Elem(), pv, seen)
if err != nil {
diags = diags.Extend(err)
// Skip the element if it cannot be normalized.
-if !v.IsValid() {
+if !nv.IsValid() {
continue
}
}

-out[k] = v
+out.Set(pk, nv)
}

return dyn.NewValue(out, src.Location()), diags
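
A side effect of iterating over pairs is that the unknown-field warning above can be anchored at the key's own location (pk.Location()) rather than at the enclosing map (src.Location()). A small sketch of that diagnostic shape, assuming diag.Diagnostics starts from its zero value as in the function above (the field name is invented):

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/dyn"
)

func main() {
	// pk stands in for pair.Key from the loop in normalizeStruct.
	pk := dyn.V("some_unknown_field")

	var diags diag.Diagnostics
	diags = diags.Append(diag.Diagnostic{
		Severity: diag.Warning,
		Summary:  fmt.Sprintf("unknown field: %s", pk.MustString()),
		Location: pk.Location(), // points at the key itself, per the change above
	})
	fmt.Printf("%v\n", diags)
}
```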
2 changes: 1 addition & 1 deletion libs/dyn/convert/normalize_test.go
@@ -5,7 +5,7 @@ import (

"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
"github.com/stretchr/testify/assert"
assert "github.com/databricks/cli/libs/dyn/dynassert"
)

func TestNormalizeStruct(t *testing.T) {
2 changes: 1 addition & 1 deletion libs/dyn/convert/struct_info_test.go
@@ -5,7 +5,7 @@ import (
"testing"

"github.com/databricks/cli/libs/dyn"
"github.com/stretchr/testify/assert"
assert "github.com/databricks/cli/libs/dyn/dynassert"
)

func TestStructInfoPlain(t *testing.T) {
19 changes: 12 additions & 7 deletions libs/dyn/convert/to_typed.go
@@ -59,8 +59,11 @@ func toTypedStruct(dst reflect.Value, src dyn.Value) error {
dst.SetZero()

info := getStructInfo(dst.Type())
-for k, v := range src.MustMap() {
-index, ok := info.Fields[k]
+for _, pair := range src.MustMap().Pairs() {
+pk := pair.Key
+pv := pair.Value
+
+index, ok := info.Fields[pk.MustString()]
if !ok {
// Ignore unknown fields.
// A warning will be printed later. See PR #904.
@@ -82,7 +85,7 @@
f = f.Field(x)
}

-err := ToTyped(f.Addr().Interface(), v)
+err := ToTyped(f.Addr().Interface(), pv)
if err != nil {
return err
}
@@ -112,12 +115,14 @@ func toTypedMap(dst reflect.Value, src dyn.Value) error {
m := src.MustMap()

// Always overwrite.
-dst.Set(reflect.MakeMapWithSize(dst.Type(), len(m)))
-for k, v := range m {
-kv := reflect.ValueOf(k)
+dst.Set(reflect.MakeMapWithSize(dst.Type(), m.Len()))
+for _, pair := range m.Pairs() {
+pk := pair.Key
+pv := pair.Value
+kv := reflect.ValueOf(pk.MustString())
kt := dst.Type().Key()
vv := reflect.New(dst.Type().Elem())
-err := ToTyped(vv.Interface(), v)
+err := ToTyped(vv.Interface(), pv)
if err != nil {
return err
}
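
toTypedMap above converts each Mapping pair into an entry of the destination Go map via reflection. A rough usage sketch of the exported ToTyped entry point, assuming the package is importable as github.com/databricks/cli/libs/dyn/convert and that dyn.NewValue accepts a Mapping plus a location, as it does in from_typed.go (the host and profile values are invented):

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert" // import path assumed from libs/dyn/convert
)

func main() {
	// A dynamic mapping, e.g. as produced when loading bundle configuration.
	m := dyn.NewMapping()
	m.Set(dyn.V("host"), dyn.V("https://example.cloud.databricks.com"))
	m.Set(dyn.V("profile"), dyn.V("DEFAULT"))
	src := dyn.NewValue(m, dyn.Location{})

	// ToTyped fills the typed destination; the map case goes through toTypedMap above.
	var out map[string]string
	if err := convert.ToTyped(&out, src); err != nil {
		panic(err)
	}
	fmt.Println(out["host"], out["profile"])
}
```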
2 changes: 1 addition & 1 deletion libs/dyn/convert/to_typed_test.go
@@ -4,7 +4,7 @@ import (
"testing"

"github.com/databricks/cli/libs/dyn"
"github.com/stretchr/testify/assert"
assert "github.com/databricks/cli/libs/dyn/dynassert"
"github.com/stretchr/testify/require"
)
