From f8feaf07fdeff43bd80832c47ec15f645d7228a4 Mon Sep 17 00:00:00 2001
From: Matt Cary <34742400+mattcary@users.noreply.github.com>
Date: Wed, 13 Dec 2023 13:57:04 -0800
Subject: [PATCH 1/9] No pdcsi disable on create (#9557)
---
.../resource_container_cluster.go.erb | 48 +++++++++++++++----
.../resource_container_cluster_test.go.erb | 2 +-
2 files changed, 41 insertions(+), 9 deletions(-)
diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.erb b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.erb
index 8b9fa8414480..d73fb6bf1d99 100644
--- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.erb
+++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.erb
@@ -2381,11 +2381,28 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er
cluster.SecurityPostureConfig = expandSecurityPostureConfig(v)
}
+ needUpdateAfterCreate := false
+
// For now PSC based cluster don't support `enable_private_endpoint` on `create`, but only on `update` API call.
// If cluster is PSC based and enable_private_endpoint is set to true we will ignore it on `create` call and update cluster right after creation.
enablePrivateEndpointPSCCluster := isEnablePrivateEndpointPSCCluster(cluster)
if enablePrivateEndpointPSCCluster {
cluster.PrivateClusterConfig.EnablePrivateEndpoint = false
+ needUpdateAfterCreate = true
+ }
+
+ enablePDCSI := isEnablePDCSI(cluster);
+ if !enablePDCSI {
+ // GcePersistentDiskCsiDriver cannot be disabled at cluster create, only on cluster update. Ignore on create then update after creation.
+ // If pdcsi is disabled, the config should be defined. But we will be paranoid and double-check.
+ needUpdateAfterCreate = true
+ if cluster.AddonsConfig == nil {
+ cluster.AddonsConfig = &container.AddonsConfig{}
+ }
+ if cluster.AddonsConfig.GcePersistentDiskCsiDriverConfig == nil {
+ cluster.AddonsConfig.GcePersistentDiskCsiDriverConfig = &container.GcePersistentDiskCsiDriverConfig{}
+ }
+ cluster.AddonsConfig.GcePersistentDiskCsiDriverConfig.Enabled = true
}
req := &container.CreateClusterRequest{
@@ -2472,14 +2489,22 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er
}
}
- if enablePrivateEndpointPSCCluster {
+ if needUpdateAfterCreate {
name := containerClusterFullName(project, location, clusterName)
- req := &container.UpdateClusterRequest{
- Update: &container.ClusterUpdate{
- DesiredEnablePrivateEndpoint: true,
- ForceSendFields: []string{"DesiredEnablePrivateEndpoint"},
- },
+ update := &container.ClusterUpdate{}
+ if enablePrivateEndpointPSCCluster {
+ update.DesiredEnablePrivateEndpoint = true
+ update.ForceSendFields = append(update.ForceSendFields, "DesiredEnablePrivateEndpoint");
+ }
+ if !enablePDCSI {
+ update.DesiredAddonsConfig = &container.AddonsConfig{
+ GcePersistentDiskCsiDriverConfig: &container.GcePersistentDiskCsiDriverConfig{
+ Enabled: false,
+ },
+ }
+ update.ForceSendFields = append(update.ForceSendFields, "DesiredAddonsConfig.GcePersistentDiskCsiDriverConfig.Enabled");
}
+ req := &container.UpdateClusterRequest{Update: update}
err = transport_tpg.Retry(transport_tpg.RetryOptions{
RetryFunc: func() error {
@@ -2492,12 +2517,12 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er
},
})
if err != nil {
- return errwrap.Wrapf("Error updating enable private endpoint: {{err}}", err)
+ return errwrap.Wrapf(fmt.Sprintf("Error updating cluster for %v: {{err}}", update.ForceSendFields), err)
}
err = ContainerOperationWait(config, op, project, location, "updating enable private endpoint", userAgent, d.Timeout(schema.TimeoutCreate))
if err != nil {
- return errwrap.Wrapf("Error while waiting to enable private endpoint: {{err}}", err)
+ return errwrap.Wrapf(fmt.Sprintf("Error while waiting on cluster update for %v: {{err}}", update.ForceSendFields), err)
}
}
@@ -4946,6 +4971,13 @@ func isEnablePrivateEndpointPSCCluster(cluster *container.Cluster) bool {
return false
}
+func isEnablePDCSI(cluster *container.Cluster) bool {
+ if cluster.AddonsConfig == nil || cluster.AddonsConfig.GcePersistentDiskCsiDriverConfig == nil {
+ return true; // PDCSI is enabled by default.
+ }
+ return cluster.AddonsConfig.GcePersistentDiskCsiDriverConfig.Enabled
+}
+
func expandPrivateClusterConfig(configured interface{}) *container.PrivateClusterConfig {
l := configured.([]interface{})
if len(l) == 0 {
diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.erb b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.erb
index 2428c885beef..34f0aa301a43 100644
--- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.erb
+++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.erb
@@ -127,7 +127,6 @@ func TestAccContainerCluster_misc(t *testing.T) {
}
func TestAccContainerCluster_withAddons(t *testing.T) {
- t.Skipf("Skipping test %s due to https://github.com/hashicorp/terraform-provider-google/issues/16114", t.Name())
t.Parallel()
clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10))
@@ -4803,6 +4802,7 @@ resource "google_container_cluster" "primary" {
enabled = true
}
<% end -%>
+ }
deletion_protection = false
network = "%s"
subnetwork = "%s"
From daa26f918c289b2fbf73b36d4028e5d27ac4328e Mon Sep 17 00:00:00 2001
From: sunchengxuanivy <39369372+sunchengxuanivy@users.noreply.github.com>
Date: Fri, 15 Dec 2023 01:32:37 +0800
Subject: [PATCH 2/9] add support for auxiliary_node_groups in
google_dataproc_cluster (#9484)
---
.../dataproc/resource_dataproc_cluster.go | 270 ++++++++++++++++++
.../resource_dataproc_cluster_test.go.erb | 98 +++++++
.../docs/r/dataproc_cluster.html.markdown | 73 +++++
3 files changed, 441 insertions(+)
diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go
index 210e77b9b791..8636606f219b 100644
--- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go
+++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go
@@ -113,6 +113,7 @@ var (
"cluster_config.0.lifecycle_config",
"cluster_config.0.endpoint_config",
"cluster_config.0.dataproc_metric_config",
+ "cluster_config.0.auxiliary_node_groups",
}
)
@@ -1507,6 +1508,7 @@ by Dataproc`,
"dataproc_metric_config": {
Type: schema.TypeList,
Optional: true,
+ Computed: true,
MaxItems: 1,
Description: `The config for Dataproc metrics.`,
AtLeastOneOf: clusterConfigKeys,
@@ -1521,6 +1523,129 @@ by Dataproc`,
},
},
},
+ "auxiliary_node_groups": {
+ Type: schema.TypeList,
+ Optional: true,
+ Computed: true,
+ Description: `The node group settings.`,
+ AtLeastOneOf: clusterConfigKeys,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "node_group": {
+ Required: true,
+ Description: `Node group configuration.`,
+ Type: schema.TypeList,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "name": {
+ Description: `The Node group resource name.`,
+ Type: schema.TypeString,
+ Computed: true,
+ },
+ "roles": {
+ Type: schema.TypeList,
+ Description: `Node group roles.`,
+ Required: true,
+ ForceNew: true,
+ Elem: &schema.Schema{
+ Type: schema.TypeString,
+ ValidateFunc: validation.StringInSlice([]string{"ROLE_UNSPECIFIED", "DRIVER"}, false),
+ },
+ },
+ "node_group_config": {
+ Description: `The node group instance group configuration.`,
+ Optional: true,
+ Computed: true,
+ MaxItems: 1,
+ Type: schema.TypeList,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "num_instances": {
+ Type: schema.TypeInt,
+ Optional: true,
+ ForceNew: true,
+ Computed: true,
+ Description: `Specifies the number of auxiliary nodes to create. If not specified, GCP will default to a predetermined computed value.`,
+ },
+ "machine_type": {
+ Type: schema.TypeString,
+ Optional: true,
+ Computed: true,
+ ForceNew: true,
+ Description: `The name of a Google Compute Engine machine type to create for the master`,
+ },
+ "min_cpu_platform": {
+ Type: schema.TypeString,
+ Optional: true,
+ Computed: true,
+ ForceNew: true,
+ Description: `The name of a minimum generation of CPU family for the auxiliary node group. If not specified, GCP will default to a predetermined computed value for each zone.`,
+ },
+ "disk_config": {
+
+ Type: schema.TypeList,
+ Optional: true,
+ Computed: true,
+ MaxItems: 1,
+ Description: `Disk Config`,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "num_local_ssds": {
+ Type: schema.TypeInt,
+ Optional: true,
+ Computed: true,
+ Description: `The amount of local SSD disks that will be attached to each master cluster node. Defaults to 0.`,
+ ForceNew: true,
+ },
+
+ "boot_disk_size_gb": {
+ Type: schema.TypeInt,
+ Optional: true,
+ Computed: true,
+ Description: `Size of the primary disk attached to each node, specified in GB. The primary disk contains the boot volume and system libraries, and the smallest allowed disk size is 10GB. GCP will default to a predetermined computed value if not set (currently 500GB). Note: If SSDs are not attached, it also contains the HDFS data blocks and Hadoop working directories.`,
+ ForceNew: true,
+ ValidateFunc: validation.IntAtLeast(10),
+ },
+
+ "boot_disk_type": {
+ Type: schema.TypeString,
+ Optional: true,
+ Description: `The disk type of the primary disk attached to each node. Such as "pd-ssd" or "pd-standard". Defaults to "pd-standard".`,
+ ForceNew: true,
+ Default: "pd-standard",
+ },
+ },
+ },
+ },
+ "accelerators": {
+ Type: schema.TypeSet,
+ Optional: true,
+ ForceNew: true,
+ Elem: acceleratorsSchema(),
+ Description: `The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times.`,
+ },
+ "instance_names": {
+ Type: schema.TypeList,
+ Computed: true,
+ Elem: &schema.Schema{Type: schema.TypeString},
+ Description: `List of auxiliary node group instance names which have been assigned to the cluster.`,
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "node_group_id": {
+ Computed: true,
+ Optional: true,
+ ForceNew: true,
+ Type: schema.TypeString,
+ Description: `A node group ID. Generated if not specified. The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of from 3 to 33 characters.`,
+ },
+ },
+ },
+ },
},
},
},
@@ -1893,9 +2018,89 @@ func expandClusterConfig(d *schema.ResourceData, config *transport_tpg.Config) (
log.Println("[INFO] got preemptible worker config")
conf.SecondaryWorkerConfig = expandPreemptibleInstanceGroupConfig(cfg)
}
+
+ if v, ok := d.GetOk("cluster_config.0.auxiliary_node_groups"); ok {
+ log.Println("[INFO] got auxiliary node group config")
+ conf.AuxiliaryNodeGroups = expandAuxiliaryNodeGroupsConfig(v)
+ }
return conf, nil
}
+func expandAuxiliaryNodeGroupsConfig(v interface{}) []*dataproc.AuxiliaryNodeGroup {
+ auxiliaryNodeGroupsList := v.([]interface{})
+
+ auxiliaryNodeGroups := []*dataproc.AuxiliaryNodeGroup{}
+ for _, v1 := range auxiliaryNodeGroupsList {
+ auxiliaryNodeGroupItem := v1.(map[string]interface{})
+ auxiliaryNodeGroup := &dataproc.AuxiliaryNodeGroup{
+ NodeGroup: expandNodeGroup(auxiliaryNodeGroupItem["node_group"].([]interface{})[0].(map[string]interface{})),
+ }
+ if x, ok := auxiliaryNodeGroupItem["node_group_id"]; ok {
+ auxiliaryNodeGroup.NodeGroupId = x.(string)
+ }
+ auxiliaryNodeGroups = append(auxiliaryNodeGroups, auxiliaryNodeGroup)
+ }
+
+ return auxiliaryNodeGroups
+}
+
+func expandNodeGroup(cfg map[string]interface{}) *dataproc.NodeGroup {
+ conf := &dataproc.NodeGroup{}
+ roles := []string{}
+ roleList := cfg["roles"]
+ for _, v1 := range roleList.([]interface{}) {
+ roles = append(roles, v1.(string))
+ }
+ conf.Roles = roles
+
+ if v, ok := cfg["name"]; ok {
+ conf.Name = v.(string)
+ }
+
+ if v, ok := cfg["node_group_config"]; ok {
+ ng := v.([]interface{})
+ if len(ng) > 0 {
+ conf.NodeGroupConfig = expandNodeGroupConfig(v.([]interface{})[0].(map[string]interface{}))
+ }
+ }
+ return conf
+}
+
+func expandNodeGroupConfig(cfg map[string]interface{}) *dataproc.InstanceGroupConfig {
+ icg := &dataproc.InstanceGroupConfig{}
+
+ if v, ok := cfg["num_instances"]; ok {
+ icg.NumInstances = int64(v.(int))
+ }
+ if v, ok := cfg["machine_type"]; ok {
+ icg.MachineTypeUri = tpgresource.GetResourceNameFromSelfLink(v.(string))
+ }
+ if v, ok := cfg["min_cpu_platform"]; ok {
+ icg.MinCpuPlatform = v.(string)
+ }
+
+ if dc, ok := cfg["disk_config"]; ok {
+ d := dc.([]interface{})
+ if len(d) > 0 {
+ dcfg := d[0].(map[string]interface{})
+ icg.DiskConfig = &dataproc.DiskConfig{}
+
+ if v, ok := dcfg["boot_disk_size_gb"]; ok {
+ icg.DiskConfig.BootDiskSizeGb = int64(v.(int))
+ }
+ if v, ok := dcfg["num_local_ssds"]; ok {
+ icg.DiskConfig.NumLocalSsds = int64(v.(int))
+ }
+ if v, ok := dcfg["boot_disk_type"]; ok {
+ icg.DiskConfig.BootDiskType = v.(string)
+ }
+ }
+ }
+
+ icg.Accelerators = expandAccelerators(cfg["accelerators"].(*schema.Set).List())
+ return icg
+}
+
func expandGceClusterConfig(d *schema.ResourceData, config *transport_tpg.Config) (*dataproc.GceClusterConfig, error) {
conf := &dataproc.GceClusterConfig{}
@@ -2621,6 +2826,15 @@ func flattenClusterConfig(d *schema.ResourceData, cfg *dataproc.ClusterConfig) (
}
data["initialization_action"] = val
}
+
+ if len(cfg.AuxiliaryNodeGroups) > 0 {
+ val, err := flattenAuxiliaryNodeGroups(cfg.AuxiliaryNodeGroups)
+ if err != nil {
+ return nil, err
+ }
+ data["auxiliary_node_groups"] = val
+ }
+
return []map[string]interface{}{data}, nil
}
@@ -2785,6 +2999,62 @@ func flattenInitializationActions(nia []*dataproc.NodeInitializationAction) ([]m
}
+func flattenAuxiliaryNodeGroups(ang []*dataproc.AuxiliaryNodeGroup) ([]map[string]interface{}, error) {
+
+ auxiliaryNodeGroups := []map[string]interface{}{}
+ for _, v := range ang {
+ nodeGroup := map[string]interface{}{
+ "node_group": flatternNodeGroup(v.NodeGroup),
+ }
+ if len(v.NodeGroupId) > 0 {
+ nodeGroup["node_group_id"] = v.NodeGroupId
+ }
+
+ auxiliaryNodeGroups = append(auxiliaryNodeGroups, nodeGroup)
+ }
+ return auxiliaryNodeGroups, nil
+
+}
+
+func flatternNodeGroup(ng *dataproc.NodeGroup) []map[string]interface{} {
+ nodeGroup := map[string]interface{}{
+ "roles": ng.Roles,
+ }
+
+ if ng.Name != "" {
+ nodeGroup["name"] = ng.Name
+ }
+
+ if ng.NodeGroupConfig != nil {
+ nodeGroup["node_group_config"] = flattenNodeGroupConfig(ng.NodeGroupConfig)
+ }
+
+ return []map[string]interface{}{nodeGroup}
+
+}
+
+func flattenNodeGroupConfig(icg *dataproc.InstanceGroupConfig) []map[string]interface{} {
+ disk := map[string]interface{}{}
+ data := map[string]interface{}{}
+
+ if icg != nil {
+ data["num_instances"] = icg.NumInstances
+ data["machine_type"] = tpgresource.GetResourceNameFromSelfLink(icg.MachineTypeUri)
+ data["min_cpu_platform"] = icg.MinCpuPlatform
+ data["instance_names"] = icg.InstanceNames
+ if icg.DiskConfig != nil {
+ disk["boot_disk_size_gb"] = icg.DiskConfig.BootDiskSizeGb
+ disk["num_local_ssds"] = icg.DiskConfig.NumLocalSsds
+ disk["boot_disk_type"] = icg.DiskConfig.BootDiskType
+ }
+ data["accelerators"] = flattenAccelerators(icg.Accelerators)
+
+ }
+
+ data["disk_config"] = []map[string]interface{}{disk}
+ return []map[string]interface{}{data}
+}
+
func flattenGceClusterConfig(d *schema.ResourceData, gcc *dataproc.GceClusterConfig) []map[string]interface{} {
if gcc == nil {
return []map[string]interface{}{}
diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.erb b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.erb
index c1e050b94491..6c003db6e7b0 100644
--- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.erb
+++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.erb
@@ -173,6 +173,24 @@ func TestAccDataprocCluster_withAccelerators(t *testing.T) {
})
}
+func testAccCheckDataprocAuxiliaryNodeGroupAccelerator(cluster *dataproc.Cluster, project string) resource.TestCheckFunc {
+ return func(s *terraform.State) error {
+ expectedUri := fmt.Sprintf("projects/%s/zones/.*/acceleratorTypes/nvidia-tesla-k80", project)
+ r := regexp.MustCompile(expectedUri)
+
+ nodeGroup := cluster.Config.AuxiliaryNodeGroups[0].NodeGroup.NodeGroupConfig.Accelerators
+ if len(nodeGroup) != 1 {
+ return fmt.Errorf("Saw %d nodeGroup accelerator types instead of 1", len(nodeGroup))
+ }
+
+ matches := r.FindStringSubmatch(nodeGroup[0].AcceleratorTypeUri)
+ if len(matches) != 1 {
+ return fmt.Errorf("Saw %s master accelerator type instead of %s", nodeGroup[0].AcceleratorTypeUri, expectedUri)
+ }
+ return nil
+ }
+}
+
func testAccCheckDataprocClusterAccelerator(cluster *dataproc.Cluster, project string, masterCount int, workerCount int) resource.TestCheckFunc {
return func(s *terraform.State) error {
expectedUri := fmt.Sprintf("projects/%s/zones/.*/acceleratorTypes/nvidia-tesla-k80", project)
@@ -514,6 +532,38 @@ func TestAccDataprocCluster_spotWithInstanceFlexibilityPolicy(t *testing.T) {
})
}
+func TestAccDataprocCluster_spotWithAuxiliaryNodeGroups(t *testing.T) {
+ t.Parallel()
+
+ project := envvar.GetTestProjectFromEnv()
+ rnd := acctest.RandString(t, 10)
+ var cluster dataproc.Cluster
+ acctest.VcrTest(t, resource.TestCase{
+ PreCheck: func() { acctest.AccTestPreCheck(t) },
+ ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
+ CheckDestroy: testAccCheckDataprocClusterDestroy(t),
+ Steps: []resource.TestStep{
+ {
+ Config: testAccDataprocCluster_withAuxiliaryNodeGroups(rnd),
+ Check: resource.ComposeTestCheckFunc(
+ testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.with_auxiliary_node_groups", &cluster),
+ resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group.0.roles.0", "DRIVER"),
+ resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group.0.node_group_config.0.num_instances", "2"),
+ resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group.0.node_group_config.0.machine_type", "n1-standard-2"),
+ resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group.0.node_group_config.0.min_cpu_platform", "AMD Rome"),
+ resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group.0.node_group_config.0.disk_config.0.boot_disk_size_gb", "35"),
+ resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group.0.node_group_config.0.disk_config.0.boot_disk_type", "pd-standard"),
+ resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group.0.node_group_config.0.disk_config.0.num_local_ssds", "1"),
+ resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group.0.node_group_config.0.accelerators.0.accelerator_count", "1"),
+ resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group_id", "node-group-id"),
+ testAccCheckDataprocAuxiliaryNodeGroupAccelerator(&cluster, project),
+
+ ),
+ },
+ },
+ })
+}
+
func TestAccDataprocCluster_withStagingBucket(t *testing.T) {
t.Parallel()
@@ -1881,6 +1931,54 @@ resource "google_dataproc_cluster" "spot_with_instance_flexibility_policy" {
`, rnd)
}
+func testAccDataprocCluster_withAuxiliaryNodeGroups(rnd string) string {
+ return fmt.Sprintf(`
+resource "google_dataproc_cluster" "with_auxiliary_node_groups" {
+ name = "tf-test-dproc-%s"
+ region = "us-central1"
+
+ cluster_config {
+ master_config {
+ num_instances = "1"
+ machine_type = "e2-medium"
+ disk_config {
+ boot_disk_size_gb = 35
+ }
+ }
+
+ worker_config {
+ num_instances = "2"
+ machine_type = "e2-medium"
+ disk_config {
+ boot_disk_size_gb = 35
+ }
+ }
+
+ auxiliary_node_groups{
+ node_group_id="node-group-id"
+ node_group {
+ roles = ["DRIVER"]
+ node_group_config{
+ num_instances=2
+ machine_type="n1-standard-2"
+ min_cpu_platform = "AMD Rome"
+ disk_config {
+ boot_disk_size_gb = 35
+ boot_disk_type = "pd-standard"
+ num_local_ssds = 1
+ }
+ accelerators {
+ accelerator_count = 1
+ accelerator_type = "nvidia-tesla-k80"
+ }
+ }
+ }
+ }
+ }
+}
+ `, rnd)
+}
+
func testAccDataprocCluster_withStagingBucketOnly(bucketName string) string {
return fmt.Sprintf(`
resource "google_storage_bucket" "bucket" {
diff --git a/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown
index 32762397ff06..a09753653a04 100644
--- a/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown
+++ b/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown
@@ -381,6 +381,9 @@ resource "google_dataproc_cluster" "accelerated_cluster" {
* `dataproc_metric_config` (Optional) The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times.
Structure [defined below](#nested_dataproc_metric_config).
+* `auxiliary_node_groups` (Optional) A Dataproc NodeGroup resource is a group of Dataproc cluster nodes that execute an assigned role.
+ Structure [defined below](#nested_auxiliary_node_groups).
+
* `metastore_config` (Optional) The config setting for metastore service with the cluster.
Structure [defined below](#nested_metastore_config).
- - -
@@ -828,6 +831,76 @@ dataproc_metric_config {
- - -
+The `auxiliary_node_groups` block supports:
+
+```hcl
+auxiliary_node_groups{
+ node_group {
+ roles = ["DRIVER"]
+ node_group_config{
+ num_instances=2
+ machine_type="n1-standard-2"
+ min_cpu_platform = "AMD Rome"
+ disk_config {
+ boot_disk_size_gb = 35
+ boot_disk_type = "pd-standard"
+ num_local_ssds = 1
+ }
+ accelerators {
+ accelerator_count = 1
+ accelerator_type = "nvidia-tesla-t4"
+ }
+ }
+ }
+}
+```
+
+
+* `node_group` - (Required) Node group configuration.
+
+ * `roles` - (Required) Node group roles.
+ One of `"ROLE_UNSPECIFIED"` or `"DRIVER"`.
+
+ * `name` - (Optional) The Node group resource name.
+
+ * `node_group_config` - (Optional) The node group instance group configuration.
+
+ * `num_instances` - (Optional, Computed) Specifies the number of instances to create in the node group.
+ Please set a number greater than 0. Node Group must have at least 1 instance.
+
+ * `machine_type` - (Optional, Computed) The name of a Google Compute Engine machine type
+ to create for the node group. If not specified, GCP will default to a predetermined
+ computed value (currently `n1-standard-4`).
+
+ * `min_cpu_platform` - (Optional, Computed) The name of a minimum generation of CPU family
+ for the node group. If not specified, GCP will default to a predetermined computed value
+ for each zone. See [the guide](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform)
+ for details about which CPU families are available (and defaulted) for each zone.
+
+ * `disk_config` (Optional) Disk Config
+
+ * `boot_disk_type` - (Optional) The disk type of the primary disk attached to each node.
+ One of `"pd-ssd"` or `"pd-standard"`. Defaults to `"pd-standard"`.
+
+ * `boot_disk_size_gb` - (Optional, Computed) Size of the primary disk attached to each node, specified
+ in GB. The primary disk contains the boot volume and system libraries, and the
+ smallest allowed disk size is 10GB. GCP will default to a predetermined
+ computed value if not set (currently 500GB). Note: If SSDs are not
+ attached, it also contains the HDFS data blocks and Hadoop working directories.
+
+ * `num_local_ssds` - (Optional) The amount of local SSD disks that will be attached to each node in the node group.
+ Defaults to 0.
+
+ * `accelerators` (Optional) The Compute Engine accelerator (GPU) configuration for these instances. Can be specified
+ multiple times.
+
+ * `accelerator_type` - (Required) The short name of the accelerator type to expose to this instance. For example, `nvidia-tesla-k80`.
+
+ * `accelerator_count` - (Required) The number of the accelerator cards of this type exposed to this instance. Often restricted to one of `1`, `2`, `4`, or `8`.
+
+
+- - -
+
The `lifecycle_config` block supports:
```hcl
From 13e0b26a522ca2dc631e0cffa6710bebad7cd584 Mon Sep 17 00:00:00 2001
From: Cameron Thornton
Date: Thu, 14 Dec 2023 12:36:21 -0600
Subject: [PATCH 3/9] Add c2thorn to vacation reviewers for December (#9643)
---
.ci/magician/github/membership.go | 1 +
1 file changed, 1 insertion(+)
diff --git a/.ci/magician/github/membership.go b/.ci/magician/github/membership.go
index 21aaea6f459b..993c9ddd0e62 100644
--- a/.ci/magician/github/membership.go
+++ b/.ci/magician/github/membership.go
@@ -49,6 +49,7 @@ var (
// This is for reviewers who are "on vacation": will not receive new review assignments but will still receive re-requests for assigned PRs.
onVacationReviewers = []string{
"hao-nan-li",
+ "c2thorn",
}
)
From 5e89b3b459087978b89d3405bd4821d9af1436f3 Mon Sep 17 00:00:00 2001
From: Zach Berger
Date: Thu, 14 Dec 2023 11:48:23 -0800
Subject: [PATCH 4/9] Add resources for Cloud Logging default settings (#9409)
* Add folder and organization setting resources
* Try GetTestOrgTargetFromEnv instead of GetTestOrgFromEnv.
* Use correct organization for testing.
* Remove key rotation to fix VCR test.
* Don't specify fields matching default values.
* Specify true instead of yes.
* Use BootstrapKMSKeyInLocation instead of creating new keys.
* Add missing quotes
* Add additional examples to generate additional tests.
* Remove unneeded resources from examples.
* Simplify tests to be one full resource creation and one update.
* Fix typo in test
* Document and cleanup example.
---
mmv1/products/logging/FolderSettings.yaml | 84 ++++++++++++++++++
.../logging/OrganizationSettings.yaml | 82 ++++++++++++++++++
.../logging_folder_settings_all.tf.erb | 22 +++++
.../logging_organization_settings_all.tf.erb | 17 ++++
.../resource_logging_folder_settings_test.go | 86 +++++++++++++++++++
...urce_logging_organization_settings_test.go | 74 ++++++++++++++++
6 files changed, 365 insertions(+)
create mode 100644 mmv1/products/logging/FolderSettings.yaml
create mode 100644 mmv1/products/logging/OrganizationSettings.yaml
create mode 100644 mmv1/templates/terraform/examples/logging_folder_settings_all.tf.erb
create mode 100644 mmv1/templates/terraform/examples/logging_organization_settings_all.tf.erb
create mode 100644 mmv1/third_party/terraform/services/logging/resource_logging_folder_settings_test.go
create mode 100644 mmv1/third_party/terraform/services/logging/resource_logging_organization_settings_test.go
diff --git a/mmv1/products/logging/FolderSettings.yaml b/mmv1/products/logging/FolderSettings.yaml
new file mode 100644
index 000000000000..8cd0f8f3b5c7
--- /dev/null
+++ b/mmv1/products/logging/FolderSettings.yaml
@@ -0,0 +1,84 @@
+# Copyright 2023 Google Inc.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+--- !ruby/object:Api::Resource
+name: 'FolderSettings'
+description: |
+ Default resource settings control whether CMEK is required for new log buckets. These settings also determine the storage location for the _Default and _Required log buckets, and whether the _Default sink is enabled or disabled.
+references: !ruby/object:Api::Resource::ReferenceLinks
+ guides:
+ 'Configure default settings for organizations and folders': 'https://cloud.google.com/logging/docs/default-settings'
+ api: 'https://cloud.google.com/logging/docs/reference/v2/rest/v2/TopLevel/getSettings'
+base_url: 'folders/{{folder}}/settings'
+self_link: 'folders/{{folder}}/settings'
+import_format: ['folders/{{folder}}/settings']
+# Hardcode the updateMask since d.HasChanged does not work on create.
+create_url: 'folders/{{folder}}/settings?updateMask=disableDefaultSink,storageLocation,kmsKeyName'
+update_url: 'folders/{{folder}}/settings?updateMask=disableDefaultSink,storageLocation,kmsKeyName'
+# This is a singleton resource that already is created, so create
+# is really an update, and therefore should be PATCHed.
+create_verb: :PATCH
+update_verb: :PATCH
+# update_mask: true
+# This is a singleton resource that cannot be deleted, so skip delete.
+skip_delete: true
+examples:
+ - !ruby/object:Provider::Terraform::Examples
+ name: "logging_folder_settings_all"
+ primary_resource_id: "example"
+ vars:
+ key_name: "kms-key"
+ folder_name: "folder-name"
+ test_env_vars:
+ org_id: :ORG_ID
+ test_vars_overrides:
+ key_name: 'acctest.BootstrapKMSKeyInLocation(t, "us-central1").CryptoKey.Name'
+parameters:
+ - !ruby/object:Api::Type::String
+ name: 'folder'
+ required: true
+ immutable: true
+ url_param_only: true
+ description: |
+ The folder for which to retrieve settings.
+properties:
+ - !ruby/object:Api::Type::String
+ name: name
+ output: true
+ description: |
+ The resource name of the settings.
+ - !ruby/object:Api::Type::String
+ name: kmsKeyName
+ default_from_api: true
+ description: |
+ The resource name for the configured Cloud KMS key.
+ - !ruby/object:Api::Type::String
+ name: kmsServiceAccountId
+ output: true
+ description: |
+ The service account that will be used by the Log Router to access your Cloud KMS key.
+ - !ruby/object:Api::Type::String
+ name: storageLocation
+ default_from_api: true
+ description: |
+ The storage location that Cloud Logging will use to create new resources when a location is needed but not explicitly provided.
+ - !ruby/object:Api::Type::Boolean
+ name: disableDefaultSink
+ default_from_api: true
+ description: |
+ If set to true, the _Default sink in newly created projects and folders will created in a disabled state. This can be used to automatically disable log storage if there is already an aggregated sink configured in the hierarchy. The _Default sink can be re-enabled manually if needed.
+ - !ruby/object:Api::Type::String
+ name: loggingServiceAccountId
+ output: true
+ description: |
+ The service account for the given container. Sinks use this service account as their writerIdentity if no custom service account is provided.
diff --git a/mmv1/products/logging/OrganizationSettings.yaml b/mmv1/products/logging/OrganizationSettings.yaml
new file mode 100644
index 000000000000..cf5acbabb4eb
--- /dev/null
+++ b/mmv1/products/logging/OrganizationSettings.yaml
@@ -0,0 +1,82 @@
+# Copyright 2023 Google Inc.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+--- !ruby/object:Api::Resource
+name: 'OrganizationSettings'
+description: |
+ Default resource settings control whether CMEK is required for new log buckets. These settings also determine the storage location for the _Default and _Required log buckets, and whether the _Default sink is enabled or disabled.
+references: !ruby/object:Api::Resource::ReferenceLinks
+ guides:
+ 'Configure default settings for organizations and folders': 'https://cloud.google.com/logging/docs/default-settings'
+ api: 'https://cloud.google.com/logging/docs/reference/v2/rest/v2/TopLevel/getSettings'
+base_url: 'organizations/{{organization}}/settings'
+self_link: 'organizations/{{organization}}/settings'
+import_format: ['organizations/{{organization}}/settings']
+# Hardcode the updateMask since d.HasChanged does not work on create.
+create_url: 'organizations/{{organization}}/settings?updateMask=disableDefaultSink,storageLocation,kmsKeyName'
+update_url: 'organizations/{{organization}}/settings?updateMask=disableDefaultSink,storageLocation,kmsKeyName'
+# This is a singleton resource that already is created, so create
+# is really an update, and therefore should be PATCHed.
+create_verb: :PATCH
+update_verb: :PATCH
+# This is a singleton resource that cannot be deleted, so skip delete.
+skip_delete: true
+examples:
+ - !ruby/object:Provider::Terraform::Examples
+ name: "logging_organization_settings_all"
+ primary_resource_id: "example"
+ # Covered by update test.
+ skip_test: true
+ vars:
+ key_name: "kms-key"
+ test_env_vars:
+ org_id: :ORG_TARGET
+parameters:
+ - !ruby/object:Api::Type::String
+ name: 'organization'
+ required: true
+ immutable: true
+ url_param_only: true
+ description: |
+ The organization for which to retrieve or configure settings.
+properties:
+ - !ruby/object:Api::Type::String
+ name: name
+ output: true
+ description: |
+ The resource name of the settings.
+ - !ruby/object:Api::Type::String
+ name: kmsKeyName
+ default_from_api: true
+ description: |
+ The resource name for the configured Cloud KMS key.
+ - !ruby/object:Api::Type::String
+ name: kmsServiceAccountId
+ output: true
+ description: |
+ The service account that will be used by the Log Router to access your Cloud KMS key.
+ - !ruby/object:Api::Type::String
+ name: storageLocation
+ default_from_api: true
+ description: |
+ The storage location that Cloud Logging will use to create new resources when a location is needed but not explicitly provided.
+ - !ruby/object:Api::Type::Boolean
+ name: disableDefaultSink
+ default_from_api: true
+ description: |
+    If set to true, the _Default sink in newly created projects and folders will be created in a disabled state. This can be used to automatically disable log storage if there is already an aggregated sink configured in the hierarchy. The _Default sink can be re-enabled manually if needed.
+ - !ruby/object:Api::Type::String
+ name: loggingServiceAccountId
+ output: true
+ description: |
+ The service account for the given container. Sinks use this service account as their writerIdentity if no custom service account is provided.
diff --git a/mmv1/templates/terraform/examples/logging_folder_settings_all.tf.erb b/mmv1/templates/terraform/examples/logging_folder_settings_all.tf.erb
new file mode 100644
index 000000000000..35a35ccd6fac
--- /dev/null
+++ b/mmv1/templates/terraform/examples/logging_folder_settings_all.tf.erb
@@ -0,0 +1,22 @@
+resource "google_logging_folder_settings" "<%= ctx[:primary_resource_id] %>" {
+ disable_default_sink = true
+ folder = google_folder.my_folder.folder_id
+ kms_key_name = "<%= ctx[:vars]['key_name'] %>"
+ storage_location = "us-central1"
+ depends_on = [ google_kms_crypto_key_iam_member.iam ]
+}
+
+resource "google_folder" "my_folder" {
+ display_name = "<%= ctx[:vars]['folder_name'] %>"
+ parent = "organizations/<%= ctx[:test_env_vars]['org_id'] %>"
+}
+
+data "google_logging_folder_settings" "settings" {
+ folder = google_folder.my_folder.folder_id
+}
+
+resource "google_kms_crypto_key_iam_member" "iam" {
+ crypto_key_id = "<%= ctx[:vars]['key_name'] %>"
+ role = "roles/cloudkms.cryptoKeyEncrypterDecrypter"
+ member = "serviceAccount:${data.google_logging_folder_settings.settings.kms_service_account_id}"
+}
\ No newline at end of file
diff --git a/mmv1/templates/terraform/examples/logging_organization_settings_all.tf.erb b/mmv1/templates/terraform/examples/logging_organization_settings_all.tf.erb
new file mode 100644
index 000000000000..dd4b38bde6dd
--- /dev/null
+++ b/mmv1/templates/terraform/examples/logging_organization_settings_all.tf.erb
@@ -0,0 +1,17 @@
+resource "google_logging_organization_settings" "<%= ctx[:primary_resource_id] %>" {
+ disable_default_sink = true
+ kms_key_name = "<%= ctx[:vars]['key_name'] %>"
+ organization = "<%= ctx[:test_env_vars]['org_id'] %>"
+ storage_location = "us-central1"
+ depends_on = [ google_kms_crypto_key_iam_member.iam ]
+}
+
+data "google_logging_organization_settings" "settings" {
+ organization = "<%= ctx[:test_env_vars]['org_id'] %>"
+}
+
+resource "google_kms_crypto_key_iam_member" "iam" {
+ crypto_key_id = "<%= ctx[:vars]['key_name'] %>"
+ role = "roles/cloudkms.cryptoKeyEncrypterDecrypter"
+ member = "serviceAccount:${data.google_logging_organization_settings.settings.kms_service_account_id}"
+}
\ No newline at end of file
diff --git a/mmv1/third_party/terraform/services/logging/resource_logging_folder_settings_test.go b/mmv1/third_party/terraform/services/logging/resource_logging_folder_settings_test.go
new file mode 100644
index 000000000000..e88dc0d529a5
--- /dev/null
+++ b/mmv1/third_party/terraform/services/logging/resource_logging_folder_settings_test.go
@@ -0,0 +1,86 @@
+package logging_test
+
+import (
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+
+ "github.com/hashicorp/terraform-provider-google/google/acctest"
+ "github.com/hashicorp/terraform-provider-google/google/envvar"
+)
+
+func TestAccLoggingFolderSettings_update(t *testing.T) {
+ t.Parallel()
+
+ context := map[string]interface{}{
+ "org_id": envvar.GetTestOrgFromEnv(t),
+ "random_suffix": acctest.RandString(t, 10),
+ "original_key": acctest.BootstrapKMSKeyInLocation(t, "us-central1").CryptoKey.Name,
+ "updated_key": acctest.BootstrapKMSKeyInLocation(t, "us-east1").CryptoKey.Name,
+ }
+
+ acctest.VcrTest(t, resource.TestCase{
+ PreCheck: func() { acctest.AccTestPreCheck(t) },
+ ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
+ Steps: []resource.TestStep{
+ {
+ Config: testAccLoggingFolderSettings_onlyRequired(context),
+ },
+ {
+ ResourceName: "google_logging_folder_settings.example",
+ ImportState: true,
+ ImportStateVerify: true,
+ ImportStateVerifyIgnore: []string{"folder"},
+ },
+ {
+ Config: testAccLoggingFolderSettings_full(context),
+ },
+ {
+ ResourceName: "google_logging_folder_settings.example",
+ ImportState: true,
+ ImportStateVerify: true,
+ ImportStateVerifyIgnore: []string{"folder"},
+ },
+ },
+ })
+}
+
+func testAccLoggingFolderSettings_full(context map[string]interface{}) string {
+ return acctest.Nprintf(`
+resource "google_logging_folder_settings" "example" {
+ disable_default_sink = true
+ folder = google_folder.my_folder.folder_id
+ kms_key_name = "%{original_key}"
+ storage_location = "us-central1"
+ depends_on = [ google_kms_crypto_key_iam_member.iam ]
+}
+
+resource "google_folder" "my_folder" {
+ display_name = "tf-test-folder-%{random_suffix}"
+ parent = "organizations/%{org_id}"
+}
+
+data "google_logging_folder_settings" "settings" {
+ folder = google_folder.my_folder.folder_id
+}
+
+resource "google_kms_crypto_key_iam_member" "iam" {
+ crypto_key_id = "%{original_key}"
+ role = "roles/cloudkms.cryptoKeyEncrypterDecrypter"
+ member = "serviceAccount:${data.google_logging_folder_settings.settings.kms_service_account_id}"
+}
+`, context)
+}
+
+func testAccLoggingFolderSettings_onlyRequired(context map[string]interface{}) string {
+ return acctest.Nprintf(`
+resource "google_logging_folder_settings" "example" {
+ folder = google_folder.my_folder.folder_id
+}
+
+resource "google_folder" "my_folder" {
+ display_name = "tf-test-folder-%{random_suffix}"
+ parent = "organizations/%{org_id}"
+}
+`, context)
+}
diff --git a/mmv1/third_party/terraform/services/logging/resource_logging_organization_settings_test.go b/mmv1/third_party/terraform/services/logging/resource_logging_organization_settings_test.go
new file mode 100644
index 000000000000..ce2896c4e181
--- /dev/null
+++ b/mmv1/third_party/terraform/services/logging/resource_logging_organization_settings_test.go
@@ -0,0 +1,74 @@
+package logging_test
+
+import (
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+
+ "github.com/hashicorp/terraform-provider-google/google/acctest"
+ "github.com/hashicorp/terraform-provider-google/google/envvar"
+)
+
+func TestAccLoggingOrganizationSettings_update(t *testing.T) {
+ context := map[string]interface{}{
+ "org_id": envvar.GetTestOrgTargetFromEnv(t),
+ "random_suffix": acctest.RandString(t, 10),
+ "original_key": acctest.BootstrapKMSKeyInLocation(t, "us-central1").CryptoKey.Name,
+ "updated_key": acctest.BootstrapKMSKeyInLocation(t, "us-east1").CryptoKey.Name,
+ }
+
+ acctest.VcrTest(t, resource.TestCase{
+ PreCheck: func() { acctest.AccTestPreCheck(t) },
+ ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
+ Steps: []resource.TestStep{
+ {
+ Config: testAccLoggingOrganizationSettings_onlyRequired(context),
+ },
+ {
+ ResourceName: "google_logging_organization_settings.example",
+ ImportState: true,
+ ImportStateVerify: true,
+ ImportStateVerifyIgnore: []string{"organization"},
+ },
+ {
+ Config: testAccLoggingOrganizationSettings_full(context),
+ },
+ {
+ ResourceName: "google_logging_organization_settings.example",
+ ImportState: true,
+ ImportStateVerify: true,
+ ImportStateVerifyIgnore: []string{"organization"},
+ },
+ },
+ })
+}
+
+func testAccLoggingOrganizationSettings_full(context map[string]interface{}) string {
+ return acctest.Nprintf(`
+resource "google_logging_organization_settings" "example" {
+ disable_default_sink = false
+ kms_key_name = "%{original_key}"
+ organization = "%{org_id}"
+ storage_location = "us-central1"
+ depends_on = [ google_kms_crypto_key_iam_member.iam ]
+}
+
+data "google_logging_organization_settings" "settings" {
+ organization = "%{org_id}"
+}
+
+resource "google_kms_crypto_key_iam_member" "iam" {
+ crypto_key_id = "%{original_key}"
+ role = "roles/cloudkms.cryptoKeyEncrypterDecrypter"
+ member = "serviceAccount:${data.google_logging_organization_settings.settings.kms_service_account_id}"
+}
+`, context)
+}
+
+func testAccLoggingOrganizationSettings_onlyRequired(context map[string]interface{}) string {
+ return acctest.Nprintf(`
+resource "google_logging_organization_settings" "example" {
+ organization = "%{org_id}"
+}
+`, context)
+}
From 06deb0484e7db634bb29304aaeb9d01e594e011a Mon Sep 17 00:00:00 2001
From: "Stephen Lewis (Burrows)"
Date: Thu, 14 Dec 2023 11:48:36 -0800
Subject: [PATCH 5/9] Added vmware engine increase (#9646)
* Added vmware engine increase
* Update README.md
---
.ci/infra/terraform/README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/.ci/infra/terraform/README.md b/.ci/infra/terraform/README.md
index 99a467952681..76c872f13f0c 100644
--- a/.ci/infra/terraform/README.md
+++ b/.ci/infra/terraform/README.md
@@ -69,3 +69,4 @@ Quotas that will need to be adjusted to support all tests:
- compute.googleapis.com/routers
- compute.googleapis.com/c2_cpus (us-central1)
- compute.googleapis.com/n2_cpus (us-central1) to 36+
+- VMware Engine standard 72 vCPUs nodes per region - southamerica-east1 to 21
From 617cca41c65236cbbb39730359cdcda653fde17a Mon Sep 17 00:00:00 2001
From: Shuya Ma <87669292+shuyama1@users.noreply.github.com>
Date: Thu, 14 Dec 2023 12:05:39 -0800
Subject: [PATCH 6/9] Revert "The version field should be under
configmanagement instead of under oci" (#9645)
---
mmv1/products/gkehub2/Feature.yaml | 6 +++---
.../services/gkehub2/resource_gke_hub_feature_test.go.erb | 2 --
2 files changed, 3 insertions(+), 5 deletions(-)
diff --git a/mmv1/products/gkehub2/Feature.yaml b/mmv1/products/gkehub2/Feature.yaml
index b3968b337586..67be3b1efc02 100644
--- a/mmv1/products/gkehub2/Feature.yaml
+++ b/mmv1/products/gkehub2/Feature.yaml
@@ -215,9 +215,6 @@ properties:
name: configmanagement
description: Config Management spec
properties:
- - !ruby/object:Api::Type::String
- name: version
- description: 'Version of ACM installed'
- !ruby/object:Api::Type::NestedObject
name: configSync
description: 'ConfigSync configuration for the cluster'
@@ -274,6 +271,9 @@ properties:
- !ruby/object:Api::Type::String
name: syncWaitSecs
description: 'Period in seconds between consecutive syncs. Default: 15'
+ - !ruby/object:Api::Type::String
+ name: version
+ description: 'Version of ACM installed'
- !ruby/object:Api::Type::NestedObject
name: policycontroller
description: Policy Controller spec
diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.erb b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.erb
index 5af1d572cfd7..f2c14b2d223f 100644
--- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.erb
+++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.erb
@@ -460,7 +460,6 @@ resource "google_gke_hub_feature" "feature" {
location = "global"
fleet_default_member_config {
configmanagement {
- version = "1.16.0"
config_sync {
source_format = "hierarchy"
git {
@@ -487,7 +486,6 @@ resource "google_gke_hub_feature" "feature" {
location = "global"
fleet_default_member_config {
configmanagement {
- version = "1.16.1"
config_sync {
source_format = "unstructured"
oci {
From 0c03a04663f6880034c08529349224741bfc3642 Mon Sep 17 00:00:00 2001
From: Thomas Rodgers
Date: Thu, 14 Dec 2023 12:35:44 -0800
Subject: [PATCH 7/9] Add custom flattener for diagflowcx agent git integration
settings field (#9597)
* Add a custom flattener for the parent field
* Update mmv1/templates/terraform/custom_flatten/dialogflowcx_agent_git_integration_settings_github_settings.go.erb
Co-authored-by: Stephen Lewis (Burrows)
---------
Co-authored-by: Stephen Lewis (Burrows)
---
mmv1/products/dialogflowcx/Agent.yaml | 2 +-
...ntegration_settings_github_settings.go.erb | 32 +++++++++++++++++++
...ttings_github_settings_access_token.go.erb | 17 ----------
3 files changed, 33 insertions(+), 18 deletions(-)
create mode 100644 mmv1/templates/terraform/custom_flatten/dialogflowcx_agent_git_integration_settings_github_settings.go.erb
delete mode 100644 mmv1/templates/terraform/custom_flatten/dialogflowcx_agent_git_integration_settings_github_settings_access_token.go.erb
diff --git a/mmv1/products/dialogflowcx/Agent.yaml b/mmv1/products/dialogflowcx/Agent.yaml
index 086400417dda..c661bb18ecbc 100644
--- a/mmv1/products/dialogflowcx/Agent.yaml
+++ b/mmv1/products/dialogflowcx/Agent.yaml
@@ -164,6 +164,7 @@ properties:
name: 'githubSettings'
description: |
Settings of integration with GitHub.
+ custom_flatten: 'templates/terraform/custom_flatten/dialogflowcx_agent_git_integration_settings_github_settings.go.erb' # accessToken comes from config, no response
properties:
- !ruby/object:Api::Type::String
name: 'displayName'
@@ -183,7 +184,6 @@ properties:
The access token used to authenticate the access to the GitHub repository.
sensitive: true
ignore_read: true
- custom_flatten: 'templates/terraform/custom_flatten/dialogflowcx_agent_git_integration_settings_github_settings_access_token.go.erb'
- !ruby/object:Api::Type::Array
name: 'branches'
description: |
diff --git a/mmv1/templates/terraform/custom_flatten/dialogflowcx_agent_git_integration_settings_github_settings.go.erb b/mmv1/templates/terraform/custom_flatten/dialogflowcx_agent_git_integration_settings_github_settings.go.erb
new file mode 100644
index 000000000000..bf689e0f2566
--- /dev/null
+++ b/mmv1/templates/terraform/custom_flatten/dialogflowcx_agent_git_integration_settings_github_settings.go.erb
@@ -0,0 +1,32 @@
+<%# The license inside this block applies to this file.
+ # Copyright 2018 Google Inc.
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+-%>
+func flattenDialogflowCXAgentGitIntegrationSettingsGithubSettings(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
+ if v == nil {
+ return nil
+ }
+ original := v.(map[string]interface{})
+ // ignore access_token, which is always returned as REDACTED
+ delete(original, "access_token");
+ if len(original) == 0 {
+ return nil
+ }
+ transformed := make(map[string]interface{})
+ transformed["display_name"] = original["displayName"]
+ transformed["repository_uri"] = original["repositoryUri"]
+ transformed["tracking_branch"] = original["trackingBranch"]
+ transformed["access_token"] = d.Get("git_integration_settings.0.github_settings.0.access_token")
+ transformed["branches"] = original["branches"]
+ return []interface{}{transformed}
+}
diff --git a/mmv1/templates/terraform/custom_flatten/dialogflowcx_agent_git_integration_settings_github_settings_access_token.go.erb b/mmv1/templates/terraform/custom_flatten/dialogflowcx_agent_git_integration_settings_github_settings_access_token.go.erb
deleted file mode 100644
index 7102f1e6fb07..000000000000
--- a/mmv1/templates/terraform/custom_flatten/dialogflowcx_agent_git_integration_settings_github_settings_access_token.go.erb
+++ /dev/null
@@ -1,17 +0,0 @@
-<%# The license inside this block applies to this file.
- # Copyright 2018 Google Inc.
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
--%>
-func flatten<%= prefix -%><%= titlelize_property(property) -%>(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
- return d.Get("git_integration_settings.0.github_settings.0.access_token")
-}
From 05f4a8d1d6b7e577e7cf563f2b6f10827a8cebd7 Mon Sep 17 00:00:00 2001
From: Derrek Bertrand
Date: Thu, 14 Dec 2023 15:54:43 -0500
Subject: [PATCH 8/9] Remove unnecessary lifecycle ignore_changes from examples
(#9571)
---
.../terraform/examples/cloudrunv2_job_basic.tf.erb | 8 +-------
.../terraform/examples/cloudrunv2_job_directvpc.tf.erb | 6 ------
.../terraform/examples/cloudrunv2_job_emptydir.tf.erb | 6 ------
.../terraform/examples/cloudrunv2_job_secret.tf.erb | 6 ------
.../terraform/examples/cloudrunv2_job_sql.tf.erb | 6 ------
.../terraform/examples/cloudrunv2_job_vpcaccess.tf.erb | 6 ------
.../examples/cloudrunv2_service_custom_audiences.tf.erb | 8 +-------
.../examples/cloudrunv2_service_multicontainer.tf.erb | 6 ------
8 files changed, 2 insertions(+), 50 deletions(-)
diff --git a/mmv1/templates/terraform/examples/cloudrunv2_job_basic.tf.erb b/mmv1/templates/terraform/examples/cloudrunv2_job_basic.tf.erb
index 8e675f0344cf..c37ab87a9f3d 100644
--- a/mmv1/templates/terraform/examples/cloudrunv2_job_basic.tf.erb
+++ b/mmv1/templates/terraform/examples/cloudrunv2_job_basic.tf.erb
@@ -9,10 +9,4 @@ resource "google_cloud_run_v2_job" "<%= ctx[:primary_resource_id] %>" {
}
}
}
-
- lifecycle {
- ignore_changes = [
- launch_stage,
- ]
- }
-}
\ No newline at end of file
+}
diff --git a/mmv1/templates/terraform/examples/cloudrunv2_job_directvpc.tf.erb b/mmv1/templates/terraform/examples/cloudrunv2_job_directvpc.tf.erb
index a28913e7fc13..9b38d9722a59 100644
--- a/mmv1/templates/terraform/examples/cloudrunv2_job_directvpc.tf.erb
+++ b/mmv1/templates/terraform/examples/cloudrunv2_job_directvpc.tf.erb
@@ -17,10 +17,4 @@ resource "google_cloud_run_v2_job" "<%= ctx[:primary_resource_id] %>" {
}
}
}
-
- lifecycle {
- ignore_changes = [
- launch_stage,
- ]
- }
}
diff --git a/mmv1/templates/terraform/examples/cloudrunv2_job_emptydir.tf.erb b/mmv1/templates/terraform/examples/cloudrunv2_job_emptydir.tf.erb
index 107b03c3c53c..0c51bbcd36df 100644
--- a/mmv1/templates/terraform/examples/cloudrunv2_job_emptydir.tf.erb
+++ b/mmv1/templates/terraform/examples/cloudrunv2_job_emptydir.tf.erb
@@ -21,10 +21,4 @@ resource "google_cloud_run_v2_job" "<%= ctx[:primary_resource_id] %>" {
}
}
}
-
- lifecycle {
- ignore_changes = [
- launch_stage,
- ]
- }
}
diff --git a/mmv1/templates/terraform/examples/cloudrunv2_job_secret.tf.erb b/mmv1/templates/terraform/examples/cloudrunv2_job_secret.tf.erb
index 387d0baf2b05..137234fdcea2 100644
--- a/mmv1/templates/terraform/examples/cloudrunv2_job_secret.tf.erb
+++ b/mmv1/templates/terraform/examples/cloudrunv2_job_secret.tf.erb
@@ -26,12 +26,6 @@ resource "google_cloud_run_v2_job" "<%= ctx[:primary_resource_id] %>" {
}
}
- lifecycle {
- ignore_changes = [
- launch_stage,
- ]
- }
-
depends_on = [
google_secret_manager_secret_version.secret-version-data,
google_secret_manager_secret_iam_member.secret-access,
diff --git a/mmv1/templates/terraform/examples/cloudrunv2_job_sql.tf.erb b/mmv1/templates/terraform/examples/cloudrunv2_job_sql.tf.erb
index 24740a509e58..d94410960200 100644
--- a/mmv1/templates/terraform/examples/cloudrunv2_job_sql.tf.erb
+++ b/mmv1/templates/terraform/examples/cloudrunv2_job_sql.tf.erb
@@ -34,12 +34,6 @@ resource "google_cloud_run_v2_job" "<%= ctx[:primary_resource_id] %>" {
}
}
}
-
- lifecycle {
- ignore_changes = [
- launch_stage,
- ]
- }
}
data "google_project" "project" {
diff --git a/mmv1/templates/terraform/examples/cloudrunv2_job_vpcaccess.tf.erb b/mmv1/templates/terraform/examples/cloudrunv2_job_vpcaccess.tf.erb
index a90af1a27972..409c48dfef9a 100644
--- a/mmv1/templates/terraform/examples/cloudrunv2_job_vpcaccess.tf.erb
+++ b/mmv1/templates/terraform/examples/cloudrunv2_job_vpcaccess.tf.erb
@@ -13,12 +13,6 @@ resource "google_cloud_run_v2_job" "<%= ctx[:primary_resource_id] %>" {
}
}
}
-
- lifecycle {
- ignore_changes = [
- launch_stage,
- ]
- }
}
resource "google_vpc_access_connector" "connector" {
diff --git a/mmv1/templates/terraform/examples/cloudrunv2_service_custom_audiences.tf.erb b/mmv1/templates/terraform/examples/cloudrunv2_service_custom_audiences.tf.erb
index 6147add9ab27..50445e63e7e5 100644
--- a/mmv1/templates/terraform/examples/cloudrunv2_service_custom_audiences.tf.erb
+++ b/mmv1/templates/terraform/examples/cloudrunv2_service_custom_audiences.tf.erb
@@ -11,10 +11,4 @@ resource "google_cloud_run_v2_service" "<%= ctx[:primary_resource_id] %>" {
image = "us-docker.pkg.dev/cloudrun/container/hello"
}
}
-
- lifecycle {
- ignore_changes = [
- launch_stage,
- ]
- }
-}
\ No newline at end of file
+}
diff --git a/mmv1/templates/terraform/examples/cloudrunv2_service_multicontainer.tf.erb b/mmv1/templates/terraform/examples/cloudrunv2_service_multicontainer.tf.erb
index 03391c9bb2b9..bb5a86133d9b 100644
--- a/mmv1/templates/terraform/examples/cloudrunv2_service_multicontainer.tf.erb
+++ b/mmv1/templates/terraform/examples/cloudrunv2_service_multicontainer.tf.erb
@@ -29,10 +29,4 @@ resource "google_cloud_run_v2_service" "<%= ctx[:primary_resource_id] %>" {
}
}
}
-
- lifecycle {
- ignore_changes = [
- launch_stage,
- ]
- }
}
From a080fd37344e552c5bf62a11e5e1f0425b8b7b40 Mon Sep 17 00:00:00 2001
From: Thomas Rodgers
Date: Thu, 14 Dec 2023 13:00:56 -0800
Subject: [PATCH 9/9] Magician refactor generate comment (#9613)
* Run cassettes on main
Revert local testing changes
Test: make magician-check-vcr-cassettes the only step
Also collect passing and skipping tests
collectResult no longer returns an error
Remove extra return value
gitignore magician binary
Print logs
Actually set logPaths and cassettePaths
Rework management of environment variables
Remove extra ]
Also print all_tests.log
Echo home
Include HOME in env
Echo path
Use GOCACHE instead of HOME
add -vet=off
Run all tests, skip printing logs
Run 24 tests and upload logs
Add -r and remove logs
Also upload cassettes
Run tests for one resource in recording
Run tests for one resource in replaying
Also capture PATH
Run recording again
Clone hashicorp provider instead of mm
Run all tests
Run replaying
Move check cassettes to push-downstream
Remove echo PATH
change to GA in doc (#9491)
Co-authored-by: Edward Sun
Refactor magician structs (#9605)
* Refactored github interfaces
Fixed bug in overriding breaking changes
* gofmt
* Removed GetPullRequestLabelIDs
Use magician for generate comment
Fix formatting of breaking changes
Keep diff string empty
Add missing newline
Add copyright notices
Add missing space
Revert changes from running generate-comment
Run cassettes on main
Print logs
Rework management of environment variables
change to GA in doc (#9491)
Co-authored-by: Edward Sun
git checkout main gcb-generate-diffs-new.yml
* Move Version into provider package and Repo into source package
* Remove second walk func
* Add ,
* Add source Runner interface
* Remove check cassettes changes
* Fix issues with diff processor build
* Fix test
---
.ci/magician/cmd/generate_comment.go | 237 +++++++++++-----------
.ci/magician/cmd/generate_comment_test.go | 87 +++++---
.ci/magician/cmd/interfaces.go | 4 +-
.ci/magician/cmd/mock_runner_test.go | 97 +++++----
.ci/magician/exec/runner.go | 36 +++-
.ci/magician/provider/version.go | 37 ++++
.ci/magician/source/repo.go | 85 ++++++++
.ci/scripts/go-plus/magician/exec.sh | 2 +-
.gitignore | 3 +
9 files changed, 394 insertions(+), 194 deletions(-)
create mode 100644 .ci/magician/provider/version.go
create mode 100644 .ci/magician/source/repo.go
diff --git a/.ci/magician/cmd/generate_comment.go b/.ci/magician/cmd/generate_comment.go
index b6d457382111..2684f3068aa5 100644
--- a/.ci/magician/cmd/generate_comment.go
+++ b/.ci/magician/cmd/generate_comment.go
@@ -19,6 +19,8 @@ import (
"fmt"
"magician/exec"
"magician/github"
+ "magician/provider"
+ "magician/source"
"os"
"path/filepath"
"regexp"
@@ -29,14 +31,16 @@ import (
const allowBreakingChangesLabel = "override-breaking-change"
-type ProviderVersion string
-
-type Repository struct {
- Name string // Name in GitHub (e.g. magic-modules)
- Title string // Title for display (e.g. Magic Modules)
- Path string // local Path once checked out, including Name
- Version ProviderVersion
- DiffCanFail bool // whether to allow the command to continue if cloning or diffing the repo fails
+var gcEnvironmentVariables = [...]string{
+ "BUILD_ID",
+ "BUILD_STEP",
+ "COMMIT_SHA",
+ "GITHUB_TOKEN",
+ "GOPATH",
+ "HOME",
+ "PATH",
+ "PR_NUMBER",
+ "PROJECT_ID",
}
var generateCommentCmd = &cobra.Command{
@@ -45,12 +49,7 @@ var generateCommentCmd = &cobra.Command{
Long: `This command processes pull requests and performs various validations and actions based on the PR's metadata and author.
The following PR details are expected as environment variables:
- 1. BUILD_ID
- 2. PROJECT_ID
- 3. BUILD_STEP
- 4. COMMIT_SHA
- 5. PR_NUMBER
- 6. GITHUB_TOKEN
+` + listGCEnvironmentVariables() + `
The command performs the following steps:
1. Clone the tpg, tpgb, tfc, and tfoics repos from modular-magician.
@@ -61,25 +60,14 @@ var generateCommentCmd = &cobra.Command{
6. Run unit tests for the missing test detector.
`,
Run: func(cmd *cobra.Command, args []string) {
- buildID := os.Getenv("BUILD_ID")
- fmt.Println("Build ID: ", buildID)
-
- projectID := os.Getenv("PROJECT_ID")
- fmt.Println("Project ID: ", projectID)
-
- buildStep := os.Getenv("BUILD_STEP")
- fmt.Println("Build Step: ", buildStep)
-
- commit := os.Getenv("COMMIT_SHA")
- fmt.Println("Commit SHA: ", commit)
-
- prNumber := os.Getenv("PR_NUMBER")
- fmt.Println("PR Number: ", prNumber)
-
- githubToken, ok := os.LookupEnv("GITHUB_TOKEN")
- if !ok {
- fmt.Println("Did not provide GITHUB_TOKEN environment variable")
- os.Exit(1)
+ env := make(map[string]string, len(gcEnvironmentVariables))
+ for _, ev := range gcEnvironmentVariables {
+ val, ok := os.LookupEnv(ev)
+ if !ok {
+ fmt.Printf("Did not provide %s environment variable\n", ev)
+ os.Exit(1)
+ }
+ env[ev] = val
}
gh := github.NewClient()
@@ -88,14 +76,23 @@ var generateCommentCmd = &cobra.Command{
fmt.Println("Error creating a runner: ", err)
os.Exit(1)
}
- execGenerateComment(buildID, projectID, buildStep, commit, prNumber, githubToken, gh, rnr)
+ ctlr := source.NewController(filepath.Join("workspace", "go"), "modular-magician", env["GITHUB_TOKEN"], rnr)
+ execGenerateComment(env, gh, rnr, ctlr)
},
}
-func execGenerateComment(buildID, projectID, buildStep, commit, prNumber, githubToken string, gh GithubClient, r ExecRunner) {
- newBranch := "auto-pr-" + prNumber
- oldBranch := "auto-pr-" + prNumber + "-old"
- wd := r.GetCWD()
+func listGCEnvironmentVariables() string {
+ var result string
+ for i, ev := range gcEnvironmentVariables {
+ result += fmt.Sprintf("\t%2d. %s\n", i+1, ev)
+ }
+ return result
+}
+
+func execGenerateComment(env map[string]string, gh GithubClient, rnr ExecRunner, ctlr *source.Controller) {
+ newBranch := "auto-pr-" + env["PR_NUMBER"]
+ oldBranch := "auto-pr-" + env["PR_NUMBER"] + "-old"
+ wd := rnr.GetCWD()
mmLocalPath := filepath.Join(wd, "..", "..")
tpgRepoName := "terraform-provider-google"
tpgLocalPath := filepath.Join(mmLocalPath, "..", "tpg")
@@ -108,7 +105,7 @@ func execGenerateComment(buildID, projectID, buildStep, commit, prNumber, github
tfcLocalPath := filepath.Join(mmLocalPath, "..", "tfc")
var diffs string
- for _, repo := range []Repository{
+ for _, repo := range []*source.Repo{
{
Name: tpgRepoName,
Title: "Terraform GA",
@@ -132,7 +129,7 @@ func execGenerateComment(buildID, projectID, buildStep, commit, prNumber, github
},
} {
// TPG/TPGB difference
- repoDiffs, err := cloneAndDiff(repo, oldBranch, newBranch, githubToken, r)
+ repoDiffs, err := cloneAndDiff(repo, oldBranch, newBranch, ctlr)
if err != nil {
fmt.Printf("Error cloning and diffing tpg repo: %v\n", err)
if !repo.DiffCanFail {
@@ -148,37 +145,43 @@ func execGenerateComment(buildID, projectID, buildStep, commit, prNumber, github
var err error
diffProcessorPath := filepath.Join(mmLocalPath, "tools", "diff-processor")
// versionedBreakingChanges is a map of breaking change output by provider version.
- versionedBreakingChanges := make(map[ProviderVersion]string, 2)
-
- for _, repo := range []Repository{
+ versionedBreakingChanges := make(map[provider.Version]string, 2)
+
+ env["OLD_REF"] = oldBranch
+ env["NEW_REF"] = newBranch
+ for _, repo := range []struct {
+ Title string
+ Path string
+ Version provider.Version
+ }{
{
Title: "TPG",
Path: tpgLocalPath,
- Version: "ga",
+ Version: provider.GA,
},
{
Title: "TPGB",
Path: tpgbLocalPath,
- Version: "beta",
+ Version: provider.Beta,
},
} {
- // TPG diff processor
- err = buildDiffProcessor(diffProcessorPath, repo.Path, oldBranch, newBranch, r)
+ // TPG(B) diff processor
+ err = buildDiffProcessor(diffProcessorPath, repo.Path, env, rnr)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
- output, err := computeBreakingChanges(diffProcessorPath, r)
+ output, err := computeBreakingChanges(diffProcessorPath, rnr)
if err != nil {
fmt.Println("Error computing TPG breaking changes: ", err)
showBreakingChangesFailed = true
}
versionedBreakingChanges[repo.Version] = strings.TrimSuffix(output, "\n")
- err = addLabels(diffProcessorPath, githubToken, prNumber, r)
+ err = addLabels(diffProcessorPath, env, rnr)
if err != nil {
fmt.Println("Error adding TPG labels to PR: ", err)
}
- err = cleanDiffProcessor(diffProcessorPath, r)
+ err = cleanDiffProcessor(diffProcessorPath, rnr)
if err != nil {
fmt.Println("Error cleaning up diff processor: ", err)
os.Exit(1)
@@ -190,11 +193,11 @@ func execGenerateComment(buildID, projectID, buildStep, commit, prNumber, github
breakingChanges = `## Breaking Change Detection Failed
The breaking change detector crashed during execution. This is usually due to the downstream provider(s) failing to compile. Please investigate or follow up with your reviewer.`
} else {
- breakingChanges = combineBreakingChanges(versionedBreakingChanges["ga"], versionedBreakingChanges["beta"])
+ breakingChanges = combineBreakingChanges(versionedBreakingChanges[provider.GA], versionedBreakingChanges[provider.Beta])
}
// Missing test detector
- missingTests, err := detectMissingTests(mmLocalPath, tpgbLocalPath, oldBranch, r)
+ missingTests, err := detectMissingTests(mmLocalPath, tpgbLocalPath, oldBranch, rnr)
if err != nil {
fmt.Println("Error setting up missing test detector: ", err)
os.Exit(1)
@@ -205,7 +208,7 @@ The breaking change detector crashed during execution. This is usually due to th
if breakingChanges != "" {
message += breakingChanges + "\n\n"
- pullRequest, err := gh.GetPullRequest(prNumber)
+ pullRequest, err := gh.GetPullRequest(env["PR_NUMBER"])
if err != nil {
fmt.Printf("Error getting pull request: %v\n", err)
os.Exit(1)
@@ -232,100 +235,98 @@ The breaking change detector crashed during execution. This is usually due to th
}
}
- if err := gh.PostComment(prNumber, message); err != nil {
- fmt.Printf("Error posting comment to PR %s: %v\n", prNumber, err)
+ if err := gh.PostComment(env["PR_NUMBER"], message); err != nil {
+ fmt.Printf("Error posting comment to PR %s: %v\n", env["PR_NUMBER"], err)
}
- targetURL := fmt.Sprintf("https://console.cloud.google.com/cloud-build/builds;region=global/%s;step=%s?project=%s", buildID, buildStep, projectID)
- if err := gh.PostBuildStatus(prNumber, "terraform-provider-breaking-change-test", breakingState, targetURL, commit); err != nil {
- fmt.Printf("Error posting build status for pr %s commit %s: %v\n", prNumber, commit, err)
+ targetURL := fmt.Sprintf("https://console.cloud.google.com/cloud-build/builds;region=global/%s;step=%s?project=%s", env["BUILD_ID"], env["BUILD_STEP"], env["PROJECT_ID"])
+ if err := gh.PostBuildStatus(env["PR_NUMBER"], "terraform-provider-breaking-change-test", breakingState, targetURL, env["COMMIT_SHA"]); err != nil {
+ fmt.Printf("Error posting build status for pr %s commit %s: %v\n", env["PR_NUMBER"], env["COMMIT_SHA"], err)
os.Exit(1)
}
- if err := r.PushDir(mmLocalPath); err != nil {
+ if err := rnr.PushDir(mmLocalPath); err != nil {
fmt.Println(err)
os.Exit(1)
}
- if diffs := r.MustRun("git", []string{"diff", "HEAD", "origin/main", "tools/missing-test-detector"}, nil); diffs != "" {
+ if diffs := rnr.MustRun("git", []string{"diff", "HEAD", "origin/main", "tools/missing-test-detector"}, nil); diffs != "" {
fmt.Printf("Found diffs in missing test detector:\n%s\nRunning tests.\n", diffs)
- if err := testTools(mmLocalPath, tpgbLocalPath, prNumber, commit, buildID, buildStep, projectID, gh, r); err != nil {
+ if err := testTools(mmLocalPath, tpgbLocalPath, env, gh, rnr); err != nil {
fmt.Printf("Error testing tools in %s: %v\n", mmLocalPath, err)
os.Exit(1)
}
}
- if err := r.PopDir(); err != nil {
+ if err := rnr.PopDir(); err != nil {
fmt.Println(err)
os.Exit(1)
}
}
-func cloneAndDiff(repo Repository, oldBranch, newBranch, githubToken string, r ExecRunner) (string, error) {
+func cloneAndDiff(repo *source.Repo, oldBranch, newBranch string, ctlr *source.Controller) (string, error) {
// Clone the repo to the desired repo.Path.
- url := fmt.Sprintf("https://modular-magician:%s@github.com/modular-magician/%s", githubToken, repo.Name)
- if _, err := r.Run("git", []string{"clone", "-b", newBranch, url, repo.Path}, nil); err != nil {
+ repo.Branch = newBranch
+ if err := ctlr.Clone(repo); err != nil {
return "", fmt.Errorf("error cloning %s: %v\n", repo.Name, err)
}
- // Push dir to the newly cloned repo.
- if err := r.PushDir(repo.Path); err != nil {
+ if err := ctlr.Fetch(repo, oldBranch); err != nil {
return "", err
}
- if _, err := r.Run("git", []string{"fetch", "origin", oldBranch}, nil); err != nil {
- return "", fmt.Errorf("error fetching branch %s in repo %s: %v\n", oldBranch, repo.Name, err)
- }
- // Return summary, if any, and return to original directory.
- if summary, err := r.Run("git", []string{"diff", "origin/" + oldBranch, "origin/" + newBranch, "--shortstat"}, nil); err != nil {
- return "", fmt.Errorf("error diffing %s and %s: %v\n", oldBranch, newBranch, err)
- } else if summary != "" {
- summary = strings.TrimSuffix(summary, "\n")
- return fmt.Sprintf("%s: [Diff](https://github.com/modular-magician/%s/compare/%s..%s) (%s)", repo.Title, repo.Name, oldBranch, newBranch, summary), r.PopDir()
+ // Return summary, if any.
+ diffs, err := ctlr.Diff(repo, oldBranch, newBranch)
+ if err != nil {
+ return "", err
+ }
+ if diffs == "" {
+ return "", nil
}
- return "", r.PopDir()
+ diffs = strings.TrimSuffix(diffs, "\n")
+ return fmt.Sprintf("%s: [Diff](https://github.com/modular-magician/%s/compare/%s..%s) (%s)", repo.Title, repo.Name, oldBranch, newBranch, diffs), nil
}
// Build the diff processor for tpg or tpgb
-func buildDiffProcessor(diffProcessorPath, providerLocalPath, oldBranch, newBranch string, r ExecRunner) error {
- if err := r.PushDir(diffProcessorPath); err != nil {
+func buildDiffProcessor(diffProcessorPath, providerLocalPath string, env map[string]string, rnr ExecRunner) error {
+ if err := rnr.PushDir(diffProcessorPath); err != nil {
return err
}
for _, path := range []string{"old", "new"} {
- if err := r.Copy(providerLocalPath, filepath.Join(diffProcessorPath, path)); err != nil {
+ if err := rnr.Copy(providerLocalPath, filepath.Join(diffProcessorPath, path)); err != nil {
return err
}
}
- if _, err := r.Run("make", []string{"build"}, []string{"OLD_REF=" + oldBranch, "NEW_REF=" + newBranch}); err != nil {
+ if _, err := rnr.Run("make", []string{"build"}, env); err != nil {
return fmt.Errorf("Error running make build in %s: %v\n", diffProcessorPath, err)
}
- return r.PopDir()
+ return rnr.PopDir()
}
-func computeBreakingChanges(diffProcessorPath string, r ExecRunner) (string, error) {
- if err := r.PushDir(diffProcessorPath); err != nil {
+func computeBreakingChanges(diffProcessorPath string, rnr ExecRunner) (string, error) {
+ if err := rnr.PushDir(diffProcessorPath); err != nil {
return "", err
}
- breakingChanges, err := r.Run("bin/diff-processor", []string{"breaking-changes"}, nil)
+ breakingChanges, err := rnr.Run("bin/diff-processor", []string{"breaking-changes"}, nil)
if err != nil {
return "", err
}
- return breakingChanges, r.PopDir()
+ return breakingChanges, rnr.PopDir()
}
-func addLabels(diffProcessorPath, githubToken, prNumber string, r ExecRunner) error {
- if err := r.PushDir(diffProcessorPath); err != nil {
+func addLabels(diffProcessorPath string, env map[string]string, rnr ExecRunner) error {
+ if err := rnr.PushDir(diffProcessorPath); err != nil {
return err
}
- output, err := r.Run("bin/diff-processor", []string{"add-labels", prNumber}, []string{fmt.Sprintf("GITHUB_TOKEN=%s", githubToken)})
+ output, err := rnr.Run("bin/diff-processor", []string{"add-labels", env["PR_NUMBER"]}, env)
fmt.Println(output)
if err != nil {
return err
}
- return r.PopDir()
+ return rnr.PopDir()
}
-func cleanDiffProcessor(diffProcessorPath string, r ExecRunner) error {
+func cleanDiffProcessor(diffProcessorPath string, rnr ExecRunner) error {
for _, path := range []string{"old", "new", "bin"} {
- if err := r.RemoveAll(filepath.Join(diffProcessorPath, path)); err != nil {
+ if err := rnr.RemoveAll(filepath.Join(diffProcessorPath, path)); err != nil {
return err
}
}
@@ -377,93 +378,93 @@ An ` + "`override-breaking-change`" + ` label can be added to allow merging.
// Run the missing test detector and return the results.
// Returns an empty string unless there are missing tests.
// Error will be nil unless an error occurs during setup.
-func detectMissingTests(mmLocalPath, tpgbLocalPath, oldBranch string, r ExecRunner) (string, error) {
+func detectMissingTests(mmLocalPath, tpgbLocalPath, oldBranch string, rnr ExecRunner) (string, error) {
tpgbLocalPathOld := tpgbLocalPath + "old"
- if err := r.Copy(tpgbLocalPath, tpgbLocalPathOld); err != nil {
+ if err := rnr.Copy(tpgbLocalPath, tpgbLocalPathOld); err != nil {
return "", err
}
- if err := r.PushDir(tpgbLocalPathOld); err != nil {
+ if err := rnr.PushDir(tpgbLocalPathOld); err != nil {
return "", err
}
- if _, err := r.Run("git", []string{"checkout", "origin/" + oldBranch}, nil); err != nil {
+ if _, err := rnr.Run("git", []string{"checkout", "origin/" + oldBranch}, nil); err != nil {
return "", err
}
- if err := updatePackageName("old", tpgbLocalPathOld, r); err != nil {
+ if err := updatePackageName("old", tpgbLocalPathOld, rnr); err != nil {
return "", err
}
- if err := updatePackageName("new", tpgbLocalPath, r); err != nil {
+ if err := updatePackageName("new", tpgbLocalPath, rnr); err != nil {
return "", err
}
- if err := r.PopDir(); err != nil {
+ if err := rnr.PopDir(); err != nil {
return "", err
}
missingTestDetectorPath := filepath.Join(mmLocalPath, "tools", "missing-test-detector")
- if err := r.PushDir(missingTestDetectorPath); err != nil {
+ if err := rnr.PushDir(missingTestDetectorPath); err != nil {
return "", err
}
- if _, err := r.Run("go", []string{"mod", "edit", "-replace", fmt.Sprintf("google/provider/%s=%s", "new", tpgbLocalPath)}, nil); err != nil {
+ if _, err := rnr.Run("go", []string{"mod", "edit", "-replace", fmt.Sprintf("google/provider/%s=%s", "new", tpgbLocalPath)}, nil); err != nil {
fmt.Printf("Error running go mod edit: %v\n", err)
}
- if _, err := r.Run("go", []string{"mod", "edit", "-replace", fmt.Sprintf("google/provider/%s=%s", "old", tpgbLocalPathOld)}, nil); err != nil {
+ if _, err := rnr.Run("go", []string{"mod", "edit", "-replace", fmt.Sprintf("google/provider/%s=%s", "old", tpgbLocalPathOld)}, nil); err != nil {
fmt.Printf("Error running go mod edit: %v\n", err)
}
- if _, err := r.Run("go", []string{"mod", "tidy"}, nil); err != nil {
+ if _, err := rnr.Run("go", []string{"mod", "tidy"}, nil); err != nil {
fmt.Printf("Error running go mod tidy: %v\n", err)
}
- missingTests, err := r.Run("go", []string{"run", ".", fmt.Sprintf("-services-dir=%s/google-beta/services", tpgbLocalPath)}, nil)
+ missingTests, err := rnr.Run("go", []string{"run", ".", fmt.Sprintf("-services-dir=%s/google-beta/services", tpgbLocalPath)}, nil)
if err != nil {
fmt.Printf("Error running missing test detector: %v\n", err)
missingTests = ""
} else {
fmt.Printf("Successfully ran missing test detector:\n%s\n", missingTests)
}
- return missingTests, r.PopDir()
+ return missingTests, rnr.PopDir()
}
// Update the provider package name to the given name in the given path.
// name should be either "old" or "new".
-func updatePackageName(name, path string, r ExecRunner) error {
+func updatePackageName(name, path string, rnr ExecRunner) error {
oldPackageName := "github.com/hashicorp/terraform-provider-google-beta"
newPackageName := "google/provider/" + name
fmt.Printf("Updating package name in %s from %s to %s\n", path, oldPackageName, newPackageName)
- if err := r.PushDir(path); err != nil {
+ if err := rnr.PushDir(path); err != nil {
return err
}
- if _, err := r.Run("find", []string{".", "-type", "f", "-name", "*.go", "-exec", "sed", "-i.bak", fmt.Sprintf("s~%s~%s~g", oldPackageName, newPackageName), "{}", "+"}, nil); err != nil {
+ if _, err := rnr.Run("find", []string{".", "-type", "f", "-name", "*.go", "-exec", "sed", "-i.bak", fmt.Sprintf("s~%s~%s~g", oldPackageName, newPackageName), "{}", "+"}, nil); err != nil {
return fmt.Errorf("error running find: %v\n", err)
}
- if _, err := r.Run("sed", []string{"-i.bak", fmt.Sprintf("s|%s|%s|g", oldPackageName, newPackageName), "go.mod"}, nil); err != nil {
+ if _, err := rnr.Run("sed", []string{"-i.bak", fmt.Sprintf("s|%s|%s|g", oldPackageName, newPackageName), "go.mod"}, nil); err != nil {
return fmt.Errorf("error running sed: %v\n", err)
}
- if _, err := r.Run("sed", []string{"-i.bak", fmt.Sprintf("s|%s|%s|g", oldPackageName, newPackageName), "go.sum"}, nil); err != nil {
+ if _, err := rnr.Run("sed", []string{"-i.bak", fmt.Sprintf("s|%s|%s|g", oldPackageName, newPackageName), "go.sum"}, nil); err != nil {
return fmt.Errorf("error running sed: %v\n", err)
}
- return r.PopDir()
+ return rnr.PopDir()
}
// Run unit tests for the missing test detector and diff processor.
// Report results using Github API.
-func testTools(mmLocalPath, tpgbLocalPath, prNumber, commit, buildID, buildStep, projectID string, gh GithubClient, r ExecRunner) error {
+func testTools(mmLocalPath, tpgbLocalPath string, env map[string]string, gh GithubClient, rnr ExecRunner) error {
missingTestDetectorPath := filepath.Join(mmLocalPath, "tools", "missing-test-detector")
- r.PushDir(missingTestDetectorPath)
- if _, err := r.Run("go", []string{"mod", "tidy"}, nil); err != nil {
+ rnr.PushDir(missingTestDetectorPath)
+ if _, err := rnr.Run("go", []string{"mod", "tidy"}, nil); err != nil {
fmt.Printf("error running go mod tidy in %s: %v\n", missingTestDetectorPath, err)
}
servicesDir := filepath.Join(tpgbLocalPath, "google-beta", "services")
state := "success"
- if _, err := r.Run("go", []string{"test"}, []string{"SERVICES_DIR=" + servicesDir}); err != nil {
+ if _, err := rnr.Run("go", []string{"test"}, map[string]string{"SERVICES_DIR": servicesDir}); err != nil {
fmt.Printf("error from running go test in %s: %v\n", missingTestDetectorPath, err)
state = "failure"
}
- targetURL := fmt.Sprintf("https://console.cloud.google.com/cloud-build/builds;region=global/%s;step=%s?project=%s", buildID, buildStep, projectID)
- if err := gh.PostBuildStatus(prNumber, "unit-tests-missing-test-detector", state, targetURL, commit); err != nil {
+ targetURL := fmt.Sprintf("https://console.cloud.google.com/cloud-build/builds;region=global/%s;step=%s?project=%s", env["BUILD_ID"], env["BUILD_STEP"], env["PROJECT_ID"])
+ if err := gh.PostBuildStatus(env["PR_NUMBER"], "unit-tests-missing-test-detector", state, targetURL, env["COMMIT_SHA"]); err != nil {
return err
}
- return r.PopDir()
+ return rnr.PopDir()
}
func init() {
diff --git a/.ci/magician/cmd/generate_comment_test.go b/.ci/magician/cmd/generate_comment_test.go
index 88b0a28f38d8..46f90886f180 100644
--- a/.ci/magician/cmd/generate_comment_test.go
+++ b/.ci/magician/cmd/generate_comment_test.go
@@ -16,6 +16,7 @@
package cmd
import (
+ "magician/source"
"reflect"
"testing"
)
@@ -25,9 +26,28 @@ func TestExecGenerateComment(t *testing.T) {
gh := &mockGithub{
calledMethods: make(map[string][][]any),
}
- execGenerateComment("build1", "project1", "17", "sha1", "pr1", "*******", gh, mr)
+ ctlr := source.NewController("/mock/dir/go", "modular-magician", "*******", mr)
+ env := map[string]string{
+ "BUILD_ID": "build1",
+ "BUILD_STEP": "17",
+ "COMMIT_SHA": "sha1",
+ "GITHUB_TOKEN": "*******",
+ "PR_NUMBER": "pr1",
+ "PROJECT_ID": "project1",
+ }
+ diffProcessorEnv := map[string]string{
+ "BUILD_ID": "build1",
+ "BUILD_STEP": "17",
+ "COMMIT_SHA": "sha1",
+ "GITHUB_TOKEN": "*******",
+ "NEW_REF": "auto-pr-pr1",
+ "OLD_REF": "auto-pr-pr1-old",
+ "PR_NUMBER": "pr1",
+ "PROJECT_ID": "project1",
+ }
+ execGenerateComment(env, gh, mr, ctlr)
- for method, expectedCalls := range map[string][][]any{
+ for method, expectedCalls := range map[string][]ParameterList{
"Copy": {
{"/mock/dir/tpg", "/mock/dir/magic-modules/tools/diff-processor/old"},
{"/mock/dir/tpg", "/mock/dir/magic-modules/tools/diff-processor/new"},
@@ -44,38 +64,39 @@ func TestExecGenerateComment(t *testing.T) {
{"/mock/dir/magic-modules/tools/diff-processor/bin"},
},
"Run": {
- {"/mock/dir/magic-modules/.ci/magician", "git", []string{"clone", "-b", "auto-pr-pr1", "https://modular-magician:*******@github.com/modular-magician/terraform-provider-google", "/mock/dir/tpg"}, []string(nil)},
- {"/mock/dir/tpg", "git", []string{"fetch", "origin", "auto-pr-pr1-old"}, []string(nil)},
- {"/mock/dir/tpg", "git", []string{"diff", "origin/auto-pr-pr1-old", "origin/auto-pr-pr1", "--shortstat"}, []string(nil)},
- {"/mock/dir/magic-modules/.ci/magician", "git", []string{"clone", "-b", "auto-pr-pr1", "https://modular-magician:*******@github.com/modular-magician/terraform-provider-google-beta", "/mock/dir/tpgb"}, []string(nil)},
- {"/mock/dir/tpgb", "git", []string{"fetch", "origin", "auto-pr-pr1-old"}, []string(nil)},
- {"/mock/dir/tpgb", "git", []string{"diff", "origin/auto-pr-pr1-old", "origin/auto-pr-pr1", "--shortstat"}, []string(nil)},
- {"/mock/dir/magic-modules/.ci/magician", "git", []string{"clone", "-b", "auto-pr-pr1", "https://modular-magician:*******@github.com/modular-magician/terraform-google-conversion", "/mock/dir/tfc"}, []string(nil)},
- {"/mock/dir/tfc", "git", []string{"fetch", "origin", "auto-pr-pr1-old"}, []string(nil)},
- {"/mock/dir/tfc", "git", []string{"diff", "origin/auto-pr-pr1-old", "origin/auto-pr-pr1", "--shortstat"}, []string(nil)},
- {"/mock/dir/magic-modules/.ci/magician", "git", []string{"clone", "-b", "auto-pr-pr1", "https://modular-magician:*******@github.com/modular-magician/docs-examples", "/mock/dir/tfoics"}, []string(nil)},
- {"/mock/dir/tfoics", "git", []string{"fetch", "origin", "auto-pr-pr1-old"}, []string(nil)},
- {"/mock/dir/tfoics", "git", []string{"diff", "origin/auto-pr-pr1-old", "origin/auto-pr-pr1", "--shortstat"}, []string(nil)},
- {"/mock/dir/magic-modules/tools/diff-processor", "make", []string{"build"}, []string{"OLD_REF=auto-pr-pr1-old", "NEW_REF=auto-pr-pr1"}},
- {"/mock/dir/magic-modules/tools/diff-processor", "bin/diff-processor", []string{"breaking-changes"}, []string(nil)},
- {"/mock/dir/magic-modules/tools/diff-processor", "bin/diff-processor", []string{"add-labels", "pr1"}, []string{"GITHUB_TOKEN=*******"}},
- {"/mock/dir/magic-modules/tools/diff-processor", "make", []string{"build"}, []string{"OLD_REF=auto-pr-pr1-old", "NEW_REF=auto-pr-pr1"}},
- {"/mock/dir/magic-modules/tools/diff-processor", "bin/diff-processor", []string{"breaking-changes"}, []string(nil)},
- {"/mock/dir/magic-modules/tools/diff-processor", "bin/diff-processor", []string{"add-labels", "pr1"}, []string{"GITHUB_TOKEN=*******"}},
- {"/mock/dir/tpgbold", "git", []string{"checkout", "origin/auto-pr-pr1-old"}, []string(nil)},
- {"/mock/dir/tpgbold", "find", []string{".", "-type", "f", "-name", "*.go", "-exec", "sed", "-i.bak", "s~github.com/hashicorp/terraform-provider-google-beta~google/provider/old~g", "{}", "+"}, []string(nil)},
- {"/mock/dir/tpgbold", "sed", []string{"-i.bak", "s|github.com/hashicorp/terraform-provider-google-beta|google/provider/old|g", "go.mod"}, []string(nil)},
- {"/mock/dir/tpgbold", "sed", []string{"-i.bak", "s|github.com/hashicorp/terraform-provider-google-beta|google/provider/old|g", "go.sum"}, []string(nil)},
- {"/mock/dir/tpgb", "find", []string{".", "-type", "f", "-name", "*.go", "-exec", "sed", "-i.bak", "s~github.com/hashicorp/terraform-provider-google-beta~google/provider/new~g", "{}", "+"}, []string(nil)},
- {"/mock/dir/tpgb", "sed", []string{"-i.bak", "s|github.com/hashicorp/terraform-provider-google-beta|google/provider/new|g", "go.mod"}, []string(nil)},
- {"/mock/dir/tpgb", "sed", []string{"-i.bak", "s|github.com/hashicorp/terraform-provider-google-beta|google/provider/new|g", "go.sum"}, []string(nil)},
- {"/mock/dir/magic-modules/tools/missing-test-detector", "go", []string{"mod", "edit", "-replace", "google/provider/new=/mock/dir/tpgb"}, []string(nil)},
- {"/mock/dir/magic-modules/tools/missing-test-detector", "go", []string{"mod", "edit", "-replace", "google/provider/old=/mock/dir/tpgbold"}, []string(nil)},
- {"/mock/dir/magic-modules/tools/missing-test-detector", "go", []string{"mod", "tidy"}, []string(nil)},
- {"/mock/dir/magic-modules/tools/missing-test-detector", "go", []string{"run", ".", "-services-dir=/mock/dir/tpgb/google-beta/services"}, []string(nil)},
- {"/mock/dir/magic-modules", "git", []string{"diff", "HEAD", "origin/main", "tools/missing-test-detector"}, []string(nil)}},
+ {"/mock/dir/magic-modules/.ci/magician", "git", []string{"clone", "-b", "auto-pr-pr1", "https://modular-magician:*******@github.com/modular-magician/terraform-provider-google", "/mock/dir/tpg"}, map[string]string(nil)},
+ {"/mock/dir/tpg", "git", []string{"fetch", "origin", "auto-pr-pr1-old"}, map[string]string(nil)},
+ {"/mock/dir/tpg", "git", []string{"diff", "origin/auto-pr-pr1-old", "origin/auto-pr-pr1", "--shortstat"}, map[string]string(nil)},
+ {"/mock/dir/magic-modules/.ci/magician", "git", []string{"clone", "-b", "auto-pr-pr1", "https://modular-magician:*******@github.com/modular-magician/terraform-provider-google-beta", "/mock/dir/tpgb"}, map[string]string(nil)},
+ {"/mock/dir/tpgb", "git", []string{"fetch", "origin", "auto-pr-pr1-old"}, map[string]string(nil)},
+ {"/mock/dir/tpgb", "git", []string{"diff", "origin/auto-pr-pr1-old", "origin/auto-pr-pr1", "--shortstat"}, map[string]string(nil)},
+ {"/mock/dir/magic-modules/.ci/magician", "git", []string{"clone", "-b", "auto-pr-pr1", "https://modular-magician:*******@github.com/modular-magician/terraform-google-conversion", "/mock/dir/tfc"}, map[string]string(nil)},
+ {"/mock/dir/tfc", "git", []string{"fetch", "origin", "auto-pr-pr1-old"}, map[string]string(nil)},
+ {"/mock/dir/tfc", "git", []string{"diff", "origin/auto-pr-pr1-old", "origin/auto-pr-pr1", "--shortstat"}, map[string]string(nil)},
+ {"/mock/dir/magic-modules/.ci/magician", "git", []string{"clone", "-b", "auto-pr-pr1", "https://modular-magician:*******@github.com/modular-magician/docs-examples", "/mock/dir/tfoics"}, map[string]string(nil)},
+ {"/mock/dir/tfoics", "git", []string{"fetch", "origin", "auto-pr-pr1-old"}, map[string]string(nil)},
+ {"/mock/dir/tfoics", "git", []string{"diff", "origin/auto-pr-pr1-old", "origin/auto-pr-pr1", "--shortstat"}, map[string]string(nil)},
+ {"/mock/dir/magic-modules/tools/diff-processor", "make", []string{"build"}, diffProcessorEnv},
+ {"/mock/dir/magic-modules/tools/diff-processor", "bin/diff-processor", []string{"breaking-changes"}, map[string]string(nil)},
+ {"/mock/dir/magic-modules/tools/diff-processor", "bin/diff-processor", []string{"add-labels", "pr1"}, diffProcessorEnv},
+ {"/mock/dir/magic-modules/tools/diff-processor", "make", []string{"build"}, diffProcessorEnv},
+ {"/mock/dir/magic-modules/tools/diff-processor", "bin/diff-processor", []string{"breaking-changes"}, map[string]string(nil)},
+ {"/mock/dir/magic-modules/tools/diff-processor", "bin/diff-processor", []string{"add-labels", "pr1"}, diffProcessorEnv},
+ {"/mock/dir/tpgbold", "git", []string{"checkout", "origin/auto-pr-pr1-old"}, map[string]string(nil)},
+ {"/mock/dir/tpgbold", "find", []string{".", "-type", "f", "-name", "*.go", "-exec", "sed", "-i.bak", "s~github.com/hashicorp/terraform-provider-google-beta~google/provider/old~g", "{}", "+"}, map[string]string(nil)},
+ {"/mock/dir/tpgbold", "sed", []string{"-i.bak", "s|github.com/hashicorp/terraform-provider-google-beta|google/provider/old|g", "go.mod"}, map[string]string(nil)},
+ {"/mock/dir/tpgbold", "sed", []string{"-i.bak", "s|github.com/hashicorp/terraform-provider-google-beta|google/provider/old|g", "go.sum"}, map[string]string(nil)},
+ {"/mock/dir/tpgb", "find", []string{".", "-type", "f", "-name", "*.go", "-exec", "sed", "-i.bak", "s~github.com/hashicorp/terraform-provider-google-beta~google/provider/new~g", "{}", "+"}, map[string]string(nil)},
+ {"/mock/dir/tpgb", "sed", []string{"-i.bak", "s|github.com/hashicorp/terraform-provider-google-beta|google/provider/new|g", "go.mod"}, map[string]string(nil)},
+ {"/mock/dir/tpgb", "sed", []string{"-i.bak", "s|github.com/hashicorp/terraform-provider-google-beta|google/provider/new|g", "go.sum"}, map[string]string(nil)},
+ {"/mock/dir/magic-modules/tools/missing-test-detector", "go", []string{"mod", "edit", "-replace", "google/provider/new=/mock/dir/tpgb"}, map[string]string(nil)},
+ {"/mock/dir/magic-modules/tools/missing-test-detector", "go", []string{"mod", "edit", "-replace", "google/provider/old=/mock/dir/tpgbold"}, map[string]string(nil)},
+ {"/mock/dir/magic-modules/tools/missing-test-detector", "go", []string{"mod", "tidy"}, map[string]string(nil)},
+ {"/mock/dir/magic-modules/tools/missing-test-detector", "go", []string{"run", ".", "-services-dir=/mock/dir/tpgb/google-beta/services"}, map[string]string(nil)},
+ {"/mock/dir/magic-modules", "git", []string{"diff", "HEAD", "origin/main", "tools/missing-test-detector"}, map[string]string(nil)},
+ },
} {
- if actualCalls, ok := mr.calledMethods[method]; !ok {
+ if actualCalls, ok := mr.Calls(method); !ok {
t.Fatalf("Found no calls for %s", method)
} else if len(actualCalls) != len(expectedCalls) {
t.Fatalf("Unexpected number of calls for %s, got %d, expected %d", method, len(actualCalls), len(expectedCalls))
diff --git a/.ci/magician/cmd/interfaces.go b/.ci/magician/cmd/interfaces.go
index a03bce062439..fa87eedca9cc 100644
--- a/.ci/magician/cmd/interfaces.go
+++ b/.ci/magician/cmd/interfaces.go
@@ -44,6 +44,6 @@ type ExecRunner interface {
RemoveAll(path string) error
PushDir(path string) error
PopDir() error
- Run(name string, args, env []string) (string, error)
- MustRun(name string, args, env []string) string
+ Run(name string, args []string, env map[string]string) (string, error)
+ MustRun(name string, args []string, env map[string]string) string
}
diff --git a/.ci/magician/cmd/mock_runner_test.go b/.ci/magician/cmd/mock_runner_test.go
index 6743e3a1e6c5..5789d9878a3f 100644
--- a/.ci/magician/cmd/mock_runner_test.go
+++ b/.ci/magician/cmd/mock_runner_test.go
@@ -20,45 +20,53 @@ import (
"errors"
"fmt"
"log"
+ "path/filepath"
)
+type ParameterList []any
+
+type MockRunner interface {
+ ExecRunner
+ Calls(method string) ([]ParameterList, bool)
+}
+
type mockRunner struct {
- calledMethods map[string][][]any
+ calledMethods map[string][]ParameterList
cmdResults map[string]string
cwd string
dirStack *list.List
}
-func NewMockRunner() *mockRunner {
+func NewMockRunner() MockRunner {
return &mockRunner{
- calledMethods: make(map[string][][]any),
+ calledMethods: make(map[string][]ParameterList),
cmdResults: map[string]string{
- "/mock/dir/tfc git [clone -b auto-pr-pr1 https://modular-magician:*******@github.com/modular-magician/docs-examples /mock/dir/tfoics] []": "",
- "/mock/dir/tpgb git [clone -b auto-pr-pr1 https://modular-magician:*******@github.com/modular-magician/terraform-google-conversion /mock/dir/tfc] []": "",
- " git [clone -b auto-pr-pr1 https://modular-magician:*******@github.com/modular-magician/terraform-provider-google /mock/dir/tpg] []": "",
- "/mock/dir/tpg git [clone -b auto-pr-pr1 https://modular-magician:*******@github.com/modular-magician/terraform-provider-google-beta /mock/dir/tpgb] []": "",
- "/mock/dir/magic-modules git [diff HEAD origin/main tools/missing-test-detector] []": "",
- "/mock/dir/magic-modules/tools/diff-processor bin/diff-processor [breaking-changes] []": "",
- "/mock/dir/magic-modules/tools/diff-processor make [build] [OLD_REF=auto-pr-pr1-old NEW_REF=auto-pr-pr1]": "",
- "/mock/dir/magic-modules/tools/missing-test-detector go [mod edit -replace google/provider/new=/mock/dir/tpgb] []": "",
- "/mock/dir/magic-modules/tools/missing-test-detector go [mod edit -replace google/provider/old=/mock/dir/tpgbold] []": "",
- "/mock/dir/magic-modules/tools/missing-test-detector go [mod tidy] []": "",
- "/mock/dir/magic-modules/tools/missing-test-detector go [run . -services-dir=/mock/dir/tpgb/google-beta/services] []": "## Missing test report\nYour PR includes resource fields which are not covered by any test.\n\nResource: `google_folder_access_approval_settings` (3 total tests)\nPlease add an acceptance test which includes these fields. The test should include the following:\n\n```hcl\nresource \"google_folder_access_approval_settings\" \"primary\" {\n uncovered_field = # value needed\n}\n\n```\n",
- "/mock/dir/tfc git [diff origin/auto-pr-pr1-old origin/auto-pr-pr1 --shortstat] []": " 1 file changed, 10 insertions(+)\n",
- "/mock/dir/tfc git [fetch origin auto-pr-pr1-old] []": "",
- "/mock/dir/tfoics git [diff origin/auto-pr-pr1-old origin/auto-pr-pr1 --shortstat] []": "",
- "/mock/dir/tfoics git [fetch origin auto-pr-pr1-old] []": "",
- "/mock/dir/tpg git [diff origin/auto-pr-pr1-old origin/auto-pr-pr1 --shortstat] []": " 2 files changed, 40 insertions(+)\n",
- "/mock/dir/tpg git [fetch origin auto-pr-pr1-old] []": "",
- "/mock/dir/tpgb find [. -type f -name *.go -exec sed -i.bak s~github.com/hashicorp/terraform-provider-google-beta~google/provider/new~g {} +] []": "",
- "/mock/dir/tpgb git [diff origin/auto-pr-pr1-old origin/auto-pr-pr1 --shortstat] []": " 2 files changed, 40 insertions(+)\n",
- "/mock/dir/tpgb git [fetch origin auto-pr-pr1-old] []": "",
- "/mock/dir/tpgb sed [-i.bak s|github.com/hashicorp/terraform-provider-google-beta|google/provider/new|g go.mod] []": "",
- "/mock/dir/tpgb sed [-i.bak s|github.com/hashicorp/terraform-provider-google-beta|google/provider/new|g go.sum] []": "",
- "/mock/dir/tpgbold find [. -type f -name *.go -exec sed -i.bak s~github.com/hashicorp/terraform-provider-google-beta~google/provider/old~g {} +] []": "",
- "/mock/dir/tpgbold git [checkout origin/auto-pr-pr1-old] []": "",
- "/mock/dir/tpgbold sed [-i.bak s|github.com/hashicorp/terraform-provider-google-beta|google/provider/old|g go.mod] []": "",
- "/mock/dir/tpgbold sed [-i.bak s|github.com/hashicorp/terraform-provider-google-beta|google/provider/old|g go.sum] []": "",
+ "/mock/dir/tfc git [clone -b auto-pr-pr1 https://modular-magician:*******@github.com/modular-magician/docs-examples /mock/dir/tfoics] map[]": "",
+ "/mock/dir/tpgb git [clone -b auto-pr-pr1 https://modular-magician:*******@github.com/modular-magician/terraform-google-conversion /mock/dir/tfc] map[]": "",
+ " git [clone -b auto-pr-pr1 https://modular-magician:*******@github.com/modular-magician/terraform-provider-google /mock/dir/tpg] map[]": "",
+ "/mock/dir/tpg git [clone -b auto-pr-pr1 https://modular-magician:*******@github.com/modular-magician/terraform-provider-google-beta /mock/dir/tpgb] map[]": "",
+ "/mock/dir/magic-modules git [diff HEAD origin/main tools/missing-test-detector] map[]": "",
+ "/mock/dir/magic-modules/tools/diff-processor bin/diff-processor [breaking-changes] map[]": "",
+ "/mock/dir/magic-modules/tools/diff-processor make [build] map[OLD_REF:auto-pr-pr1-old NEW_REF:auto-pr-pr1]": "",
+ "/mock/dir/magic-modules/tools/missing-test-detector go [mod edit -replace google/provider/new=/mock/dir/tpgb] map[]": "",
+ "/mock/dir/magic-modules/tools/missing-test-detector go [mod edit -replace google/provider/old=/mock/dir/tpgbold] map[]": "",
+ "/mock/dir/magic-modules/tools/missing-test-detector go [mod tidy] map[]": "",
+ "/mock/dir/magic-modules/tools/missing-test-detector go [run . -services-dir=/mock/dir/tpgb/google-beta/services] map[]": "## Missing test report\nYour PR includes resource fields which are not covered by any test.\n\nResource: `google_folder_access_approval_settings` (3 total tests)\nPlease add an acceptance test which includes these fields. The test should include the following:\n\n```hcl\nresource \"google_folder_access_approval_settings\" \"primary\" {\n uncovered_field = # value needed\n}\n\n```\n",
+ "/mock/dir/tfc git [diff origin/auto-pr-pr1-old origin/auto-pr-pr1 --shortstat] map[]": " 1 file changed, 10 insertions(+)\n",
+ "/mock/dir/tfc git [fetch origin auto-pr-pr1-old] map[]": "",
+ "/mock/dir/tfoics git [diff origin/auto-pr-pr1-old origin/auto-pr-pr1 --shortstat] map[]": "",
+ "/mock/dir/tfoics git [fetch origin auto-pr-pr1-old] map[]": "",
+ "/mock/dir/tpg git [diff origin/auto-pr-pr1-old origin/auto-pr-pr1 --shortstat] map[]": " 2 files changed, 40 insertions(+)\n",
+ "/mock/dir/tpg git [fetch origin auto-pr-pr1-old] map[]": "",
+ "/mock/dir/tpgb find [. -type f -name *.go -exec sed -i.bak s~github.com/hashicorp/terraform-provider-google-beta~google/provider/new~g {} +] map[]": "",
+ "/mock/dir/tpgb git [diff origin/auto-pr-pr1-old origin/auto-pr-pr1 --shortstat] map[]": " 2 files changed, 40 insertions(+)\n",
+ "/mock/dir/tpgb git [fetch origin auto-pr-pr1-old] map[]": "",
+ "/mock/dir/tpgb sed [-i.bak s|github.com/hashicorp/terraform-provider-google-beta|google/provider/new|g go.mod] map[]": "",
+ "/mock/dir/tpgb sed [-i.bak s|github.com/hashicorp/terraform-provider-google-beta|google/provider/new|g go.sum] map[]": "",
+ "/mock/dir/tpgbold find [. -type f -name *.go -exec sed -i.bak s~github.com/hashicorp/terraform-provider-google-beta~google/provider/old~g {} +] map[]": "",
+ "/mock/dir/tpgbold git [checkout origin/auto-pr-pr1-old] map[]": "",
+ "/mock/dir/tpgbold sed [-i.bak s|github.com/hashicorp/terraform-provider-google-beta|google/provider/old|g go.mod] map[]": "",
+ "/mock/dir/tpgbold sed [-i.bak s|github.com/hashicorp/terraform-provider-google-beta|google/provider/old|g go.sum] map[]": "",
},
cwd: "/mock/dir/magic-modules/.ci/magician",
dirStack: list.New(),
@@ -69,13 +77,29 @@ func (mr *mockRunner) GetCWD() string {
return mr.cwd
}
+func (mr *mockRunner) Mkdir(path string) error {
+ return nil
+}
+
+func (mr *mockRunner) Walk(root string, fn filepath.WalkFunc) error {
+ return nil
+}
+
+func (mr *mockRunner) ReadFile(name string) (string, error) {
+ return "", nil
+}
+
+func (mr *mockRunner) WriteFile(name, data string) error {
+ return nil
+}
+
func (mr *mockRunner) Copy(src, dest string) error {
- mr.calledMethods["Copy"] = append(mr.calledMethods["Copy"], []any{src, dest})
+ mr.calledMethods["Copy"] = append(mr.calledMethods["Copy"], ParameterList{src, dest})
return nil
}
func (mr *mockRunner) RemoveAll(path string) error {
- mr.calledMethods["RemoveAll"] = append(mr.calledMethods["RemoveAll"], []any{path})
+ mr.calledMethods["RemoveAll"] = append(mr.calledMethods["RemoveAll"], ParameterList{path})
return nil
}
@@ -101,8 +125,8 @@ func (mr *mockRunner) PopDir() error {
return nil
}
-func (mr *mockRunner) Run(name string, args, env []string) (string, error) {
- mr.calledMethods["Run"] = append(mr.calledMethods["Run"], []any{mr.cwd, name, args, env})
+func (mr *mockRunner) Run(name string, args []string, env map[string]string) (string, error) {
+ mr.calledMethods["Run"] = append(mr.calledMethods["Run"], ParameterList{mr.cwd, name, args, env})
cmd := fmt.Sprintf("%s %s %v %v", mr.cwd, name, args, env)
if result, ok := mr.cmdResults[cmd]; ok {
return result, nil
@@ -111,10 +135,15 @@ func (mr *mockRunner) Run(name string, args, env []string) (string, error) {
return "", nil
}
-func (mr *mockRunner) MustRun(name string, args, env []string) string {
+func (mr *mockRunner) MustRun(name string, args []string, env map[string]string) string {
out, err := mr.Run(name, args, env)
if err != nil {
log.Fatal(err)
}
return out
}
+
+func (mr *mockRunner) Calls(method string) ([]ParameterList, bool) {
+ calls, ok := mr.calledMethods[method]
+ return calls, ok
+}
diff --git a/.ci/magician/exec/runner.go b/.ci/magician/exec/runner.go
index 5d746f7a963d..4d85ece99189 100644
--- a/.ci/magician/exec/runner.go
+++ b/.ci/magician/exec/runner.go
@@ -19,6 +19,7 @@ import (
"container/list"
"errors"
"fmt"
+ "io/fs"
"log"
"os"
"os/exec"
@@ -51,6 +52,14 @@ func (ar *Runner) Copy(src, dest string) error {
return cp.Copy(ar.abs(src), ar.abs(dest))
}
+func (ar *Runner) Mkdir(path string) error {
+ return os.MkdirAll(ar.abs(path), 0777)
+}
+
+func (ar *Runner) Walk(root string, fn filepath.WalkFunc) error {
+ return filepath.Walk(root, fn)
+}
+
func (ar *Runner) RemoveAll(path string) error {
return os.RemoveAll(ar.abs(path))
}
@@ -83,19 +92,34 @@ func (ar *Runner) WriteFile(name, data string) error {
return os.WriteFile(ar.abs(name), []byte(data), 0644)
}
-func (ar *Runner) Run(name string, args, env []string) (string, error) {
+func (ar *Runner) ReadFile(name string) (string, error) {
+ data, err := os.ReadFile(ar.abs(name))
+ if err != nil {
+ return "", err
+ }
+ return string(data), nil
+}
+
+// Run runs the given command with the given args and env, returning its output and an error, if any.
+func (ar *Runner) Run(name string, args []string, env map[string]string) (string, error) {
cmd := exec.Command(name, args...)
cmd.Dir = ar.cwd
- cmd.Env = append(os.Environ(), env...)
+ for ev, val := range env {
+ cmd.Env = append(cmd.Env, fmt.Sprintf("%s=%s", ev, val))
+ }
out, err := cmd.Output()
- if err != nil {
- exitErr := err.(*exec.ExitError)
- return string(out), fmt.Errorf("error running %s: %v\nstdout:\n%sstderr:\n%s", name, err, out, exitErr.Stderr)
+ switch typedErr := err.(type) {
+ case *exec.ExitError:
+ return string(out), fmt.Errorf("error running %s: %v\nstdout:\n%sstderr:\n%s", name, err, out, typedErr.Stderr)
+ case *fs.PathError:
+ return "", fmt.Errorf("path error running %s: %v", name, typedErr)
+
}
return string(out), nil
}
-func (ar *Runner) MustRun(name string, args, env []string) string {
+// MustRun runs the command and exits the process if the command fails.
+func (ar *Runner) MustRun(name string, args []string, env map[string]string) string {
out, err := ar.Run(name, args, env)
if err != nil {
log.Fatal(err)
diff --git a/.ci/magician/provider/version.go b/.ci/magician/provider/version.go
new file mode 100644
index 000000000000..7c98f3817e29
--- /dev/null
+++ b/.ci/magician/provider/version.go
@@ -0,0 +1,37 @@
+package provider
+
+type Version int
+
+const (
+ GA Version = iota
+ Beta
+)
+
+const NumVersions = 2
+
+func (v Version) String() string {
+ switch v {
+ case GA:
+ return "ga"
+ case Beta:
+ return "beta"
+ }
+ return "unknown"
+}
+
+func (v Version) BucketPath() string {
+ if v == GA {
+ return ""
+ }
+ return v.String() + "/"
+}
+
+func (v Version) RepoName() string {
+ switch v {
+ case GA:
+ return "terraform-provider-google"
+ case Beta:
+ return "terraform-provider-google-beta"
+ }
+ return "unknown"
+}
diff --git a/.ci/magician/source/repo.go b/.ci/magician/source/repo.go
new file mode 100644
index 000000000000..20b7a4f52ec4
--- /dev/null
+++ b/.ci/magician/source/repo.go
@@ -0,0 +1,85 @@
+package source
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+)
+
+type Repo struct {
+ Name string // Name in GitHub (e.g. magic-modules)
+ Title string // Title for display (e.g. Magic Modules)
+ Branch string // Branch to clone, optional
+ Path string // local Path once cloned, including Name
+ DiffCanFail bool // whether to allow the command to continue if cloning or diffing the repo fails
+}
+
+type Controller struct {
+ rnr Runner
+ username string
+ token string
+ goPath string
+}
+
+type Runner interface {
+ PushDir(path string) error
+ PopDir() error
+ Run(name string, args []string, env map[string]string) (string, error)
+}
+
+func NewController(goPath, username, token string, rnr Runner) *Controller {
+ return &Controller{
+ rnr: rnr,
+ username: username,
+ token: token,
+ goPath: goPath,
+ }
+}
+
+func (gc Controller) SetPath(repo *Repo) {
+ repo.Path = filepath.Join(gc.goPath, "src", "github.com", gc.username, repo.Name)
+}
+
+func (gc Controller) Clone(repo *Repo) error {
+ var err error
+ url := fmt.Sprintf("https://%s:%s@github.com/%s/%s", gc.username, gc.token, gc.username, repo.Name)
+ if repo.Branch == "" {
+ _, err = gc.rnr.Run("git", []string{"clone", url, repo.Path}, nil)
+ } else {
+ _, err = gc.rnr.Run("git", []string{"clone", "-b", repo.Branch, url, repo.Path}, nil)
+ }
+ if err != nil {
+ if strings.Contains(err.Error(), "already exists and is not an empty directory") {
+ return nil
+ }
+ }
+ return err
+}
+
+func (gc Controller) Fetch(repo *Repo, branch string) error {
+ if err := gc.rnr.PushDir(repo.Path); err != nil {
+ return err
+ }
+ if _, err := gc.rnr.Run("git", []string{"fetch", "origin", branch}, nil); err != nil {
+ return fmt.Errorf("error fetching branch %s in repo %s: %v\n", branch, repo.Name, err)
+ }
+ return gc.rnr.PopDir()
+}
+
+func (gc Controller) Diff(repo *Repo, oldBranch, newBranch string) (string, error) {
+ if err := gc.rnr.PushDir(repo.Path); err != nil {
+ return "", err
+ }
+ diffs, err := gc.rnr.Run("git", []string{"diff", "origin/" + oldBranch, "origin/" + newBranch, "--shortstat"}, nil)
+ if err != nil {
+ return "", fmt.Errorf("error diffing %s and %s: %v", oldBranch, newBranch, err)
+ }
+ return diffs, gc.rnr.PopDir()
+}
+
+func (gc Controller) Cleanup(repo *Repo) error {
+ if _, err := gc.rnr.Run("rm", []string{"-rf", repo.Path}, nil); err != nil {
+ return err
+ }
+ return nil
+}
diff --git a/.ci/scripts/go-plus/magician/exec.sh b/.ci/scripts/go-plus/magician/exec.sh
index acd9714f09bd..a0cb4078e6b6 100755
--- a/.ci/scripts/go-plus/magician/exec.sh
+++ b/.ci/scripts/go-plus/magician/exec.sh
@@ -4,7 +4,7 @@
DIR="$(dirname $(realpath $0))"
# Construct the path to the Go program directory and binary
-GO_PROGRAM_DIR="$DIR/../../../magician/"
+GO_PROGRAM_DIR="$DIR/../../../magician"
GO_BINARY="$GO_PROGRAM_DIR/magician_binary"
pushd $GO_PROGRAM_DIR
diff --git a/.gitignore b/.gitignore
index 76a5461db4c2..4aa28d6971f5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -52,3 +52,6 @@ mmv1/build/*
tpgtools/temp.serial
tpgtools/serialization.go
+
+# ignore magician binary
+.ci/magician/magician