diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go index f0e1747ce96b..148ab8c0af82 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go @@ -69,6 +69,7 @@ func ResourceStorageTransferJob() *schema.Resource { Schema: map[string]*schema.Schema{ "name": { Type: schema.TypeString, + Optional: true, Computed: true, Description: `The name of the Transfer Job.`, }, @@ -594,6 +595,7 @@ func resourceStorageTransferJobCreate(d *schema.ResourceData, meta interface{}) } transferJob := &storagetransfer.TransferJob{ + Name: d.Get("name").(string), Description: d.Get("description").(string), ProjectId: project, Status: d.Get("status").(string), diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go index 60a30559d84d..e434e04ac672 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go @@ -78,6 +78,31 @@ func TestAccStorageTransferJob_basic(t *testing.T) { }) } +func TestAccStorageTransferJob_transferJobName(t *testing.T) { + t.Parallel() + + testDataSourceBucketName := acctest.RandString(t, 10) + testDataSinkName := acctest.RandString(t, 10) + testTransferJobDescription := acctest.RandString(t, 10) + testTransferJobName := fmt.Sprintf("tf-test-transfer-job-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageTransferJobDestroyProducer(t), + Steps: []resource.TestStep{ + { 
+ Config: testAccStorageTransferJob_transferJobName(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testTransferJobName), + }, + { + ResourceName: "google_storage_transfer_job.transfer_job", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func TestAccStorageTransferJob_omitScheduleEndDate(t *testing.T) { t.Parallel() @@ -701,6 +726,84 @@ resource "google_storage_transfer_job" "transfer_job" { `, project, dataSourceBucketName, project, dataSinkBucketName, project, pubsubTopicName, transferJobDescription, project) } +func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketName string, dataSinkBucketName string, transferJobDescription string, testTransferJobName string) string { + return fmt.Sprintf(` + data "google_storage_transfer_project_service_account" "default" { + project = "%s" + } + + resource "google_storage_bucket" "data_source" { + name = "%s" + project = "%s" + location = "US" + force_destroy = true + uniform_bucket_level_access = true + } + + resource "google_storage_bucket_iam_member" "data_source" { + bucket = google_storage_bucket.data_source.name + role = "roles/storage.admin" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" + } + + resource "google_storage_bucket" "data_sink" { + name = "%s" + project = "%s" + location = "US" + force_destroy = true + uniform_bucket_level_access = true + } + + resource "google_storage_bucket_iam_member" "data_sink" { + bucket = google_storage_bucket.data_sink.name + role = "roles/storage.admin" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" + } + + resource "google_storage_transfer_job" "transfer_job" { + name = "transferJobs/%s" + description = "%s" + project = "%s" + + transfer_spec { + gcs_data_source { + bucket_name = google_storage_bucket.data_source.name + path = "foo/bar/" + } + gcs_data_sink { + bucket_name = 
google_storage_bucket.data_sink.name + path = "foo/bar/" + } + } + + schedule { + schedule_start_date { + year = 2018 + month = 10 + day = 1 + } + schedule_end_date { + year = 2019 + month = 10 + day = 1 + } + start_time_of_day { + hours = 0 + minutes = 30 + seconds = 0 + nanos = 0 + } + repeat_interval = "604800s" + } + + depends_on = [ + google_storage_bucket_iam_member.data_source, + google_storage_bucket_iam_member.data_sink, + ] + } + `, project, dataSourceBucketName, project, dataSinkBucketName, project, testTransferJobName, transferJobDescription, project) +} + func testAccStorageTransferJob_omitScheduleEndDate(project string, dataSourceBucketName string, dataSinkBucketName string, transferJobDescription string) string { return fmt.Sprintf(` data "google_storage_transfer_project_service_account" "default" { project = "%s" } diff --git a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown index b9eafb0d2d3c..c354672672d2 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown @@ -112,6 +112,8 @@ resource "google_storage_transfer_job" "s3-bucket-nightly-backup" { The following arguments are supported: +* `name` - (Optional) The name of the Transfer Job. This name must start with the "transferJobs/" prefix, end with a letter or a number, and be no more than 128 characters (`transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$`). For transfers involving PosixFilesystem, this name must start with `transferJobs/OPI` specifically (`transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$`). For all other transfer types, this name must not start with `transferJobs/OPI`. By default, the provider will assign a random unique name in the `transferJobs/{{name}}` format, where `name` is a numeric value. + * `description` - (Required) Unique description to identify the Transfer Job.
* `transfer_spec` - (Required) Transfer specification. Structure [documented below](#nested_transfer_spec).