diff --git a/google/resource_cloudfunctions_function_test.go b/google/resource_cloudfunctions_function_test.go
index 0b600aab..a2403de1 100644
--- a/google/resource_cloudfunctions_function_test.go
+++ b/google/resource_cloudfunctions_function_test.go
@@ -239,6 +239,7 @@ func TestAccCloudFunctionsFunction_sourceRepo(t *testing.T) {
 	funcResourceName := "google_cloudfunctions_function.function"
 	functionName := fmt.Sprintf("tf-test-%s", acctest.RandString(10))
+	proj := getTestProjectFromEnv()
 
 	resource.Test(t, resource.TestCase{
 		PreCheck:     func() { testAccPreCheck(t) },
@@ -246,7 +247,7 @@ func TestAccCloudFunctionsFunction_sourceRepo(t *testing.T) {
 		CheckDestroy: testAccCheckCloudFunctionsFunctionDestroy,
 		Steps: []resource.TestStep{
 			{
-				Config: testAccCloudFunctionsFunction_sourceRepo(functionName),
+				Config: testAccCloudFunctionsFunction_sourceRepo(functionName, proj),
 			},
 			{
 				ResourceName:      funcResourceName,
@@ -588,7 +589,7 @@ resource "google_cloudfunctions_function" "function" {
 }`, bucketName, zipFilePath, functionName)
 }
 
-func testAccCloudFunctionsFunction_sourceRepo(functionName string) string {
+func testAccCloudFunctionsFunction_sourceRepo(functionName, project string) string {
 	return fmt.Sprintf(`
 resource "google_cloudfunctions_function" "function" {
 	name = "%s"
@@ -596,13 +597,13 @@ resource "google_cloudfunctions_function" "function" {
 	source_repository {
 		// There isn't yet an API that'll allow us to create a source repository and
 		// put code in it, so we created this repository outside the test to be used
-		// here. If this test is run outside of CI, it may fail because of permissions
-		// errors.
-		url = "https://source.developers.google.com/projects/hc-terraform-testing/repos/cloudfunctions-test-do-not-delete/moveable-aliases/master/paths/"
+		// here. If this test is run outside of CI, you may need to create your own
+		// source repo.
+		url = "https://source.developers.google.com/projects/%s/repos/cloudfunctions-test-do-not-delete/moveable-aliases/master/paths/"
 	}
 
 	trigger_http = true
 	entry_point  = "helloGET"
 }
-`, functionName)
+`, functionName, project)
 }
diff --git a/google/resource_compute_disk.go b/google/resource_compute_disk.go
index 3e18a5e4..325986d7 100644
--- a/google/resource_compute_disk.go
+++ b/google/resource_compute_disk.go
@@ -223,9 +223,9 @@ func suppressWindowsSqlFamilyDiff(imageName, familyName string) bool {
 // e.g. image: windows-server-1709-dc-core-for-containers-v20180109, family: "windows-1709-core-for-containers
 func suppressWindowsFamilyDiff(imageName, familyName string) bool {
 	updatedFamilyString := strings.Replace(familyName, "windows-", "windows-server-", 1)
-	updatedFamilyString = strings.Replace(updatedFamilyString, "-core", "-dc-core", 1)
+	updatedImageName := strings.Replace(imageName, "-dc-", "-", 1)
 
-	if strings.Contains(imageName, updatedFamilyString) {
+	if strings.Contains(updatedImageName, updatedFamilyString) {
 		return true
 	}
 
diff --git a/google/resource_compute_instance_migrate.go b/google/resource_compute_instance_migrate.go
index 53c21321..313def41 100644
--- a/google/resource_compute_instance_migrate.go
+++ b/google/resource_compute_instance_migrate.go
@@ -218,9 +218,12 @@ func migrateStateV3toV4(is *terraform.InstanceState, meta interface{}) (*terrafo
 		}
 	}
 
-	disks, err := strconv.Atoi(is.Attributes["disk.#"])
-	if err != nil {
-		return is, fmt.Errorf("migration error: found disk.# value in unexpected format: %s", err)
+	disks := 0
+	if v := is.Attributes["disk.#"]; v != "" {
+		disks, err = strconv.Atoi(is.Attributes["disk.#"])
+		if err != nil {
+			return is, fmt.Errorf("migration error: found disk.# value in unexpected format: %s", err)
+		}
 	}
 
 	for i := 0; i < disks; i++ {
@@ -320,7 +323,11 @@ func getInstanceFromInstanceState(config *Config, is *terraform.InstanceState) (
 
 	zone, ok := is.Attributes["zone"]
 	if !ok {
-		return nil, fmt.Errorf("could not determine 'zone'")
+		if config.Zone == "" {
+			return nil, fmt.Errorf("could not determine 'zone'")
+		} else {
+			zone = config.Zone
+		}
 	}
 
 	instance, err := config.clientCompute.Instances.Get(
@@ -344,7 +351,11 @@ func getAllDisksFromInstanceState(config *Config, is *terraform.InstanceState) (
 
 	zone, ok := is.Attributes["zone"]
 	if !ok {
-		return nil, fmt.Errorf("could not determine 'zone'")
+		if config.Zone == "" {
+			return nil, fmt.Errorf("could not determine 'zone'")
+		} else {
+			zone = config.Zone
+		}
 	}
 
 	diskList := []*compute.Disk{}
diff --git a/google/resource_compute_instance_migrate_test.go b/google/resource_compute_instance_migrate_test.go
index 871e22b0..65ba697e 100644
--- a/google/resource_compute_instance_migrate_test.go
+++ b/google/resource_compute_instance_migrate_test.go
@@ -25,12 +25,14 @@ func TestComputeInstanceMigrateState(t *testing.T) {
 		"v0.4.2 and earlier": {
 			StateVersion: 0,
 			Attributes: map[string]string{
+				"disk.#":               "0",
 				"metadata.#":           "2",
 				"metadata.0.foo":       "bar",
 				"metadata.1.baz":       "qux",
 				"metadata.2.with.dots": "should.work",
 			},
 			Expected: map[string]string{
+				"create_timeout":     "4",
 				"metadata.foo":       "bar",
 				"metadata.baz":       "qux",
 				"metadata.with.dots": "should.work",
@@ -77,8 +79,37 @@ func TestComputeInstanceMigrateState(t *testing.T) {
 	}
 
 	config := getInitializedConfig(t)
+
+	instanceName := fmt.Sprintf("instance-test-%s", acctest.RandString(10))
+	instance := &compute.Instance{
+		Name: instanceName,
+		Disks: []*compute.AttachedDisk{
+			{
+				Boot: true,
+				InitializeParams: &compute.AttachedDiskInitializeParams{
+					SourceImage: "projects/debian-cloud/global/images/family/debian-9",
+				},
+			},
+		},
+		MachineType: "zones/" + config.Zone + "/machineTypes/n1-standard-1",
+		NetworkInterfaces: []*compute.NetworkInterface{
+			{
+				Network: "global/networks/default",
+			},
+		},
+	}
+	op, err := config.clientCompute.Instances.Insert(config.Project, config.Zone, instance).Do()
+	if err != nil {
+		t.Fatalf("Error creating instance: %s", err)
+	}
+	waitErr := computeSharedOperationWait(config.clientCompute, op, config.Project, "instance to create")
+	if waitErr != nil {
+		t.Fatal(waitErr)
+	}
+	defer cleanUpInstance(config, instanceName, config.Zone)
+
 	for tn, tc := range cases {
-		runInstanceMigrateTest(t, "i-abc123", tn, tc.StateVersion, tc.Attributes, tc.Expected, config)
+		runInstanceMigrateTest(t, instanceName, tn, tc.StateVersion, tc.Attributes, tc.Expected, config)
 	}
 }
 
@@ -868,6 +899,7 @@ func getInitializedConfig(t *testing.T) *Config {
 		Project:     getTestProjectFromEnv(),
 		Credentials: getTestCredsFromEnv(),
 		Region:      getTestRegionFromEnv(),
+		Zone:        getTestZoneFromEnv(),
 	}
 	err := config.loadAndValidate()
 	if err != nil {
diff --git a/google/resource_dataproc_cluster_test.go b/google/resource_dataproc_cluster_test.go
index 9fa74418..02bf6040 100644
--- a/google/resource_dataproc_cluster_test.go
+++ b/google/resource_dataproc_cluster_test.go
@@ -870,7 +870,7 @@ resource "google_dataproc_cluster" "with_config_overrides" {
 			machine_type = "n1-standard-1"
 			disk_config {
 				boot_disk_type    = "pd-standard"
-				boot_disk_size_gb = 11
+				boot_disk_size_gb = 16
 				num_local_ssds    = 1
 			}
 		}
@@ -879,7 +879,7 @@ resource "google_dataproc_cluster" "with_config_overrides" {
 			num_instances = 1
 			disk_config {
 				boot_disk_type    = "pd-ssd"
-				boot_disk_size_gb = 12
+				boot_disk_size_gb = 17
 				num_local_ssds    = 1
 			}
 		}
diff --git a/google/resource_dataproc_job_test.go b/google/resource_dataproc_job_test.go
index 9128d6f2..de971945 100644
--- a/google/resource_dataproc_job_test.go
+++ b/google/resource_dataproc_job_test.go
@@ -608,12 +608,12 @@ func testAccDataprocJob_hadoop(rnd string) string {
 		args = [
 			"wordcount",
 			"file:///usr/lib/spark/NOTICE",
-			"gs://${google_dataproc_cluster.basic.cluster_config.0.bucket}/hadoopjob_output"
+			"gs://${google_dataproc_cluster.basic.cluster_config.0.bucket}/hadoopjob_output_%s"
 		]
 	}
 }
-	`, rnd)
+	`, rnd, rnd)
 }
 
diff --git a/google/resource_sql_ssl_cert_test.go b/google/resource_sql_ssl_cert_test.go
index 7d605f87..f0fe4735 100644
--- a/google/resource_sql_ssl_cert_test.go
+++ b/google/resource_sql_ssl_cert_test.go
@@ -126,6 +126,11 @@ func testGoogleSqlClientCert_postgres(instance string) string {
 	settings {
 		tier = "db-f1-micro"
 	}
+
+	timeouts {
+		create = "20m"
+		delete = "20m"
+	}
 }
 
 resource "google_sql_ssl_cert" "cert" {