Detect changes to cloud storage object when using source field. (#789)

* Detect server or local changes to bucket object when using source field

* Improve tests by removing global err and tf vars

* Address Dana's comments
This commit is contained in:
Vincent Roseberry 2017-11-27 16:25:32 -08:00 committed by GitHub
parent 4794245a1e
commit a9fd5fddcb
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 116 additions and 47 deletions

View File

@ -810,7 +810,7 @@ func runInstanceMigrateTest(t *testing.T, id, testName string, version int, attr
ID: id,
Attributes: attributes,
}
is, err = resourceComputeInstanceMigrateState(version, is, meta)
is, err := resourceComputeInstanceMigrateState(version, is, meta)
if err != nil {
t.Fatal(err)
}

View File

@ -965,6 +965,7 @@ func checkMapMatch(attributes map[string]string, attr string, gcpMap map[string]
func checkBoolMatch(attributes map[string]string, attr string, gcpBool bool) string {
// Handle the case where an unset value defaults to false
var tf bool
var err error
if attributes[attr] == "" {
tf = false
} else {

View File

@ -9,8 +9,11 @@ import (
"github.com/hashicorp/terraform/helper/schema"
"crypto/md5"
"encoding/base64"
"google.golang.org/api/googleapi"
"google.golang.org/api/storage/v1"
"io/ioutil"
)
func resourceStorageBucketObject() *schema.Resource {
@ -77,7 +80,28 @@ func resourceStorageBucketObject() *schema.Resource {
"md5hash": &schema.Schema{
Type: schema.TypeString,
Computed: true,
Optional: true,
ForceNew: true,
// Makes the diff message nicer:
// md5hash: "1XcnP/iFw/hNrbhXi7QTmQ==" => "different hash" (forces new resource)
// Instead of the more confusing:
// md5hash: "1XcnP/iFw/hNrbhXi7QTmQ==" => "" (forces new resource)
Default: "different hash",
// If `content` is used, always suppress the diff. Equivalent to Computed behavior.
// If `source` is used:
// 1. Compute the md5 hash of the local file
// 2. Compare the computed md5 hash with the hash stored in Cloud Storage
// 3. Don't suppress the diff iff they don't match
DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
// `old` is the md5 hash we retrieved from the server in the ReadFunc
if source, ok := d.GetOkExists("source"); ok {
if old != getFileMd5Hash(source.(string)) {
return false
}
}
return true
},
},
"predefined_acl": &schema.Schema{
@ -221,3 +245,15 @@ func resourceStorageBucketObjectDelete(d *schema.ResourceData, meta interface{})
return nil
}
func getFileMd5Hash(filename string) string {
data, err := ioutil.ReadFile(filename)
if err != nil {
log.Printf("[WARN] Failed to read source file %q. Cannot compute md5 hash for it.", filename)
return ""
}
h := md5.New()
h.Write(data)
return base64.StdEncoding.EncodeToString(h.Sum(nil))
}

View File

@ -11,12 +11,13 @@ import (
"github.com/hashicorp/terraform/terraform"
"google.golang.org/api/storage/v1"
"os"
)
var tf, err = ioutil.TempFile("", "tf-gce-test")
var bucketName = "tf-gce-bucket-test"
var objectName = "tf-gce-test"
var content = "now this is content!"
const (
objectName = "tf-gce-test"
content = "now this is content!"
)
func TestAccGoogleStorageObject_basic(t *testing.T) {
t.Parallel()
@ -27,25 +28,62 @@ func TestAccGoogleStorageObject_basic(t *testing.T) {
h.Write(data)
data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
ioutil.WriteFile(tf.Name(), data, 0644)
testFile := getNewTmpTestFile(t, "tf-test")
ioutil.WriteFile(testFile.Name(), data, 0644)
resource.Test(t, resource.TestCase{
PreCheck: func() {
if err != nil {
panic(err)
}
testAccPreCheck(t)
},
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccGoogleStorageObjectDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: testGoogleStorageBucketsObjectBasic(bucketName),
Config: testGoogleStorageBucketsObjectBasic(bucketName, testFile.Name()),
Check: testAccCheckGoogleStorageObject(bucketName, objectName, data_md5),
},
},
})
}
// TestAccGoogleStorageObject_recreate verifies that changing the content of
// the local file referenced by `source` between applies is detected (via the
// md5 hash) and forces the bucket object to be recreated.
func TestAccGoogleStorageObject_recreate(t *testing.T) {
	t.Parallel()

	bucketName := testBucketName()

	// writeFile stores data at name and returns the base64-encoded md5
	// hash of the written content, failing the test if the write fails.
	writeFile := func(name string, data []byte) string {
		h := md5.New()
		h.Write(data)
		data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
		if err := ioutil.WriteFile(name, data, 0644); err != nil {
			t.Fatalf("Failed to write file %q: %s", name, err)
		}
		return data_md5
	}
	testFile := getNewTmpTestFile(t, "tf-test")
	data_md5 := writeFile(testFile.Name(), []byte("data data data"))
	updatedName := testFile.Name() + ".update"
	updated_data_md5 := writeFile(updatedName, []byte("datum"))

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccGoogleStorageObjectDestroy,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: testGoogleStorageBucketsObjectBasic(bucketName, testFile.Name()),
				Check:  testAccCheckGoogleStorageObject(bucketName, objectName, data_md5),
			},
			resource.TestStep{
				PreConfig: func() {
					// Swap in the updated content so the next plan sees a
					// different md5 hash and must recreate the object.
					err := os.Rename(updatedName, testFile.Name())
					if err != nil {
						t.Errorf("Failed to rename %s to %s: %s", updatedName, testFile.Name(), err)
					}
				},
				Config: testGoogleStorageBucketsObjectBasic(bucketName, testFile.Name()),
				Check:  testAccCheckGoogleStorageObject(bucketName, objectName, updated_data_md5),
			},
		},
	})
}
func TestAccGoogleStorageObject_content(t *testing.T) {
t.Parallel()
@ -55,14 +93,10 @@ func TestAccGoogleStorageObject_content(t *testing.T) {
h.Write(data)
data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
ioutil.WriteFile(tf.Name(), data, 0644)
testFile := getNewTmpTestFile(t, "tf-test")
ioutil.WriteFile(testFile.Name(), data, 0644)
resource.Test(t, resource.TestCase{
PreCheck: func() {
if err != nil {
panic(err)
}
testAccPreCheck(t)
},
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccGoogleStorageObjectDestroy,
Steps: []resource.TestStep{
@ -88,16 +122,12 @@ func TestAccGoogleStorageObject_withContentCharacteristics(t *testing.T) {
h := md5.New()
h.Write(data)
data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
ioutil.WriteFile(tf.Name(), data, 0644)
testFile := getNewTmpTestFile(t, "tf-test")
ioutil.WriteFile(testFile.Name(), data, 0644)
disposition, encoding, language, content_type := "inline", "compress", "en", "binary/octet-stream"
resource.Test(t, resource.TestCase{
PreCheck: func() {
if err != nil {
panic(err)
}
testAccPreCheck(t)
},
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccGoogleStorageObjectDestroy,
Steps: []resource.TestStep{
@ -128,21 +158,17 @@ func TestAccGoogleStorageObject_cacheControl(t *testing.T) {
h := md5.New()
h.Write(data)
data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
ioutil.WriteFile(tf.Name(), data, 0644)
testFile := getNewTmpTestFile(t, "tf-test")
ioutil.WriteFile(testFile.Name(), data, 0644)
cacheControl := "private"
resource.Test(t, resource.TestCase{
PreCheck: func() {
if err != nil {
panic(err)
}
testAccPreCheck(t)
},
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccGoogleStorageObjectDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: testGoogleStorageBucketsObject_cacheControl(bucketName, cacheControl),
Config: testGoogleStorageBucketsObject_cacheControl(bucketName, testFile.Name(), cacheControl),
Check: resource.ComposeTestCheckFunc(
testAccCheckGoogleStorageObject(bucketName, objectName, data_md5),
resource.TestCheckResourceAttr(
@ -161,16 +187,12 @@ func TestAccGoogleStorageObject_storageClass(t *testing.T) {
h := md5.New()
h.Write(data)
data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
ioutil.WriteFile(tf.Name(), data, 0644)
testFile := getNewTmpTestFile(t, "tf-test")
ioutil.WriteFile(testFile.Name(), data, 0644)
storageClass := "MULTI_REGIONAL"
resource.Test(t, resource.TestCase{
PreCheck: func() {
if err != nil {
panic(err)
}
testAccPreCheck(t)
},
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccGoogleStorageObjectDestroy,
Steps: []resource.TestStep{
@ -245,7 +267,7 @@ resource "google_storage_bucket_object" "object" {
`, bucketName, objectName, content)
}
func testGoogleStorageBucketsObjectBasic(bucketName string) string {
func testGoogleStorageBucketsObjectBasic(bucketName, sourceFilename string) string {
return fmt.Sprintf(`
resource "google_storage_bucket" "bucket" {
name = "%s"
@ -256,7 +278,7 @@ resource "google_storage_bucket_object" "object" {
bucket = "${google_storage_bucket.bucket.name}"
source = "%s"
}
`, bucketName, objectName, tf.Name())
`, bucketName, objectName, sourceFilename)
}
func testGoogleStorageBucketsObject_optionalContentFields(
@ -278,7 +300,7 @@ resource "google_storage_bucket_object" "object" {
`, bucketName, objectName, content, disposition, encoding, language, content_type)
}
func testGoogleStorageBucketsObject_cacheControl(bucketName, cacheControl string) string {
func testGoogleStorageBucketsObject_cacheControl(bucketName, sourceFilename, cacheControl string) string {
return fmt.Sprintf(`
resource "google_storage_bucket" "bucket" {
name = "%s"
@ -290,7 +312,7 @@ resource "google_storage_bucket_object" "object" {
source = "%s"
cache_control = "%s"
}
`, bucketName, objectName, tf.Name(), cacheControl)
`, bucketName, objectName, sourceFilename, cacheControl)
}
func testGoogleStorageBucketsObject_storageClass(bucketName string, storageClass string) string {
@ -307,3 +329,13 @@ resource "google_storage_bucket_object" "object" {
}
`, bucketName, objectName, content, storageClass)
}
// Creates a new tmp test file. Fails the current test if we cannot create
// new tmp file in the filesystem.
func getNewTmpTestFile(t *testing.T, prefix string) *os.File {
testFile, err := ioutil.TempFile("", prefix)
if err != nil {
t.Fatalf("Cannot create temp file: %s", err)
}
return testFile
}