Fix golangci-lint errors (#3059)

Signed-off-by: Modular Magician <magic-modules@google.com>
Authored by The Magician on 2019-02-14 16:00:35 -08:00; committed by Paddy
parent 25cac70b17
commit 23db6b0c12
40 changed files with 181 additions and 91 deletions
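
Nearly every hunk below applies the same errcheck-driven fix: a call whose error return was silently dropped now gets an explicit check, and the error is propagated (or, in tests, reported). A minimal before/after sketch of that pattern, using a hypothetical helper rather than any function from this provider:

package main

import (
	"errors"
	"fmt"
)

// doSomething is a hypothetical stand-in for any call whose error return
// used to be discarded (e.g. parseImportId, Convert, disableService below).
func doSomething(fail bool) error {
	if fail {
		return errors.New("boom")
	}
	return nil
}

func apply(fail bool) error {
	// before (reported by errcheck): doSomething(fail)
	// after: check the error and propagate it to the caller.
	if err := doSomething(fail); err != nil {
		return fmt.Errorf("error doing something: %v", err)
	}
	return nil
}

func main() {
	if err := apply(true); err != nil {
		fmt.Println(err)
	}
}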

View File

@ -1,21 +0,0 @@
{
"Deadline": "10m",
"Enable": [
"gofmt",
"misspell",
"staticcheck",
"structcheck",
"unconvert",
"varcheck",
"vet"
],
"EnableGC": true,
"Linters": {
},
"Sort": [
"path",
"line"
],
"Vendor": true,
"WarnUnmatchedDirective": true
}
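
For context: the keys in the deleted JSON above (Deadline, Enable, EnableGC, Vendor, WarnUnmatchedDirective) appear to be gometalinter settings, so this hunk retires the old gometalinter configuration as part of the move to golangci-lint.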

View File

@ -66,7 +66,9 @@ func TestSetOmittedFields(t *testing.T) {
},
}
output := &OutputOuter{}
Convert(input, output)
if err := Convert(input, output); err != nil {
t.Errorf("Error converting: %v", err)
}
if input.NotOmitted != output.NotOmitted ||
!reflect.DeepEqual(input.Omitted, output.Omitted) ||
!reflect.DeepEqual(input.Struct, output.Struct) ||

View File

@ -141,6 +141,9 @@ func testAccCheckDataSourceComputeAddressDestroy(resource_name string) resource.
}
addressId, err := parseComputeAddressId(rs.Primary.ID, nil)
if err != nil {
return err
}
_, err = config.clientCompute.Addresses.Get(
config.Project, addressId.Region, addressId.Name).Do()

View File

@ -348,7 +348,9 @@ func SignString(toSign []byte, cfg *jwt.Config) ([]byte, error) {
// Hash string
hasher := sha256.New()
hasher.Write(toSign)
if _, err := hasher.Write(toSign); err != nil {
return nil, errwrap.Wrapf("failed to calculate sha256: {{err}}", err)
}
// Sign string
signed, err := rsa.SignPKCS1v15(rand.Reader, pk, crypto.SHA256, hasher.Sum(nil))
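
hash.Hash's Write method is documented never to return an error, but errcheck still flags the discarded return value, which is why the hunk above checks it anyway. A self-contained sketch of the same pattern; it wraps the error with fmt.Errorf instead of the errwrap helper used in the diff:

package main

import (
	"crypto/sha256"
	"fmt"
)

func sum(toSign []byte) ([]byte, error) {
	hasher := sha256.New()
	// Write on a hash.Hash is documented to never fail, but the return value
	// is checked so the linter (and any future reader) sees it handled.
	if _, err := hasher.Write(toSign); err != nil {
		return nil, fmt.Errorf("failed to calculate sha256: %v", err)
	}
	return hasher.Sum(nil), nil
}

func main() {
	digest, err := sum([]byte("payload"))
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Printf("%x\n", digest)
}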

View File

@ -476,7 +476,9 @@ func resourceComposerEnvironmentDelete(d *schema.ResourceData, meta interface{})
func resourceComposerEnvironmentImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
config := meta.(*Config)
parseImportId([]string{"projects/(?P<project>[^/]+)/locations/(?P<region>[^/]+)/environments/(?P<name>[^/]+)", "(?P<project>[^/]+)/(?P<region>[^/]+)/(?P<name>[^/]+)", "(?P<name>[^/]+)"}, d, config)
if err := parseImportId([]string{"projects/(?P<project>[^/]+)/locations/(?P<region>[^/]+)/environments/(?P<name>[^/]+)", "(?P<project>[^/]+)/(?P<region>[^/]+)/(?P<name>[^/]+)", "(?P<name>[^/]+)"}, d, config); err != nil {
return nil, err
}
// Replace import id for the resource id
id, err := replaceVars(d, config, "{{project}}/{{region}}/{{name}}")

View File

@ -278,6 +278,9 @@ func resourceComputeBackendServiceCreate(d *schema.ResourceData, meta interface{
if v, ok := d.GetOk("security_policy"); ok {
pol, err := ParseSecurityPolicyFieldValue(v.(string), d, config)
if err != nil {
return errwrap.Wrapf("Error parsing Backend Service security policy: {{err}}", err)
}
op, err := config.clientComputeBeta.BackendServices.SetSecurityPolicy(
project, service.Name, &computeBeta.SecurityPolicyReference{
SecurityPolicy: pol.RelativeLink(),

View File

@ -73,7 +73,7 @@ func TestComputeFirewallMigrateState_empty(t *testing.T) {
// should handle non-nil but empty
is = &terraform.InstanceState{}
is, err = resourceComputeFirewallMigrateState(0, is, meta)
_, err = resourceComputeFirewallMigrateState(0, is, meta)
if err != nil {
t.Fatalf("err: %#v", err)
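
The migrate-state test fixes in this commit (here and in the later instance, instance-template, container-cluster, node-pool, project, and SQL user tests) all blank an assignment whose result is never read again, which ineffassign reports. A small stand-alone illustration with a hypothetical migrate function:

package main

import "fmt"

// migrate is a hypothetical stand-in for a resource*MigrateState function.
func migrate(state string) (string, error) {
	return state + "-v1", nil
}

func main() {
	state := "empty"

	// before: state, err := migrate(state), with state never read afterwards,
	// is reported as an ineffectual assignment.
	// after: discard the unused result explicitly.
	_, err := migrate(state)
	if err != nil {
		fmt.Println("err:", err)
		return
	}
	fmt.Println("migration call succeeded")
}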

View File

@ -464,6 +464,9 @@ func resourceComputeHealthCheckUpdate(d *schema.ResourceData, meta interface{})
}
obj, err = resourceComputeHealthCheckEncoder(d, meta, obj)
if err != nil {
return err
}
url, err := replaceVars(d, config, "https://www.googleapis.com/compute/v1/projects/{{project}}/global/healthChecks/{{name}}")
if err != nil {

View File

@ -1024,8 +1024,6 @@ func resourceComputeInstanceUpdate(d *schema.ResourceData, meta interface{}) err
instNetworkInterface := instance.NetworkInterfaces[i]
networkName := d.Get(prefix + ".name").(string)
// TODO: This sanity check is broken by #929, disabled for now (by forcing the equality)
networkName = instNetworkInterface.Name
// Sanity check
if networkName != instNetworkInterface.Name {
return fmt.Errorf("Instance networkInterface had unexpected name: %s", instNetworkInterface.Name)

View File

@ -586,6 +586,9 @@ func resourceComputeInstanceGroupManagerUpdate(d *schema.ResourceData, meta inte
updateStrategy := d.Get("update_strategy").(string)
err = performZoneUpdate(config, name, updateStrategy, project, zone)
if err != nil {
return err
}
d.SetPartial("instance_template")
}
@ -630,9 +633,9 @@ func resourceComputeInstanceGroupManagerDelete(d *schema.ResourceData, meta inte
return err
}
instanceGroup, err := config.clientComputeBeta.InstanceGroups.Get(
instanceGroup, igErr := config.clientComputeBeta.InstanceGroups.Get(
project, zone, name).Do()
if err != nil {
if igErr != nil {
return fmt.Errorf("Error getting instance group size: %s", err)
}

View File

@ -132,7 +132,7 @@ func TestComputeInstanceMigrateState_empty(t *testing.T) {
// should handle non-nil but empty
is = &terraform.InstanceState{}
is, err = resourceComputeInstanceMigrateState(0, is, meta)
_, err = resourceComputeInstanceMigrateState(0, is, meta)
if err != nil {
t.Fatalf("err: %#v", err)
@ -841,7 +841,7 @@ func runInstanceMigrateTest(t *testing.T, id, testName string, version int, attr
ID: id,
Attributes: attributes,
}
is, err := resourceComputeInstanceMigrateState(version, is, meta)
_, err := resourceComputeInstanceMigrateState(version, is, meta)
if err != nil {
t.Fatal(err)
}

View File

@ -129,7 +129,7 @@ func TestComputeInstanceTemplateMigrateState_empty(t *testing.T) {
// should handle non-nil but empty
is = &terraform.InstanceState{}
is, err = resourceComputeInstanceTemplateMigrateState(0, is, meta)
_, err = resourceComputeInstanceTemplateMigrateState(0, is, meta)
if err != nil {
t.Fatalf("err: %#v", err)

View File

@ -571,6 +571,9 @@ func resourceComputeRegionInstanceGroupManagerDelete(d *schema.ResourceData, met
// Wait for the operation to complete
err = computeSharedOperationWaitTime(config.clientCompute, op, regionalID.Project, int(d.Timeout(schema.TimeoutDelete).Minutes()), "Deleting RegionInstanceGroupManager")
if err != nil {
return fmt.Errorf("Error waiting for delete to complete: %s", err)
}
d.SetId("")
return nil

View File

@ -280,6 +280,9 @@ func resourceComputeSslPolicyUpdate(d *schema.ResourceData, meta interface{}) er
}
obj, err = resourceComputeSslPolicyUpdateEncoder(d, meta, obj)
if err != nil {
return err
}
url, err := replaceVars(d, config, "https://www.googleapis.com/compute/v1/projects/{{project}}/global/sslPolicies/{{name}}")
if err != nil {

View File

@ -67,7 +67,7 @@ func TestContainerClusterMigrateState_empty(t *testing.T) {
// should handle non-nil but empty
is = &terraform.InstanceState{}
is, err = resourceContainerClusterMigrateState(0, is, meta)
_, err = resourceContainerClusterMigrateState(0, is, meta)
if err != nil {
t.Fatalf("err: %#v", err)

View File

@ -1,8 +1,9 @@
package google
import (
"github.com/hashicorp/terraform/terraform"
"testing"
"github.com/hashicorp/terraform/terraform"
)
func TestContainerNodePoolMigrateState(t *testing.T) {
@ -57,7 +58,7 @@ func TestContainerNodePoolMigrateState_empty(t *testing.T) {
// should handle non-nil but empty
is = &terraform.InstanceState{}
is, err = resourceContainerNodePoolMigrateState(0, is, meta)
_, err = resourceContainerNodePoolMigrateState(0, is, meta)
if err != nil {
t.Fatalf("err: %#v", err)
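
The import hunk at the top of this file, and the similar reshuffles in the runtimeconfig and storage object ACL files further down, settle on the conventional Go grouping: standard-library imports in the first group, third-party imports after a blank line, each group sorted, which is the layout gofmt and goimports produce. A sketch of that shape for a test file like this one:

package google

import (
	"testing"

	"github.com/hashicorp/terraform/terraform"
)

func TestImportGrouping(t *testing.T) {
	// "testing" (standard library) sits in the first group; the terraform
	// helper package follows in a separate, blank-line-delimited group.
	var is terraform.InstanceState
	if is.ID != "" {
		t.Fatalf("expected a zero-value state, got ID %q", is.ID)
	}
}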

View File

@ -323,8 +323,10 @@ func resourceDataprocJobDelete(d *schema.ResourceData, meta interface{}) error {
if forceDelete {
log.Printf("[DEBUG] Attempting to first cancel Dataproc job %s if it's still running ...", d.Id())
config.clientDataproc.Projects.Regions.Jobs.Cancel(
project, region, d.Id(), &dataproc.CancelJobRequest{}).Do()
if _, err := config.clientDataproc.Projects.Regions.Jobs.Cancel(
project, region, d.Id(), &dataproc.CancelJobRequest{}).Do(); err != nil {
return fmt.Errorf("Error canceling job: %v", err)
}
// ignore error if we get one - job may be finished already and not need to
// be cancelled. We do however wait for the state to be one that is
// at least not active

View File

@ -4,6 +4,7 @@ import (
"encoding/base64"
"encoding/json"
"errors"
"github.com/hashicorp/terraform/helper/schema"
"google.golang.org/api/servicemanagement/v1"
)
@ -228,7 +229,9 @@ func resourceEndpointsServiceUpdate(d *schema.ResourceData, meta interface{}) er
return err
}
var serviceConfig servicemanagement.SubmitConfigSourceResponse
json.Unmarshal(s, &serviceConfig)
if err := json.Unmarshal(s, &serviceConfig); err != nil {
return err
}
// Next, we create a new rollout with the new config value, and wait for it to complete.
rolloutService := servicemanagement.NewServicesRolloutsService(config.clientServiceMan)

View File

@ -62,7 +62,7 @@ func TestGoogleProjectMigrateState_empty(t *testing.T) {
// should handle non-nil but empty
is = &terraform.InstanceState{}
is, err = resourceGoogleProjectMigrateState(0, is, meta)
_, err = resourceGoogleProjectMigrateState(0, is, meta)
if err != nil {
t.Fatalf("err: %#v", err)

View File

@ -130,7 +130,9 @@ func resourceGoogleProjectServicesDelete(d *schema.ResourceData, meta interface{
config := meta.(*Config)
services := resourceServices(d)
for _, s := range services {
disableService(s, d.Id(), config, true)
if err := disableService(s, d.Id(), config, true); err != nil {
return err
}
}
d.SetId("")
return nil

View File

@ -44,7 +44,9 @@ func TestAccProjectServices_basic(t *testing.T) {
{
PreConfig: func() {
config := testAccProvider.Meta().(*Config)
enableService(oobService, pid, config)
if err := enableService(oobService, pid, config); err != nil {
t.Fatalf("Error enabling %q: %v", oobService, err)
}
},
Config: testAccProjectAssociateServicesBasic(services2, pid, pname, org),
Check: resource.ComposeTestCheckFunc(
@ -87,7 +89,9 @@ func TestAccProjectServices_authoritative(t *testing.T) {
{
PreConfig: func() {
config := testAccProvider.Meta().(*Config)
enableService(oobService, pid, config)
if err := enableService(oobService, pid, config); err != nil {
t.Fatalf("Error enabling %q: %v", oobService, err)
}
},
Config: testAccProjectAssociateServicesBasic(services, pid, pname, org),
Check: resource.ComposeTestCheckFunc(
@ -126,7 +130,9 @@ func TestAccProjectServices_authoritative2(t *testing.T) {
PreConfig: func() {
config := testAccProvider.Meta().(*Config)
for _, s := range oobServices {
enableService(s, pid, config)
if err := enableService(s, pid, config); err != nil {
t.Fatalf("Error enabling %q: %v", s, err)
}
}
},
Config: testAccProjectAssociateServicesBasic(services, pid, pname, org),

View File

@ -134,10 +134,12 @@ func resourceGoogleServiceAccountUpdate(d *schema.ResourceData, meta interface{}
func resourceGoogleServiceAccountImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
config := meta.(*Config)
parseImportId([]string{
if err := parseImportId([]string{
"projects/(?P<project>[^/]+)/serviceAccounts/(?P<email>[^/]+)",
"(?P<project>[^/]+)/(?P<email>[^/]+)",
"(?P<email>[^/]+)"}, d, config)
"(?P<email>[^/]+)"}, d, config); err != nil {
return nil, err
}
// Replace import id for the resource id
id, err := replaceVars(d, config, "projects/{{project}}/serviceAccounts/{{email}}")

View File

@ -541,7 +541,9 @@ func resourceMonitoringAlertPolicyImport(d *schema.ResourceData, meta interface{
config := meta.(*Config)
// current import_formats can't import id's with forward slashes in them.
parseImportId([]string{"(?P<name>.+)"}, d, config)
if err := parseImportId([]string{"(?P<name>.+)"}, d, config); err != nil {
return nil, err
}
return []*schema.ResourceData{d}, nil
}

View File

@ -261,7 +261,9 @@ func resourceMonitoringGroupImport(d *schema.ResourceData, meta interface{}) ([]
config := meta.(*Config)
// current import_formats can't import id's with forward slashes in them.
parseImportId([]string{"(?P<name>.+)"}, d, config)
if err := parseImportId([]string{"(?P<name>.+)"}, d, config); err != nil {
return nil, err
}
return []*schema.ResourceData{d}, nil
}

View File

@ -308,7 +308,9 @@ func resourceMonitoringNotificationChannelImport(d *schema.ResourceData, meta in
config := meta.(*Config)
// current import_formats can't import id's with forward slashes in them.
parseImportId([]string{"(?P<name>.+)"}, d, config)
if err := parseImportId([]string{"(?P<name>.+)"}, d, config); err != nil {
return nil, err
}
return []*schema.ResourceData{d}, nil
}

View File

@ -542,7 +542,9 @@ func resourceMonitoringUptimeCheckConfigImport(d *schema.ResourceData, meta inte
config := meta.(*Config)
// current import_formats can't import id's with forward slashes in them.
parseImportId([]string{"(?P<name>.+)"}, d, config)
if err := parseImportId([]string{"(?P<name>.+)"}, d, config); err != nil {
return nil, err
}
return []*schema.ResourceData{d}, nil
}

View File

@ -214,6 +214,10 @@ func resourceResourceManagerLienDelete(d *schema.ResourceData, meta interface{})
}
var obj map[string]interface{}
// log the old URL to make the ineffassign linter happy
// in theory, we should find a way to disable the default URL and not construct
// both, but that's a problem for another day. Today, we cheat.
log.Printf("[DEBUG] replacing URL %q with a custom delete URL", url)
url, err = replaceVars(d, config, "https://cloudresourcemanager.googleapis.com/v1/liens/{{name}}")
if err != nil {
return err
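
The comment above explains the trick: ineffassign flags a value that is assigned and then overwritten before ever being read, so the hunk logs the about-to-be-replaced URL to give that first assignment a use. A stripped-down, hypothetical illustration of the same workaround (function names and URLs invented for the example):

package main

import "log"

func buildDefaultURL() string { return "https://example.googleapis.com/v1/liens" }
func buildDeleteURL() string  { return "https://example.googleapis.com/v1/liens/my-lien" }

func main() {
	// On its own, this first assignment would be reported by ineffassign,
	// because url is overwritten below before it is ever read ...
	url := buildDefaultURL()

	// ... so, as in the hunk above, the old value is logged before being
	// replaced, which makes the assignment "used". Cheating, as the comment says.
	log.Printf("[DEBUG] replacing URL %q with a custom delete URL", url)
	url = buildDeleteURL()

	log.Printf("[DEBUG] DELETE %s", url)
}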

View File

@ -5,7 +5,7 @@ import (
"regexp"
"github.com/hashicorp/terraform/helper/schema"
"google.golang.org/api/runtimeconfig/v1beta1"
runtimeconfig "google.golang.org/api/runtimeconfig/v1beta1"
)
var runtimeConfigFullName *regexp.Regexp = regexp.MustCompile("^projects/([^/]+)/configs/(.+)$")
@ -137,7 +137,9 @@ func resourceRuntimeconfigConfigDelete(d *schema.ResourceData, meta interface{})
func resourceRuntimeconfigConfigImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
config := meta.(*Config)
parseImportId([]string{"projects/(?P<project>[^/]+)/configs/(?P<name>[^/]+)", "(?P<name>[^/]+)"}, d, config)
if err := parseImportId([]string{"projects/(?P<project>[^/]+)/configs/(?P<name>[^/]+)", "(?P<name>[^/]+)"}, d, config); err != nil {
return nil, err
}
// Replace import id for the resource id
id, err := replaceVars(d, config, "projects/{{project}}/configs/{{name}}")

View File

@ -2,9 +2,10 @@ package google
import (
"fmt"
"github.com/hashicorp/terraform/helper/schema"
"google.golang.org/api/runtimeconfig/v1beta1"
"regexp"
"github.com/hashicorp/terraform/helper/schema"
runtimeconfig "google.golang.org/api/runtimeconfig/v1beta1"
)
func resourceRuntimeconfigVariable() *schema.Resource {
@ -130,7 +131,9 @@ func resourceRuntimeconfigVariableDelete(d *schema.ResourceData, meta interface{
func resourceRuntimeconfigVariableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
config := meta.(*Config)
parseImportId([]string{"projects/(?P<project>[^/]+)/configs/(?P<parent>[^/]+)/variables/(?P<name>[^/]+)", "(?P<parent>[^/]+)/(?P<name>[^/]+)"}, d, config)
if err := parseImportId([]string{"projects/(?P<project>[^/]+)/configs/(?P<parent>[^/]+)/variables/(?P<name>[^/]+)", "(?P<parent>[^/]+)/(?P<name>[^/]+)"}, d, config); err != nil {
return nil, err
}
// Replace import id for the resource id
id, err := replaceVars(d, config, "projects/{{project}}/configs/{{parent}}/variables/{{name}}")

View File

@ -252,6 +252,9 @@ func resourceSpannerInstanceUpdate(d *schema.ResourceData, meta interface{}) err
}
obj, err = resourceSpannerInstanceUpdateEncoder(d, meta, obj)
if err != nil {
return err
}
url, err := replaceVars(d, config, "https://spanner.googleapis.com/v1/projects/{{project}}/instances/{{name}}")
if err != nil {

View File

@ -229,13 +229,15 @@ func resourceSqlDatabaseDelete(d *schema.ResourceData, meta interface{}) error {
func resourceSqlDatabaseImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
config := meta.(*Config)
parseImportId([]string{
if err := parseImportId([]string{
"projects/(?P<project>[^/]+)/instances/(?P<instance>[^/]+)/databases/(?P<name>[^/]+)",
"instances/(?P<instance>[^/]+)/databases/(?P<name>[^/]+)",
"(?P<project>[^/]+)/(?P<instance>[^/]+)/(?P<name>[^/]+)",
"(?P<instance>[^/]+)/(?P<name>[^/]+)",
"(?P<instance>[^/]+):(?P<name>[^/]+)",
}, d, config)
}, d, config); err != nil {
return nil, err
}
// Replace import id for the resource id
id, err := replaceVars(d, config, "{{instance}}:{{name}}")

View File

@ -801,10 +801,12 @@ func resourceSqlDatabaseInstanceDelete(d *schema.ResourceData, meta interface{})
func resourceSqlDatabaseInstanceImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
config := meta.(*Config)
parseImportId([]string{
if err := parseImportId([]string{
"projects/(?P<project>[^/]+)/instances/(?P<name>[^/]+)",
"(?P<project>[^/]+)/(?P<name>[^/]+)",
"(?P<name>[^/]+)"}, d, config)
"(?P<name>[^/]+)"}, d, config); err != nil {
return nil, err
}
// Replace import id for the resource id
id, err := replaceVars(d, config, "{{name}}")

View File

@ -73,7 +73,7 @@ func TestSqlUserMigrateState_empty(t *testing.T) {
// should handle non-nil but empty
is = &terraform.InstanceState{}
is, err = resourceSqlUserMigrateState(0, is, meta)
_, err = resourceSqlUserMigrateState(0, is, meta)
if err != nil {
t.Fatalf("err: %#v", err)

View File

@ -122,7 +122,7 @@ func resourceStorageBucketAclCreate(d *schema.ResourceData, meta interface{}) er
return fmt.Errorf("Error reading bucket %s: %v", bucket, err)
}
res, err = config.clientStorage.Buckets.Update(bucket,
_, err = config.clientStorage.Buckets.Update(bucket,
res).PredefinedAcl(predefined_acl).Do()
if err != nil {
@ -175,7 +175,7 @@ func resourceStorageBucketAclCreate(d *schema.ResourceData, meta interface{}) er
return fmt.Errorf("Error reading bucket %s: %v", bucket, err)
}
res, err = config.clientStorage.Buckets.Update(bucket,
_, err = config.clientStorage.Buckets.Update(bucket,
res).PredefinedDefaultObjectAcl(default_acl).Do()
if err != nil {
@ -297,7 +297,7 @@ func resourceStorageBucketAclUpdate(d *schema.ResourceData, meta interface{}) er
return fmt.Errorf("Error reading bucket %s: %v", bucket, err)
}
res, err = config.clientStorage.Buckets.Update(bucket,
_, err = config.clientStorage.Buckets.Update(bucket,
res).PredefinedDefaultObjectAcl(default_acl).Do()
if err != nil {

View File

@ -171,7 +171,7 @@ func resourceStorageBucketObjectCreate(d *schema.ResourceData, meta interface{})
var media io.Reader
if v, ok := d.GetOk("source"); ok {
err := error(nil)
var err error
media, err = os.Open(v.(string))
if err != nil {
return err
@ -292,6 +292,8 @@ func getFileMd5Hash(filename string) string {
func getContentMd5Hash(content []byte) string {
h := md5.New()
h.Write(content)
if _, err := h.Write(content); err != nil {
log.Printf("[WARN] Failed to compute md5 hash for content: %v", err)
}
return base64.StdEncoding.EncodeToString(h.Sum(nil))
}
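
Two small idioms show up in this file: err := error(nil) becomes the conventional zero-value declaration var err error, and the md5 Write result is checked but only logged, since getContentMd5Hash returns just a string and cannot propagate an error. A compact sketch combining both, mirroring the helper above:

package main

import (
	"crypto/md5"
	"encoding/base64"
	"io"
	"log"
	"os"
)

// getContentMd5Hash mirrors the helper in the hunk above: because it returns
// only a string, a Write failure can only be logged, not propagated.
func getContentMd5Hash(content []byte) string {
	h := md5.New()
	if _, err := h.Write(content); err != nil {
		log.Printf("[WARN] Failed to compute md5 hash for content: %v", err)
	}
	return base64.StdEncoding.EncodeToString(h.Sum(nil))
}

func main() {
	// var err error is the idiomatic zero-value declaration that replaces
	// err := error(nil) in the object-create path.
	var media io.Reader
	var err error
	media, err = os.Open(os.DevNull)
	if err != nil {
		log.Fatal(err)
	}
	_ = media

	log.Println(getContentMd5Hash([]byte("data data data")))
}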

View File

@ -26,11 +26,15 @@ func TestAccStorageObject_basic(t *testing.T) {
bucketName := testBucketName()
data := []byte("data data data")
h := md5.New()
h.Write(data)
if _, err := h.Write(data); err != nil {
t.Errorf("error calculating md5: %v", err)
}
data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
testFile := getNewTmpTestFile(t, "tf-test")
ioutil.WriteFile(testFile.Name(), data, 0644)
if err := ioutil.WriteFile(testFile.Name(), data, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
@ -51,10 +55,14 @@ func TestAccStorageObject_recreate(t *testing.T) {
writeFile := func(name string, data []byte) string {
h := md5.New()
h.Write(data)
if _, err := h.Write(data); err != nil {
t.Errorf("error calculating md5: %v", err)
}
data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
ioutil.WriteFile(name, data, 0644)
if err := ioutil.WriteFile(name, data, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
return data_md5
}
testFile := getNewTmpTestFile(t, "tf-test")
@ -91,11 +99,15 @@ func TestAccStorageObject_content(t *testing.T) {
bucketName := testBucketName()
data := []byte(content)
h := md5.New()
h.Write(data)
if _, err := h.Write(data); err != nil {
t.Errorf("error calculating md5: %v", err)
}
data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
testFile := getNewTmpTestFile(t, "tf-test")
ioutil.WriteFile(testFile.Name(), data, 0644)
if err := ioutil.WriteFile(testFile.Name(), data, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
@ -121,10 +133,14 @@ func TestAccStorageObject_withContentCharacteristics(t *testing.T) {
bucketName := testBucketName()
data := []byte(content)
h := md5.New()
h.Write(data)
if _, err := h.Write(data); err != nil {
t.Errorf("error calculating md5: %v", err)
}
data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
testFile := getNewTmpTestFile(t, "tf-test")
ioutil.WriteFile(testFile.Name(), data, 0644)
if err := ioutil.WriteFile(testFile.Name(), data, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
disposition, encoding, language, content_type := "inline", "compress", "en", "binary/octet-stream"
resource.Test(t, resource.TestCase{
@ -178,10 +194,14 @@ func TestAccStorageObject_cacheControl(t *testing.T) {
bucketName := testBucketName()
data := []byte(content)
h := md5.New()
h.Write(data)
if _, err := h.Write(data); err != nil {
t.Errorf("error calculating md5: %v", err)
}
data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
testFile := getNewTmpTestFile(t, "tf-test")
ioutil.WriteFile(testFile.Name(), data, 0644)
if err := ioutil.WriteFile(testFile.Name(), data, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
cacheControl := "private"
resource.Test(t, resource.TestCase{
@ -207,10 +227,14 @@ func TestAccStorageObject_storageClass(t *testing.T) {
bucketName := testBucketName()
data := []byte(content)
h := md5.New()
h.Write(data)
if _, err := h.Write(data); err != nil {
t.Errorf("error calculating md5: %v", err)
}
data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
testFile := getNewTmpTestFile(t, "tf-test")
ioutil.WriteFile(testFile.Name(), data, 0644)
if err := ioutil.WriteFile(testFile.Name(), data, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
storageClass := "MULTI_REGIONAL"
resource.Test(t, resource.TestCase{

View File

@ -14,7 +14,9 @@ func TestAccStorageObjectAccessControl_update(t *testing.T) {
bucketName := testBucketName()
objectName := testAclObjectName()
objectData := []byte("data data data")
ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644)
if err := ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
resource.Test(t, resource.TestCase{
PreCheck: func() {
if errObjectAcl != nil {

View File

@ -2,10 +2,11 @@ package google
import (
"fmt"
"strings"
"github.com/hashicorp/terraform/helper/schema"
"github.com/hashicorp/terraform/helper/validation"
"google.golang.org/api/storage/v1"
"strings"
)
func resourceStorageObjectAcl() *schema.Resource {
@ -110,7 +111,7 @@ func resourceStorageObjectAclCreate(d *schema.ResourceData, meta interface{}) er
return fmt.Errorf("Error reading object %s in %s: %v", object, bucket, err)
}
res, err = config.clientStorage.Objects.Update(bucket, object, res).PredefinedAcl(predefinedAcl.(string)).Do()
_, err = config.clientStorage.Objects.Update(bucket, object, res).PredefinedAcl(predefinedAcl.(string)).Do()
if err != nil {
return fmt.Errorf("Error updating object %s in %s: %v", object, bucket, err)
}
@ -176,7 +177,7 @@ func resourceStorageObjectAclUpdate(d *schema.ResourceData, meta interface{}) er
return fmt.Errorf("Error reading object %s in %s: %v", object, bucket, err)
}
res, err = config.clientStorage.Objects.Update(bucket, object, res).PredefinedAcl(d.Get("predefined_acl").(string)).Do()
_, err = config.clientStorage.Objects.Update(bucket, object, res).PredefinedAcl(d.Get("predefined_acl").(string)).Do()
if err != nil {
return fmt.Errorf("Error updating object %s in %s: %v", object, bucket, err)
}
@ -219,7 +220,7 @@ func resourceStorageObjectAclDelete(d *schema.ResourceData, meta interface{}) er
return fmt.Errorf("Error reading object %s in %s: %v", object, bucket, err)
}
res, err = config.clientStorage.Objects.Update(bucket, object, res).PredefinedAcl("private").Do()
_, err = config.clientStorage.Objects.Update(bucket, object, res).PredefinedAcl("private").Do()
if err != nil {
return fmt.Errorf("Error updating object %s in %s: %v", object, bucket, err)
}

View File

@ -24,7 +24,9 @@ func TestAccStorageObjectAcl_basic(t *testing.T) {
bucketName := testBucketName()
objectName := testAclObjectName()
objectData := []byte("data data data")
ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644)
if err := ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
resource.Test(t, resource.TestCase{
PreCheck: func() {
if errObjectAcl != nil {
@ -54,7 +56,9 @@ func TestAccStorageObjectAcl_upgrade(t *testing.T) {
bucketName := testBucketName()
objectName := testAclObjectName()
objectData := []byte("data data data")
ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644)
if err := ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
resource.Test(t, resource.TestCase{
PreCheck: func() {
if errObjectAcl != nil {
@ -106,7 +110,9 @@ func TestAccStorageObjectAcl_downgrade(t *testing.T) {
bucketName := testBucketName()
objectName := testAclObjectName()
objectData := []byte("data data data")
ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644)
if err := ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
resource.Test(t, resource.TestCase{
PreCheck: func() {
if errObjectAcl != nil {
@ -158,7 +164,9 @@ func TestAccStorageObjectAcl_predefined(t *testing.T) {
bucketName := testBucketName()
objectName := testAclObjectName()
objectData := []byte("data data data")
ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644)
if err := ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
resource.Test(t, resource.TestCase{
PreCheck: func() {
if errObjectAcl != nil {
@ -182,7 +190,9 @@ func TestAccStorageObjectAcl_predefinedToExplicit(t *testing.T) {
bucketName := testBucketName()
objectName := testAclObjectName()
objectData := []byte("data data data")
ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644)
if err := ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
resource.Test(t, resource.TestCase{
PreCheck: func() {
if errObjectAcl != nil {
@ -215,7 +225,9 @@ func TestAccStorageObjectAcl_explicitToPredefined(t *testing.T) {
bucketName := testBucketName()
objectName := testAclObjectName()
objectData := []byte("data data data")
ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644)
if err := ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
resource.Test(t, resource.TestCase{
PreCheck: func() {
if errObjectAcl != nil {
@ -249,7 +261,9 @@ func TestAccStorageObjectAcl_unordered(t *testing.T) {
bucketName := testBucketName()
objectName := testAclObjectName()
objectData := []byte("data data data")
ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644)
if err := ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644); err != nil {
t.Errorf("error writing file: %v", err)
}
resource.Test(t, resource.TestCase{
PreCheck: func() {
if errObjectAcl != nil {

View File

@ -488,7 +488,9 @@ func TestRetryTimeDuration(t *testing.T) {
Code: 500,
}
}
retryTimeDuration(f, time.Duration(1000)*time.Millisecond)
if err := retryTimeDuration(f, time.Duration(1000)*time.Millisecond); err == nil || err.(*googleapi.Error).Code != 500 {
t.Errorf("unexpected error retrying: %v", err)
}
if i < 2 {
t.Errorf("expected error function to be called at least twice, but was called %d times", i)
}
@ -503,7 +505,9 @@ func TestRetryTimeDuration_wrapped(t *testing.T) {
}
return errwrap.Wrapf("nested error: {{err}}", err)
}
retryTimeDuration(f, time.Duration(1000)*time.Millisecond)
if err := retryTimeDuration(f, time.Duration(1000)*time.Millisecond); err == nil || err.(*googleapi.Error).Code != 500 {
t.Errorf("unexpected error retrying: %v", err)
}
if i < 2 {
t.Errorf("expected error function to be called at least twice, but was called %d times", i)
}
@ -517,7 +521,9 @@ func TestRetryTimeDuration_noretry(t *testing.T) {
Code: 400,
}
}
retryTimeDuration(f, time.Duration(1000)*time.Millisecond)
if err := retryTimeDuration(f, time.Duration(1000)*time.Millisecond); err == nil || err.(*googleapi.Error).Code != 400 {
t.Errorf("unexpected error retrying: %v", err)
}
if i != 1 {
t.Errorf("expected error function to be called exactly once, but was called %d times", i)
}
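
The retry tests above now assert on the returned error rather than discarding it, checking both that an error came back and that it carries the expected googleapi status code. A minimal stand-alone sketch of that assertion shape, with a stub in place of retryTimeDuration:

package main

import (
	"fmt"

	"google.golang.org/api/googleapi"
)

// retryStub stands in for retryTimeDuration: it runs f once and returns its error.
func retryStub(f func() error) error {
	return f()
}

func main() {
	f := func() error {
		return &googleapi.Error{Code: 500}
	}

	// The tests apply this shape of assertion: the call must fail, and the
	// failure must carry the expected googleapi error code.
	if err := retryStub(f); err == nil || err.(*googleapi.Error).Code != 500 {
		fmt.Printf("unexpected error retrying: %v\n", err)
		return
	}
	fmt.Println("got the expected 500 error")
}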