Refactor storage bucket import logic (#3244)

<!-- This change is generated by MagicModules. -->
/cc @chrisst
This commit is contained in:
The Magician 2019-03-15 11:22:22 -07:00 committed by Chris Stephens
parent c00fe3c4b7
commit d9510464a3
3 changed files with 37 additions and 14 deletions

View File

@ -9,6 +9,7 @@ import (
"github.com/hashicorp/terraform/helper/acctest"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
sqladmin "google.golang.org/api/sqladmin/v1beta4"
)

View File

@ -457,22 +457,25 @@ func resourceStorageBucketRead(d *schema.ResourceData, meta interface{}) error {
// Get the bucket and acl
bucket := d.Get("name").(string)
res, err := config.clientStorage.Buckets.Get(bucket).Do()
if err != nil {
return handleNotFoundError(err, d, fmt.Sprintf("Storage Bucket %q", d.Get("name").(string)))
}
log.Printf("[DEBUG] Read bucket %v at location %v\n\n", res.Name, res.SelfLink)
// We need to get the project associated with this bucket because otherwise import
// won't work properly. That means we need to call the projects.get API with the
// project number, to get the project ID - there's no project ID field in the
// resource response. However, this requires a call to the Compute API, which
// would otherwise not be required for this resource. So, we're going to
// intentionally check whether the project is set *on the resource*. If it is,
// we will not try to fetch the project name. If it is not, either because
// the user intends to use the default provider project, or because the resource
// is currently being imported, we will read it from the API.
if _, ok := d.GetOk("project"); !ok {
// We are trying to support several different use cases for bucket. Buckets are globally
// unique but they are associated with projects internally, but some users want to use
// buckets in a project agnostic way. Thus we will check to see if the project ID has been
// explicitly set and use that first. However if no project is explicitly set, such as during
// import, we will look up the ID from the compute API using the project Number from the
// bucket API response.
// If you are working in a project-agnostic way and have not set the project ID in the provider
// block, the resource, or an environment variable, we use the Compute API to look up the
// project ID from the project number, which is included in the bucket API response.
if d.Get("project") == "" {
project, _ := getProject(d, config)
d.Set("project", project)
}
if d.Get("project") == "" {
proj, err := config.clientCompute.Projects.Get(strconv.FormatUint(res.ProjectNumber, 10)).Do()
if err != nil {
return err
@ -585,7 +588,18 @@ func resourceStorageBucketDelete(d *schema.ResourceData, meta interface{}) error
}
// resourceStorageBucketStateImporter parses the import ID, which may be either
// "bucket_name" or "project/bucket_name". Supplying the project segment allows
// importing a bucket that lives in a different project than the provider
// default. ParseImportID can't be used here because a missing project must not
// be an error: it is a valid state, and the project will be resolved later in
// the Read function.
func resourceStorageBucketStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
	// strings.Split always returns at least one element, so parts[0] is safe
	// and exactly one of the two branches below sets "name" on every path
	// (the former unconditional d.Set("name", d.Id()) was a dead store).
	parts := strings.Split(d.Id(), "/")
	if len(parts) == 1 {
		d.Set("name", parts[0])
	} else {
		d.Set("project", parts[0])
		d.Set("name", parts[1])
	}
	// force_destroy is a Terraform-only virtual attribute (never returned by
	// the GCS API), so default it explicitly on import.
	d.Set("force_destroy", false)
	return []*schema.ResourceData{d}, nil
}

View File

@ -18,7 +18,9 @@ For more information see
and
[API](https://cloud.google.com/storage/docs/json_api/v1/buckets).
**Note**: When importing a bucket or using only the default provider project for bucket creation, you will need to enable the Compute API and will otherwise get an error with a link to the API enablement page. If you would prefer not to enable the Compute API, make sure to explicitly set `project` on the bucket resource.
**Note**: If the project ID is not set on the resource or in the provider block, it will be
dynamically determined, which requires enabling the Compute API.
## Example Usage
@ -141,9 +143,15 @@ exported:
## Import
Storage buckets can be imported using the `name`, e.g.
Storage buckets can be imported using the `name` or `project/name`. If the project is not
passed to the import command, it will be inferred from the provider block or environment variables.
If it cannot be inferred, it will be queried from the Compute API (this will fail if the API is
not enabled).
e.g.
```
$ terraform import google_storage_bucket.image-store image-store-bucket
$ terraform import google_storage_bucket.image-store tf-test-project/image-store-bucket
```