Merge pull request #2158 from rileykarson/cloud-functions-event-trigger

Add event_trigger to cloud functions
This commit is contained in:
Paddy 2018-10-03 18:55:53 -07:00 committed by GitHub
commit 3294d7e1f4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 361 additions and 38 deletions

View File

@ -78,10 +78,11 @@ func joinMapKeys(mapToJoin *map[int]bool) string {
func resourceCloudFunctionsFunction() *schema.Resource {
return &schema.Resource{
Create: resourceCloudFunctionsCreate,
Read: resourceCloudFunctionsRead,
Update: resourceCloudFunctionsUpdate,
Delete: resourceCloudFunctionsDestroy,
Create: resourceCloudFunctionsCreate,
Read: resourceCloudFunctionsRead,
Update: resourceCloudFunctionsUpdate,
Delete: resourceCloudFunctionsDestroy,
CustomizeDiff: resourceCloudFunctionsCustomizeDiff,
Importer: &schema.ResourceImporter{
State: schema.ImportStatePassthrough,
@ -177,7 +178,8 @@ func resourceCloudFunctionsFunction() *schema.Resource {
"trigger_bucket": {
Type: schema.TypeString,
Optional: true,
ForceNew: true,
Computed: true,
Deprecated: "This field is deprecated. Use `event_trigger` instead.",
ConflictsWith: []string{"trigger_http", "trigger_topic"},
},
@ -191,10 +193,46 @@ func resourceCloudFunctionsFunction() *schema.Resource {
"trigger_topic": {
Type: schema.TypeString,
Optional: true,
ForceNew: true,
Computed: true,
Deprecated: "This field is deprecated. Use `event_trigger` instead.",
ConflictsWith: []string{"trigger_http", "trigger_bucket"},
},
"event_trigger": {
Type: schema.TypeList,
Optional: true,
Computed: true,
ConflictsWith: []string{"trigger_http", "retry_on_failure", "trigger_topic", "trigger_http"},
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"event_type": {
Type: schema.TypeString,
ForceNew: true,
Required: true,
},
"resource": {
Type: schema.TypeString,
Required: true,
},
"failure_policy": {
Type: schema.TypeList,
Optional: true,
Computed: true,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"retry": {
Type: schema.TypeBool,
// not strictly required, but this way an empty block can't be specified
Required: true,
},
}},
},
},
},
},
"https_trigger_url": {
Type: schema.TypeString,
Optional: true,
@ -204,6 +242,8 @@ func resourceCloudFunctionsFunction() *schema.Resource {
"retry_on_failure": {
Type: schema.TypeBool,
Optional: true,
Computed: true,
Deprecated: "This field is deprecated. Use `event_trigger.failure_policy.retry` instead.",
ConflictsWith: []string{"trigger_http"},
},
@ -225,6 +265,28 @@ func resourceCloudFunctionsFunction() *schema.Resource {
}
}
// resourceCloudFunctionsCustomizeDiff adjusts the planned diff for the
// deprecated trigger_topic and trigger_bucket fields. Removing one of them
// from config (new value "") only clears the diff — the trigger is now
// expected to be managed via event_trigger — while changing it to a
// different non-empty value forces recreation of the function.
func resourceCloudFunctionsCustomizeDiff(diff *schema.ResourceDiff, meta interface{}) error {
	for _, field := range []string{"trigger_topic", "trigger_bucket"} {
		if !diff.HasChange(field) {
			continue
		}
		if _, newVal := diff.GetChange(field); newVal == "" {
			// Field dropped from config: suppress the spurious diff.
			diff.Clear(field)
		} else {
			// The trigger target cannot be updated in place.
			diff.ForceNew(field)
		}
	}
	return nil
}
func resourceCloudFunctionsCreate(d *schema.ResourceData, meta interface{}) error {
config := meta.(*Config)
@ -253,7 +315,8 @@ func resourceCloudFunctionsCreate(d *schema.ResourceData, meta interface{}) erro
}
function := &cloudfunctions.CloudFunction{
Name: cloudFuncId.cloudFunctionId(),
Name: cloudFuncId.cloudFunctionId(),
ForceSendFields: []string{},
}
sourceArchiveBucket := d.Get("source_archive_bucket").(string)
@ -277,13 +340,11 @@ func resourceCloudFunctionsCreate(d *schema.ResourceData, meta interface{}) erro
function.Timeout = fmt.Sprintf("%vs", v.(int))
}
v, triggHttpOk := d.GetOk("trigger_http")
if triggHttpOk && v.(bool) {
if v, ok := d.GetOk("event_trigger"); ok {
function.EventTrigger = expandEventTrigger(v.([]interface{}), project)
} else if v, ok := d.GetOk("trigger_http"); ok && v.(bool) {
function.HttpsTrigger = &cloudfunctions.HttpsTrigger{}
}
v, triggTopicOk := d.GetOk("trigger_topic")
if triggTopicOk {
} else if v, ok := d.GetOk("trigger_topic"); ok {
// Make PubSub event publish as in https://cloud.google.com/functions/docs/calling/pubsub
function.EventTrigger = &cloudfunctions.EventTrigger{
// Other events are not supported
@ -297,10 +358,7 @@ func resourceCloudFunctionsCreate(d *schema.ResourceData, meta interface{}) erro
Retry: &cloudfunctions.Retry{},
}
}
}
v, triggBucketOk := d.GetOk("trigger_bucket")
if triggBucketOk {
} else if v, ok := d.GetOk("trigger_bucket"); ok {
// Make Storage event as in https://cloud.google.com/functions/docs/calling/storage
function.EventTrigger = &cloudfunctions.EventTrigger{
EventType: "providers/cloud.storage/eventTypes/object.change",
@ -313,10 +371,8 @@ func resourceCloudFunctionsCreate(d *schema.ResourceData, meta interface{}) erro
Retry: &cloudfunctions.Retry{},
}
}
}
if !triggHttpOk && !triggTopicOk && !triggBucketOk {
return fmt.Errorf("One of arguments [trigger_topic, trigger_bucket, trigger_http] is required: " +
} else {
return fmt.Errorf("One of `event_trigger` or `trigger_http` is required: " +
"You must specify a trigger when deploying a new function.")
}
@ -389,16 +445,24 @@ func resourceCloudFunctionsRead(d *schema.ResourceData, meta interface{}) error
d.Set("https_trigger_url", function.HttpsTrigger.Url)
}
d.Set("event_trigger", flattenEventTrigger(function.EventTrigger))
if function.EventTrigger != nil {
switch function.EventTrigger.EventType {
// From https://github.com/google/google-api-go-client/blob/master/cloudfunctions/v1/cloudfunctions-gen.go#L335
case "google.pubsub.topic.publish":
d.Set("trigger_topic", GetResourceNameFromSelfLink(function.EventTrigger.Resource))
if _, ok := d.GetOk("trigger_topic"); ok {
d.Set("trigger_topic", GetResourceNameFromSelfLink(function.EventTrigger.Resource))
}
case "providers/cloud.storage/eventTypes/object.change":
d.Set("trigger_bucket", GetResourceNameFromSelfLink(function.EventTrigger.Resource))
if _, ok := d.GetOk("trigger_bucket"); ok {
d.Set("trigger_bucket", GetResourceNameFromSelfLink(function.EventTrigger.Resource))
}
}
if _, ok := d.GetOk("retry_on_failure"); ok {
retry := function.EventTrigger.FailurePolicy != nil && function.EventTrigger.FailurePolicy.Retry != nil
d.Set("retry_on_failure", retry)
}
retry := function.EventTrigger.FailurePolicy != nil && function.EventTrigger.FailurePolicy.Retry != nil
d.Set("retry_on_failure", retry)
}
d.Set("region", cloudFuncId.Region)
d.Set("project", cloudFuncId.Project)
@ -410,6 +474,11 @@ func resourceCloudFunctionsUpdate(d *schema.ResourceData, meta interface{}) erro
log.Printf("[DEBUG]: Updating google_cloudfunctions_function")
config := meta.(*Config)
project, err := getProject(d, config)
if err != nil {
return err
}
cloudFuncId, err := parseCloudFunctionId(d.Id(), config)
if err != nil {
return err
@ -455,6 +524,7 @@ func resourceCloudFunctionsUpdate(d *schema.ResourceData, meta interface{}) erro
updateMaskArr = append(updateMaskArr, "environment_variables")
}
// Event trigger will run after failure policy and take precedence
if d.HasChange("retry_on_failure") {
if d.Get("retry_on_failure").(bool) {
if function.EventTrigger == nil {
@ -467,6 +537,11 @@ func resourceCloudFunctionsUpdate(d *schema.ResourceData, meta interface{}) erro
updateMaskArr = append(updateMaskArr, "eventTrigger.failurePolicy.retry")
}
if d.HasChange("event_trigger") {
function.EventTrigger = expandEventTrigger(d.Get("event_trigger").([]interface{}), project)
updateMaskArr = append(updateMaskArr, "eventTrigger", "eventTrigger.failurePolicy.retry")
}
if len(updateMaskArr) > 0 {
log.Printf("[DEBUG] Send Patch CloudFunction Configuration request: %#v", function)
updateMask := strings.Join(updateMaskArr, ",")
@ -509,3 +584,70 @@ func resourceCloudFunctionsDestroy(d *schema.ResourceData, meta interface{}) err
return nil
}
// expandEventTrigger converts the event_trigger config block into the API's
// EventTrigger representation. The short resource name from config is
// expanded to a full resource path based on the event type's provider
// (Cloud Storage buckets or Pub/Sub topics). Returns nil when the block is
// absent or its element is not the expected map shape.
func expandEventTrigger(configured []interface{}, project string) *cloudfunctions.EventTrigger {
	if len(configured) == 0 {
		return nil
	}
	data, ok := configured[0].(map[string]interface{})
	if !ok {
		return nil
	}
	eventType := data["event_type"].(string)
	resource := data["resource"].(string)
	switch {
	case strings.HasPrefix(eventType, "providers/cloud.storage/eventTypes/"):
		resource = fmt.Sprintf("projects/%s/buckets/%s", project, resource)
	case strings.HasPrefix(eventType, "providers/cloud.pubsub/eventTypes/"):
		resource = fmt.Sprintf("projects/%s/topics/%s", project, resource)
	default:
		// Unrecognized event type: pass the configured resource through
		// unchanged. The previous code formatted with an empty template
		// here, producing a malformed "%!(EXTRA ...)" resource string.
	}
	return &cloudfunctions.EventTrigger{
		EventType:     eventType,
		Resource:      resource,
		FailurePolicy: expandFailurePolicy(data["failure_policy"].([]interface{})),
	}
}
// flattenEventTrigger converts an API EventTrigger into the list-of-maps
// shape used by the event_trigger schema field. A nil trigger flattens to
// an empty (zero-length, non-nil) list.
func flattenEventTrigger(eventTrigger *cloudfunctions.EventTrigger) []map[string]interface{} {
	if eventTrigger == nil {
		return []map[string]interface{}{}
	}
	return []map[string]interface{}{
		{
			"event_type":     eventTrigger.EventType,
			"resource":       GetResourceNameFromSelfLink(eventTrigger.Resource),
			"failure_policy": flattenFailurePolicy(eventTrigger.FailurePolicy),
		},
	}
}
// expandFailurePolicy converts the failure_policy config block into the
// API's FailurePolicy. An absent block expands to an empty (non-retrying)
// policy; an explicit retry=false — or an unexpectedly shaped element —
// expands to nil.
func expandFailurePolicy(configured []interface{}) *cloudfunctions.FailurePolicy {
	if len(configured) == 0 {
		return &cloudfunctions.FailurePolicy{}
	}
	data, ok := configured[0].(map[string]interface{})
	if !ok || !data["retry"].(bool) {
		return nil
	}
	return &cloudfunctions.FailurePolicy{
		Retry: &cloudfunctions.Retry{},
	}
}
// flattenFailurePolicy converts an API FailurePolicy into the list-of-maps
// shape used by the failure_policy schema field. A nil policy flattens to
// nil; otherwise "retry" reflects whether a Retry value is present.
func flattenFailurePolicy(failurePolicy *cloudfunctions.FailurePolicy) []map[string]interface{} {
	if failurePolicy == nil {
		return nil
	}
	return []map[string]interface{}{
		{"retry": failurePolicy.Retry != nil},
	}
}

View File

@ -134,8 +134,6 @@ func TestAccCloudFunctionsFunction_update(t *testing.T) {
func TestAccCloudFunctionsFunction_pubsub(t *testing.T) {
t.Parallel()
var function cloudfunctions.CloudFunction
funcResourceName := "google_cloudfunctions_function.function"
functionName := fmt.Sprintf("tf-test-%s", acctest.RandString(10))
bucketName := fmt.Sprintf("tf-test-bucket-%d", acctest.RandInt())
@ -154,6 +152,39 @@ func TestAccCloudFunctionsFunction_pubsub(t *testing.T) {
{
Config: testAccCloudFunctionsFunction_pubsub(functionName, bucketName,
topicName, zipFilePath),
},
{
ResourceName: funcResourceName,
ImportState: true,
ImportStateVerify: true,
},
},
})
}
func TestAccCloudFunctionsFunction_oldPubsub(t *testing.T) {
t.Parallel()
var function cloudfunctions.CloudFunction
funcResourceName := "google_cloudfunctions_function.function"
functionName := fmt.Sprintf("tf-test-%s", acctest.RandString(10))
bucketName := fmt.Sprintf("tf-test-bucket-%d", acctest.RandInt())
topicName := fmt.Sprintf("tf-test-sub-%s", acctest.RandString(10))
zipFilePath, err := createZIPArchiveForIndexJs(testPubSubTriggerPath)
if err != nil {
t.Fatal(err.Error())
}
defer os.Remove(zipFilePath) // clean up
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckCloudFunctionsFunctionDestroy,
Steps: []resource.TestStep{
{
Config: testAccCloudFunctionsFunction_oldPubsub(functionName, bucketName,
topicName, zipFilePath),
Check: resource.ComposeTestCheckFunc(
testAccCloudFunctionsFunctionExists(
funcResourceName, &function),
@ -169,6 +200,43 @@ func TestAccCloudFunctionsFunction_pubsub(t *testing.T) {
"trigger_topic", topicName),
),
},
{
ResourceName: funcResourceName,
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"retry_on_failure", "trigger_topic"},
},
},
})
}
func TestAccCloudFunctionsFunction_bucket(t *testing.T) {
t.Parallel()
funcResourceName := "google_cloudfunctions_function.function"
functionName := fmt.Sprintf("tf-test-%s", acctest.RandString(10))
bucketName := fmt.Sprintf("tf-test-bucket-%d", acctest.RandInt())
zipFilePath, err := createZIPArchiveForIndexJs(testBucketTriggerPath)
if err != nil {
t.Fatal(err.Error())
}
defer os.Remove(zipFilePath) // clean up
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckCloudFunctionsFunctionDestroy,
Steps: []resource.TestStep{
{
Config: testAccCloudFunctionsFunction_bucket(functionName, bucketName, zipFilePath),
},
{
ResourceName: funcResourceName,
ImportState: true,
ImportStateVerify: true,
},
{
Config: testAccCloudFunctionsFunction_bucketNoRetry(functionName, bucketName, zipFilePath),
},
{
ResourceName: funcResourceName,
ImportState: true,
@ -177,7 +245,8 @@ func TestAccCloudFunctionsFunction_pubsub(t *testing.T) {
},
})
}
func TestAccCloudFunctionsFunction_bucket(t *testing.T) {
func TestAccCloudFunctionsFunction_oldBucket(t *testing.T) {
t.Parallel()
var function cloudfunctions.CloudFunction
@ -197,7 +266,7 @@ func TestAccCloudFunctionsFunction_bucket(t *testing.T) {
CheckDestroy: testAccCheckCloudFunctionsFunctionDestroy,
Steps: []resource.TestStep{
{
Config: testAccCloudFunctionsFunction_bucket(functionName, bucketName, zipFilePath),
Config: testAccCloudFunctionsFunction_oldBucket(functionName, bucketName, zipFilePath),
Check: resource.ComposeTestCheckFunc(
testAccCloudFunctionsFunctionExists(
funcResourceName, &function),
@ -214,12 +283,13 @@ func TestAccCloudFunctionsFunction_bucket(t *testing.T) {
),
},
{
ResourceName: funcResourceName,
ImportState: true,
ImportStateVerify: true,
ResourceName: funcResourceName,
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"retry_on_failure", "trigger_bucket"},
},
{
Config: testAccCloudFunctionsFunction_bucketNoRetry(functionName, bucketName, zipFilePath),
Config: testAccCloudFunctionsFunction_OldBucketNoRetry(functionName, bucketName, zipFilePath),
Check: resource.ComposeTestCheckFunc(
testAccCloudFunctionsFunctionExists(
funcResourceName, &function),
@ -236,9 +306,10 @@ func TestAccCloudFunctionsFunction_bucket(t *testing.T) {
),
},
{
ResourceName: funcResourceName,
ImportState: true,
ImportStateVerify: true,
ResourceName: funcResourceName,
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"retry_on_failure", "trigger_bucket"},
},
},
})
@ -471,7 +542,7 @@ resource "google_cloudfunctions_function" "function" {
}`, bucketName, zipFilePath, functionName)
}
func testAccCloudFunctionsFunction_pubsub(functionName string, bucketName string,
func testAccCloudFunctionsFunction_oldPubsub(functionName string, bucketName string,
topic string, zipFilePath string) string {
return fmt.Sprintf(`
resource "google_storage_bucket" "bucket" {
@ -500,6 +571,40 @@ resource "google_cloudfunctions_function" "function" {
}`, bucketName, zipFilePath, topic, functionName)
}
// testAccCloudFunctionsFunction_pubsub renders an acceptance-test config
// for a function using the new event_trigger block with a Pub/Sub topic
// trigger and an explicit non-retrying failure policy.
func testAccCloudFunctionsFunction_pubsub(functionName string, bucketName string,
	topic string, zipFilePath string) string {
	const configTmpl = `
resource "google_storage_bucket" "bucket" {
name = "%s"
}
resource "google_storage_bucket_object" "archive" {
name = "index.zip"
bucket = "${google_storage_bucket.bucket.name}"
source = "%s"
}
resource "google_pubsub_topic" "sub" {
name = "%s"
}
resource "google_cloudfunctions_function" "function" {
name = "%s"
available_memory_mb = 128
source_archive_bucket = "${google_storage_bucket.bucket.name}"
source_archive_object = "${google_storage_bucket_object.archive.name}"
timeout = 61
entry_point = "helloPubSub"
event_trigger {
event_type = "providers/cloud.pubsub/eventTypes/topic.publish"
resource = "${google_pubsub_topic.sub.name}"
failure_policy {
retry = false
}
}
}`
	return fmt.Sprintf(configTmpl, bucketName, zipFilePath, topic, functionName)
}
func testAccCloudFunctionsFunction_bucket(functionName string, bucketName string,
zipFilePath string) string {
return fmt.Sprintf(`
@ -513,6 +618,63 @@ resource "google_storage_bucket_object" "archive" {
source = "%s"
}
resource "google_cloudfunctions_function" "function" {
name = "%s"
available_memory_mb = 128
source_archive_bucket = "${google_storage_bucket.bucket.name}"
source_archive_object = "${google_storage_bucket_object.archive.name}"
timeout = 61
entry_point = "helloGCS"
event_trigger {
event_type = "providers/cloud.storage/eventTypes/object.change"
resource = "${google_storage_bucket.bucket.name}"
failure_policy {
retry = true
}
}
}`, bucketName, zipFilePath, functionName)
}
// testAccCloudFunctionsFunction_bucketNoRetry renders an acceptance-test
// config for a function using the new event_trigger block with a Cloud
// Storage object-change trigger and no failure_policy block (no retry).
func testAccCloudFunctionsFunction_bucketNoRetry(functionName string, bucketName string,
	zipFilePath string) string {
	const configTmpl = `
resource "google_storage_bucket" "bucket" {
name = "%s"
}
resource "google_storage_bucket_object" "archive" {
name = "index.zip"
bucket = "${google_storage_bucket.bucket.name}"
source = "%s"
}
resource "google_cloudfunctions_function" "function" {
name = "%s"
available_memory_mb = 128
source_archive_bucket = "${google_storage_bucket.bucket.name}"
source_archive_object = "${google_storage_bucket_object.archive.name}"
timeout = 61
entry_point = "helloGCS"
event_trigger {
event_type = "providers/cloud.storage/eventTypes/object.change"
resource = "${google_storage_bucket.bucket.name}"
}
}`
	return fmt.Sprintf(configTmpl, bucketName, zipFilePath, functionName)
}
func testAccCloudFunctionsFunction_oldBucket(functionName string, bucketName string,
zipFilePath string) string {
return fmt.Sprintf(`
resource "google_storage_bucket" "bucket" {
name = "%s"
}
resource "google_storage_bucket_object" "archive" {
name = "index.zip"
bucket = "${google_storage_bucket.bucket.name}"
source = "%s"
}
resource "google_cloudfunctions_function" "function" {
name = "%s"
available_memory_mb = 128
@ -525,7 +687,7 @@ resource "google_cloudfunctions_function" "function" {
}`, bucketName, zipFilePath, functionName)
}
func testAccCloudFunctionsFunction_bucketNoRetry(functionName string, bucketName string,
func testAccCloudFunctionsFunction_OldBucketNoRetry(functionName string, bucketName string,
zipFilePath string) string {
return fmt.Sprintf(`
resource "google_storage_bucket" "bucket" {

View File

@ -64,17 +64,36 @@ The following arguments are supported:
* `entry_point` - (Optional) Name of a JavaScript function that will be executed when the Google Cloud Function is triggered.
* `event_trigger` - (Optional) A source that fires events in response to a condition in another service. Structure is documented below. Cannot be used with `trigger_http`.
* `trigger_http` - (Optional) Boolean variable. Any HTTP request (of a supported type) to the endpoint will trigger function execution. Supported HTTP request types are: POST, PUT, GET, DELETE, and OPTIONS. Endpoint is returned as `https_trigger_url`. Cannot be used with `trigger_bucket` and `trigger_topic`.
* `trigger_bucket` - (Optional) Google Cloud Storage bucket name. Every change in files in this bucket will trigger function execution. Cannot be used with `trigger_http` and `trigger_topic`.
Deprecated. Use `event_trigger` instead.
* `trigger_topic` - (Optional) Name of Pub/Sub topic. Every message published in this topic will trigger function execution with message contents passed as input data. Cannot be used with `trigger_http` and `trigger_bucket`.
Deprecated. Use `event_trigger` instead.
* `labels` - (Optional) A set of key/value label pairs to assign to the function.
* `environment_variables` - (Optional) A set of key/value environment variable pairs to assign to the function.
* `retry_on_failure` - (Optional) Whether the function should be retried on failure. This only applies to bucket and topic triggers, not HTTPS triggers.
Deprecated. Use `event_trigger.failure_policy.retry` instead.
The `event_trigger` block supports:
* `event_type` - (Required) The type of event to observe. For example: `"providers/cloud.storage/eventTypes/object.change"`
and `"providers/cloud.pubsub/eventTypes/topic.publish"`. See the documentation on [calling Cloud Functions](https://cloud.google.com/functions/docs/calling/)
for a full reference. Only Cloud Storage and Cloud Pub/Sub triggers are supported at this time.
* `resource` - (Required) The name of the resource from which to observe events, for example, `"myBucket"`.
* `failure_policy` - (Optional) Specifies policy for failed executions. Structure is documented below.
The `failure_policy` block supports:
* `retry` - (Required) Whether the function should be retried on failure. Defaults to `false`.
## Attributes Reference