Add Stackdriver Logging Metric resource (#3523)

Signed-off-by: Modular Magician <magic-modules@google.com>
This commit is contained in:
The Magician 2019-05-03 12:26:14 -07:00 committed by Ty Larrabee
parent 07af08027e
commit 966229afbe
7 changed files with 1299 additions and 4 deletions

View File

@ -202,6 +202,7 @@ func ResourceMapWithErrors() (map[string]*schema.Resource, error) {
"google_folder_organization_policy": resourceGoogleFolderOrganizationPolicy(),
"google_logging_billing_account_sink": resourceLoggingBillingAccountSink(),
"google_logging_billing_account_exclusion": ResourceLoggingExclusion(BillingAccountLoggingExclusionSchema, NewBillingAccountLoggingExclusionUpdater, billingAccountLoggingExclusionIdParseFunc),
"google_logging_metric": resourceLoggingMetric(),
"google_logging_organization_sink": resourceLoggingOrganizationSink(),
"google_logging_organization_exclusion": ResourceLoggingExclusion(OrganizationLoggingExclusionSchema, NewOrganizationLoggingExclusionUpdater, organizationLoggingExclusionIdParseFunc),
"google_logging_folder_sink": resourceLoggingFolderSink(),

View File

@ -0,0 +1,21 @@
// ----------------------------------------------------------------------------
//
// *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
//
// ----------------------------------------------------------------------------
//
// This file is automatically generated by Magic Modules and manual
// changes will be clobbered when the file is regenerated.
//
// Please read more about how to change this file in
// .github/CONTRIBUTING.md.
//
// ----------------------------------------------------------------------------
package google
import "github.com/hashicorp/terraform/helper/schema"
// GeneratedLoggingResourcesMap lists the Magic-Modules-generated Logging
// resources, keyed by Terraform resource type name. It is merged into the
// provider's full resource map.
var GeneratedLoggingResourcesMap = map[string]*schema.Resource{
	"google_logging_metric": resourceLoggingMetric(),
}

View File

@ -0,0 +1,879 @@
// ----------------------------------------------------------------------------
//
// *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
//
// ----------------------------------------------------------------------------
//
// This file is automatically generated by Magic Modules and manual
// changes will be clobbered when the file is regenerated.
//
// Please read more about how to change this file in
// .github/CONTRIBUTING.md.
//
// ----------------------------------------------------------------------------
package google
import (
"fmt"
"log"
"reflect"
"strconv"
"time"
"github.com/hashicorp/terraform/helper/schema"
"github.com/hashicorp/terraform/helper/validation"
)
// resourceLoggingMetric defines the google_logging_metric resource: the
// schema of a Stackdriver (Cloud) Logging logs-based metric plus its CRUD
// entry points.
func resourceLoggingMetric() *schema.Resource {
	return &schema.Resource{
		Create: resourceLoggingMetricCreate,
		Read:   resourceLoggingMetricRead,
		Update: resourceLoggingMetricUpdate,
		Delete: resourceLoggingMetricDelete,

		Importer: &schema.ResourceImporter{
			State: resourceLoggingMetricImport,
		},

		Timeouts: &schema.ResourceTimeout{
			Create: schema.DefaultTimeout(240 * time.Second),
			Update: schema.DefaultTimeout(240 * time.Second),
			Delete: schema.DefaultTimeout(240 * time.Second),
		},

		Schema: map[string]*schema.Schema{
			// Advanced logs filter that selects the log entries counted by
			// this metric.
			"filter": {
				Type:     schema.TypeString,
				Required: true,
			},
			// Describes the resulting time series: its kind, value type, and
			// any user-defined labels.
			"metric_descriptor": {
				Type:     schema.TypeList,
				Required: true,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"metric_kind": {
							Type:         schema.TypeString,
							Required:     true,
							ValidateFunc: validation.StringInSlice([]string{"DELTA", "GAUGE", "CUMULATIVE"}, false),
						},
						"value_type": {
							Type:         schema.TypeString,
							Required:     true,
							ValidateFunc: validation.StringInSlice([]string{"BOOL", "INT64", "DOUBLE", "STRING", "DISTRIBUTION", "MONEY"}, false),
						},
						"labels": {
							Type:     schema.TypeList,
							Optional: true,
							Elem: &schema.Resource{
								Schema: map[string]*schema.Schema{
									"key": {
										Type:     schema.TypeString,
										Required: true,
									},
									"description": {
										Type:     schema.TypeString,
										Optional: true,
									},
									// "" is accepted so that an unset value
									// normalizes to the schema default.
									"value_type": {
										Type:         schema.TypeString,
										Optional:     true,
										ValidateFunc: validation.StringInSlice([]string{"BOOL", "INT64", "STRING", ""}, false),
										Default:      "STRING",
									},
								},
							},
						},
					},
				},
			},
			// The metric identifier; the API may normalize/autogenerate parts
			// of it, so Create re-reads it from the response.
			"name": {
				Type:     schema.TypeString,
				Required: true,
			},
			// Histogram bucket layout, only meaningful for DISTRIBUTION
			// value types. Exactly one of the three sub-blocks is used.
			"bucket_options": {
				Type:     schema.TypeList,
				Optional: true,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"explicit": {
							Type:     schema.TypeList,
							Optional: true,
							MaxItems: 1,
							Elem: &schema.Resource{
								Schema: map[string]*schema.Schema{
									"bounds": {
										Type:     schema.TypeList,
										Optional: true,
										Elem: &schema.Schema{
											Type: schema.TypeString,
										},
									},
								},
							},
						},
						"exponential_buckets": {
							Type:     schema.TypeList,
							Optional: true,
							MaxItems: 1,
							Elem: &schema.Resource{
								Schema: map[string]*schema.Schema{
									// The API defines growthFactor as a double
									// (like its sibling `scale`), so TypeFloat is
									// required to express fractional factors;
									// whole numbers remain valid.
									"growth_factor": {
										Type:     schema.TypeFloat,
										Optional: true,
									},
									"num_finite_buckets": {
										Type:     schema.TypeInt,
										Optional: true,
									},
									"scale": {
										Type:     schema.TypeFloat,
										Optional: true,
									},
								},
							},
						},
						"linear_buckets": {
							Type:     schema.TypeList,
							Optional: true,
							MaxItems: 1,
							Elem: &schema.Resource{
								Schema: map[string]*schema.Schema{
									"num_finite_buckets": {
										Type:     schema.TypeInt,
										Optional: true,
									},
									"offset": {
										Type:     schema.TypeFloat,
										Optional: true,
									},
									// The API defines width as a double (like
									// `offset`), so TypeFloat is required to
									// express fractional widths; whole numbers
									// remain valid.
									"width": {
										Type:     schema.TypeFloat,
										Optional: true,
									},
								},
							},
						},
					},
				},
			},
			"description": {
				Type:     schema.TypeString,
				Optional: true,
			},
			// Map of label key -> extractor expression producing the label's
			// value from a log entry.
			"label_extractors": {
				Type:     schema.TypeMap,
				Optional: true,
				Elem:     &schema.Schema{Type: schema.TypeString},
			},
			"value_extractor": {
				Type:     schema.TypeString,
				Optional: true,
			},
			"project": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
				ForceNew: true,
			},
		},
	}
}
// resourceLoggingMetricCreate expands the configuration into an API request
// object, POSTs it to the Logging API, and seeds Terraform state from the
// response. The server-returned `name` becomes the resource ID because the
// API may normalize it.
func resourceLoggingMetricCreate(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)

	obj := make(map[string]interface{})
	nameProp, err := expandLoggingMetricName(d.Get("name"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("name"); !isEmptyValue(reflect.ValueOf(nameProp)) && (ok || !reflect.DeepEqual(v, nameProp)) {
		obj["name"] = nameProp
	}
	descriptionProp, err := expandLoggingMetricDescription(d.Get("description"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("description"); !isEmptyValue(reflect.ValueOf(descriptionProp)) && (ok || !reflect.DeepEqual(v, descriptionProp)) {
		obj["description"] = descriptionProp
	}
	filterProp, err := expandLoggingMetricFilter(d.Get("filter"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("filter"); !isEmptyValue(reflect.ValueOf(filterProp)) && (ok || !reflect.DeepEqual(v, filterProp)) {
		obj["filter"] = filterProp
	}
	metricDescriptorProp, err := expandLoggingMetricMetricDescriptor(d.Get("metric_descriptor"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("metric_descriptor"); !isEmptyValue(reflect.ValueOf(metricDescriptorProp)) && (ok || !reflect.DeepEqual(v, metricDescriptorProp)) {
		obj["metricDescriptor"] = metricDescriptorProp
	}
	labelExtractorsProp, err := expandLoggingMetricLabelExtractors(d.Get("label_extractors"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("label_extractors"); !isEmptyValue(reflect.ValueOf(labelExtractorsProp)) && (ok || !reflect.DeepEqual(v, labelExtractorsProp)) {
		obj["labelExtractors"] = labelExtractorsProp
	}
	valueExtractorProp, err := expandLoggingMetricValueExtractor(d.Get("value_extractor"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("value_extractor"); !isEmptyValue(reflect.ValueOf(valueExtractorProp)) && (ok || !reflect.DeepEqual(v, valueExtractorProp)) {
		obj["valueExtractor"] = valueExtractorProp
	}
	bucketOptionsProp, err := expandLoggingMetricBucketOptions(d.Get("bucket_options"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("bucket_options"); !isEmptyValue(reflect.ValueOf(bucketOptionsProp)) && (ok || !reflect.DeepEqual(v, bucketOptionsProp)) {
		obj["bucketOptions"] = bucketOptionsProp
	}

	// Serialize metric mutations per project to avoid API contention.
	lockName, err := replaceVars(d, config, "customMetric/{{project}}")
	if err != nil {
		return err
	}
	mutexKV.Lock(lockName)
	defer mutexKV.Unlock(lockName)

	url, err := replaceVars(d, config, "https://logging.googleapis.com/v2/projects/{{project}}/metrics")
	if err != nil {
		return err
	}

	log.Printf("[DEBUG] Creating new Metric: %#v", obj)
	res, err := sendRequestWithTimeout(config, "POST", url, obj, d.Timeout(schema.TimeoutCreate))
	if err != nil {
		return fmt.Errorf("Error creating Metric: %s", err)
	}

	// Store the ID now
	id, err := replaceVars(d, config, "{{name}}")
	if err != nil {
		return fmt.Errorf("Error constructing id: %s", err)
	}
	d.SetId(id)

	log.Printf("[DEBUG] Finished creating Metric %q: %#v", d.Id(), res)

	// `name` is autogenerated from the api so needs to be set post-create.
	// Use a checked type assertion so a malformed response returns an error
	// instead of panicking.
	name, ok := res["name"].(string)
	if !ok {
		return fmt.Errorf("Create response didn't contain critical fields. Create may not have succeeded.")
	}
	d.Set("name", name)
	d.SetId(name)

	return resourceLoggingMetricRead(d, meta)
}
// resourceLoggingMetricRead fetches the metric from the Logging API and syncs
// every schema field into state. A 404 clears the resource from state.
func resourceLoggingMetricRead(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)

	url, err := replaceVars(d, config, "https://logging.googleapis.com/v2/projects/{{project}}/metrics/{{%name}}")
	if err != nil {
		return err
	}

	res, err := sendRequest(config, "GET", url, nil)
	if err != nil {
		// handleNotFoundError removes the resource from state on 404.
		return handleNotFoundError(err, d, fmt.Sprintf("LoggingMetric %q", d.Id()))
	}

	project, err := getProject(d, config)
	if err != nil {
		return err
	}
	if err := d.Set("project", project); err != nil {
		return fmt.Errorf("Error reading Metric: %s", err)
	}

	// Flatten each API field back into its schema representation.
	if err := d.Set("name", flattenLoggingMetricName(res["name"], d)); err != nil {
		return fmt.Errorf("Error reading Metric: %s", err)
	}
	if err := d.Set("description", flattenLoggingMetricDescription(res["description"], d)); err != nil {
		return fmt.Errorf("Error reading Metric: %s", err)
	}
	if err := d.Set("filter", flattenLoggingMetricFilter(res["filter"], d)); err != nil {
		return fmt.Errorf("Error reading Metric: %s", err)
	}
	if err := d.Set("metric_descriptor", flattenLoggingMetricMetricDescriptor(res["metricDescriptor"], d)); err != nil {
		return fmt.Errorf("Error reading Metric: %s", err)
	}
	if err := d.Set("label_extractors", flattenLoggingMetricLabelExtractors(res["labelExtractors"], d)); err != nil {
		return fmt.Errorf("Error reading Metric: %s", err)
	}
	if err := d.Set("value_extractor", flattenLoggingMetricValueExtractor(res["valueExtractor"], d)); err != nil {
		return fmt.Errorf("Error reading Metric: %s", err)
	}
	if err := d.Set("bucket_options", flattenLoggingMetricBucketOptions(res["bucketOptions"], d)); err != nil {
		return fmt.Errorf("Error reading Metric: %s", err)
	}

	return nil
}
// resourceLoggingMetricUpdate expands the full desired configuration and
// replaces the metric with a PUT (the metrics endpoint updates the whole
// object rather than patching individual fields).
func resourceLoggingMetricUpdate(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)

	obj := make(map[string]interface{})
	// NOTE(review): unlike Create, the emptiness guard here inspects the
	// configured value `v` rather than the expanded prop — this matches the
	// generator's update template; confirm against Magic Modules if changing.
	nameProp, err := expandLoggingMetricName(d.Get("name"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("name"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, nameProp)) {
		obj["name"] = nameProp
	}
	descriptionProp, err := expandLoggingMetricDescription(d.Get("description"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("description"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, descriptionProp)) {
		obj["description"] = descriptionProp
	}
	filterProp, err := expandLoggingMetricFilter(d.Get("filter"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("filter"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, filterProp)) {
		obj["filter"] = filterProp
	}
	metricDescriptorProp, err := expandLoggingMetricMetricDescriptor(d.Get("metric_descriptor"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("metric_descriptor"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, metricDescriptorProp)) {
		obj["metricDescriptor"] = metricDescriptorProp
	}
	labelExtractorsProp, err := expandLoggingMetricLabelExtractors(d.Get("label_extractors"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("label_extractors"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, labelExtractorsProp)) {
		obj["labelExtractors"] = labelExtractorsProp
	}
	valueExtractorProp, err := expandLoggingMetricValueExtractor(d.Get("value_extractor"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("value_extractor"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, valueExtractorProp)) {
		obj["valueExtractor"] = valueExtractorProp
	}
	bucketOptionsProp, err := expandLoggingMetricBucketOptions(d.Get("bucket_options"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("bucket_options"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, bucketOptionsProp)) {
		obj["bucketOptions"] = bucketOptionsProp
	}

	// Serialize metric mutations per project to avoid API contention.
	lockName, err := replaceVars(d, config, "customMetric/{{project}}")
	if err != nil {
		return err
	}
	mutexKV.Lock(lockName)
	defer mutexKV.Unlock(lockName)

	url, err := replaceVars(d, config, "https://logging.googleapis.com/v2/projects/{{project}}/metrics/{{%name}}")
	if err != nil {
		return err
	}

	log.Printf("[DEBUG] Updating Metric %q: %#v", d.Id(), obj)
	_, err = sendRequestWithTimeout(config, "PUT", url, obj, d.Timeout(schema.TimeoutUpdate))
	if err != nil {
		return fmt.Errorf("Error updating Metric %q: %s", d.Id(), err)
	}

	return resourceLoggingMetricRead(d, meta)
}
// resourceLoggingMetricDelete removes the logs-based metric via the Logging
// API. A 404 is treated as already deleted (state is cleared).
func resourceLoggingMetricDelete(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)

	// Serialize metric mutations per project.
	lockName, err := replaceVars(d, config, "customMetric/{{project}}")
	if err != nil {
		return err
	}
	mutexKV.Lock(lockName)
	defer mutexKV.Unlock(lockName)

	deleteURL, err := replaceVars(d, config, "https://logging.googleapis.com/v2/projects/{{project}}/metrics/{{%name}}")
	if err != nil {
		return err
	}

	log.Printf("[DEBUG] Deleting Metric %q", d.Id())
	// DELETE carries no request body.
	res, err := sendRequestWithTimeout(config, "DELETE", deleteURL, nil, d.Timeout(schema.TimeoutDelete))
	if err != nil {
		return handleNotFoundError(err, d, "Metric")
	}

	log.Printf("[DEBUG] Finished deleting Metric %q: %#v", d.Id(), res)
	return nil
}
// resourceLoggingMetricImport supports `terraform import` by treating the
// entire import ID as the metric name.
func resourceLoggingMetricImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
	config := meta.(*Config)

	// current import_formats can't import fields with forward slashes in their value
	err := parseImportId([]string{"(?P<name>.+)"}, d, config)
	if err != nil {
		return nil, err
	}

	return []*schema.ResourceData{d}, nil
}
// flattenLoggingMetricName passes the API `name` value through to state
// unchanged.
func flattenLoggingMetricName(v interface{}, d *schema.ResourceData) interface{} {
	return v
}

// flattenLoggingMetricDescription passes the API `description` value through
// to state unchanged.
func flattenLoggingMetricDescription(v interface{}, d *schema.ResourceData) interface{} {
	return v
}

// flattenLoggingMetricFilter passes the API `filter` value through to state
// unchanged.
func flattenLoggingMetricFilter(v interface{}, d *schema.ResourceData) interface{} {
	return v
}
// flattenLoggingMetricMetricDescriptor converts the API metricDescriptor
// object into the single-element list form used by the schema. Nil or empty
// API objects flatten to nil.
func flattenLoggingMetricMetricDescriptor(v interface{}, d *schema.ResourceData) interface{} {
	if v == nil {
		return nil
	}
	apiObj := v.(map[string]interface{})
	if len(apiObj) == 0 {
		return nil
	}
	return []interface{}{map[string]interface{}{
		"value_type":  flattenLoggingMetricMetricDescriptorValueType(apiObj["valueType"], d),
		"metric_kind": flattenLoggingMetricMetricDescriptorMetricKind(apiObj["metricKind"], d),
		"labels":      flattenLoggingMetricMetricDescriptorLabels(apiObj["labels"], d),
	}}
}
// flattenLoggingMetricMetricDescriptorValueType passes the API value through
// unchanged.
func flattenLoggingMetricMetricDescriptorValueType(v interface{}, d *schema.ResourceData) interface{} {
	return v
}

// flattenLoggingMetricMetricDescriptorMetricKind passes the API value through
// unchanged.
func flattenLoggingMetricMetricDescriptorMetricKind(v interface{}, d *schema.ResourceData) interface{} {
	return v
}

// flattenLoggingMetricMetricDescriptorLabels converts the API labels array
// into the schema's list-of-objects form, dropping empty objects.
func flattenLoggingMetricMetricDescriptorLabels(v interface{}, d *schema.ResourceData) interface{} {
	if v == nil {
		return v
	}
	l := v.([]interface{})
	transformed := make([]interface{}, 0, len(l))
	for _, raw := range l {
		original := raw.(map[string]interface{})
		if len(original) < 1 {
			// Do not include empty json objects coming back from the api
			continue
		}
		transformed = append(transformed, map[string]interface{}{
			"key":         flattenLoggingMetricMetricDescriptorLabelsKey(original["key"], d),
			"description": flattenLoggingMetricMetricDescriptorLabelsDescription(original["description"], d),
			"value_type":  flattenLoggingMetricMetricDescriptorLabelsValueType(original["valueType"], d),
		})
	}
	return transformed
}

// flattenLoggingMetricMetricDescriptorLabelsKey passes the API value through
// unchanged.
func flattenLoggingMetricMetricDescriptorLabelsKey(v interface{}, d *schema.ResourceData) interface{} {
	return v
}

// flattenLoggingMetricMetricDescriptorLabelsDescription passes the API value
// through unchanged.
func flattenLoggingMetricMetricDescriptorLabelsDescription(v interface{}, d *schema.ResourceData) interface{} {
	return v
}
// flattenLoggingMetricMetricDescriptorLabelsValueType normalizes the label
// value type read from the API. The API omits the field when it holds the
// default, so missing/empty values flatten to "STRING" to match the schema
// default.
func flattenLoggingMetricMetricDescriptorLabelsValueType(v interface{}, d *schema.ResourceData) interface{} {
	// Checked assertion: an unexpected non-string payload falls back to the
	// default instead of panicking (the original `v.(string)` would panic).
	if s, ok := v.(string); !ok || s == "" {
		return "STRING"
	}
	return v
}
// flattenLoggingMetricLabelExtractors passes the API map through to state
// unchanged.
func flattenLoggingMetricLabelExtractors(v interface{}, d *schema.ResourceData) interface{} {
	return v
}

// flattenLoggingMetricValueExtractor passes the API value through to state
// unchanged.
func flattenLoggingMetricValueExtractor(v interface{}, d *schema.ResourceData) interface{} {
	return v
}
// flattenLoggingMetricBucketOptions converts the API bucketOptions object
// into the single-element list form used by the schema. Nil or empty API
// objects flatten to nil.
func flattenLoggingMetricBucketOptions(v interface{}, d *schema.ResourceData) interface{} {
	if v == nil {
		return nil
	}
	apiObj := v.(map[string]interface{})
	if len(apiObj) == 0 {
		return nil
	}
	return []interface{}{map[string]interface{}{
		"linear_buckets":      flattenLoggingMetricBucketOptionsLinearBuckets(apiObj["linearBuckets"], d),
		"exponential_buckets": flattenLoggingMetricBucketOptionsExponentialBuckets(apiObj["exponentialBuckets"], d),
		"explicit":            flattenLoggingMetricBucketOptionsExplicit(apiObj["explicit"], d),
	}}
}
// flattenLoggingMetricBucketOptionsLinearBuckets converts the API
// linearBuckets object into the schema's single-element list form.
func flattenLoggingMetricBucketOptionsLinearBuckets(v interface{}, d *schema.ResourceData) interface{} {
	if v == nil {
		return nil
	}
	original := v.(map[string]interface{})
	if len(original) == 0 {
		return nil
	}
	transformed := make(map[string]interface{})
	transformed["num_finite_buckets"] =
		flattenLoggingMetricBucketOptionsLinearBucketsNumFiniteBuckets(original["numFiniteBuckets"], d)
	transformed["width"] =
		flattenLoggingMetricBucketOptionsLinearBucketsWidth(original["width"], d)
	transformed["offset"] =
		flattenLoggingMetricBucketOptionsLinearBucketsOffset(original["offset"], d)
	return []interface{}{transformed}
}

// flattenLoggingMetricBucketOptionsLinearBucketsNumFiniteBuckets converts a
// string-encoded integer from the API into an int64; other values pass
// through.
func flattenLoggingMetricBucketOptionsLinearBucketsNumFiniteBuckets(v interface{}, d *schema.ResourceData) interface{} {
	// Handles the string fixed64 format
	if strVal, ok := v.(string); ok {
		if intVal, err := strconv.ParseInt(strVal, 10, 64); err == nil {
			return intVal
		} // let terraform core handle it if we can't convert the string to an int.
	}
	return v
}

// flattenLoggingMetricBucketOptionsLinearBucketsWidth converts a
// string-encoded integer from the API into an int64; other values pass
// through.
// NOTE(review): the API defines width as a double, so JSON delivers a
// float64 and the string branch is likely never taken — confirm against the
// Logging API before relying on it.
func flattenLoggingMetricBucketOptionsLinearBucketsWidth(v interface{}, d *schema.ResourceData) interface{} {
	// Handles the string fixed64 format
	if strVal, ok := v.(string); ok {
		if intVal, err := strconv.ParseInt(strVal, 10, 64); err == nil {
			return intVal
		} // let terraform core handle it if we can't convert the string to an int.
	}
	return v
}

// flattenLoggingMetricBucketOptionsLinearBucketsOffset passes the API value
// through unchanged.
func flattenLoggingMetricBucketOptionsLinearBucketsOffset(v interface{}, d *schema.ResourceData) interface{} {
	return v
}

// flattenLoggingMetricBucketOptionsExponentialBuckets converts the API
// exponentialBuckets object into the schema's single-element list form.
func flattenLoggingMetricBucketOptionsExponentialBuckets(v interface{}, d *schema.ResourceData) interface{} {
	if v == nil {
		return nil
	}
	original := v.(map[string]interface{})
	if len(original) == 0 {
		return nil
	}
	transformed := make(map[string]interface{})
	transformed["num_finite_buckets"] =
		flattenLoggingMetricBucketOptionsExponentialBucketsNumFiniteBuckets(original["numFiniteBuckets"], d)
	transformed["growth_factor"] =
		flattenLoggingMetricBucketOptionsExponentialBucketsGrowthFactor(original["growthFactor"], d)
	transformed["scale"] =
		flattenLoggingMetricBucketOptionsExponentialBucketsScale(original["scale"], d)
	return []interface{}{transformed}
}

// flattenLoggingMetricBucketOptionsExponentialBucketsNumFiniteBuckets
// converts a string-encoded integer from the API into an int64; other values
// pass through.
func flattenLoggingMetricBucketOptionsExponentialBucketsNumFiniteBuckets(v interface{}, d *schema.ResourceData) interface{} {
	// Handles the string fixed64 format
	if strVal, ok := v.(string); ok {
		if intVal, err := strconv.ParseInt(strVal, 10, 64); err == nil {
			return intVal
		} // let terraform core handle it if we can't convert the string to an int.
	}
	return v
}

// flattenLoggingMetricBucketOptionsExponentialBucketsGrowthFactor converts a
// string-encoded integer from the API into an int64; other values pass
// through.
// NOTE(review): the API defines growthFactor as a double, so JSON delivers a
// float64 and the string branch is likely never taken — confirm against the
// Logging API before relying on it.
func flattenLoggingMetricBucketOptionsExponentialBucketsGrowthFactor(v interface{}, d *schema.ResourceData) interface{} {
	// Handles the string fixed64 format
	if strVal, ok := v.(string); ok {
		if intVal, err := strconv.ParseInt(strVal, 10, 64); err == nil {
			return intVal
		} // let terraform core handle it if we can't convert the string to an int.
	}
	return v
}

// flattenLoggingMetricBucketOptionsExponentialBucketsScale passes the API
// value through unchanged.
func flattenLoggingMetricBucketOptionsExponentialBucketsScale(v interface{}, d *schema.ResourceData) interface{} {
	return v
}

// flattenLoggingMetricBucketOptionsExplicit converts the API explicit-buckets
// object into the schema's single-element list form.
func flattenLoggingMetricBucketOptionsExplicit(v interface{}, d *schema.ResourceData) interface{} {
	if v == nil {
		return nil
	}
	original := v.(map[string]interface{})
	if len(original) == 0 {
		return nil
	}
	transformed := make(map[string]interface{})
	transformed["bounds"] =
		flattenLoggingMetricBucketOptionsExplicitBounds(original["bounds"], d)
	return []interface{}{transformed}
}

// flattenLoggingMetricBucketOptionsExplicitBounds passes the API bounds list
// through unchanged.
func flattenLoggingMetricBucketOptionsExplicitBounds(v interface{}, d *schema.ResourceData) interface{} {
	return v
}
// expandLoggingMetricName passes the configured `name` through to the API
// request unchanged.
func expandLoggingMetricName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandLoggingMetricDescription passes the configured `description` through
// to the API request unchanged.
func expandLoggingMetricDescription(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandLoggingMetricFilter passes the configured `filter` through to the API
// request unchanged.
func expandLoggingMetricFilter(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandLoggingMetricMetricDescriptor maps the single-element
// metric_descriptor config block onto the API's metricDescriptor object,
// omitting empty fields.
func expandLoggingMetricMetricDescriptor(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	l := v.([]interface{})
	if len(l) == 0 || l[0] == nil {
		return nil, nil
	}
	original := l[0].(map[string]interface{})
	req := make(map[string]interface{})

	valueType, err := expandLoggingMetricMetricDescriptorValueType(original["value_type"], d, config)
	if err != nil {
		return nil, err
	}
	if val := reflect.ValueOf(valueType); val.IsValid() && !isEmptyValue(val) {
		req["valueType"] = valueType
	}

	metricKind, err := expandLoggingMetricMetricDescriptorMetricKind(original["metric_kind"], d, config)
	if err != nil {
		return nil, err
	}
	if val := reflect.ValueOf(metricKind); val.IsValid() && !isEmptyValue(val) {
		req["metricKind"] = metricKind
	}

	labels, err := expandLoggingMetricMetricDescriptorLabels(original["labels"], d, config)
	if err != nil {
		return nil, err
	}
	if val := reflect.ValueOf(labels); val.IsValid() && !isEmptyValue(val) {
		req["labels"] = labels
	}

	return req, nil
}
// expandLoggingMetricMetricDescriptorValueType passes the configured value
// through unchanged.
func expandLoggingMetricMetricDescriptorValueType(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandLoggingMetricMetricDescriptorMetricKind passes the configured value
// through unchanged.
func expandLoggingMetricMetricDescriptorMetricKind(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandLoggingMetricMetricDescriptorLabels maps the labels config list onto
// the API's labels array, omitting empty fields from each entry.
func expandLoggingMetricMetricDescriptorLabels(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	l := v.([]interface{})
	req := make([]interface{}, 0, len(l))
	for _, raw := range l {
		if raw == nil {
			continue
		}
		original := raw.(map[string]interface{})
		transformed := make(map[string]interface{})

		transformedKey, err := expandLoggingMetricMetricDescriptorLabelsKey(original["key"], d, config)
		if err != nil {
			return nil, err
		} else if val := reflect.ValueOf(transformedKey); val.IsValid() && !isEmptyValue(val) {
			transformed["key"] = transformedKey
		}

		transformedDescription, err := expandLoggingMetricMetricDescriptorLabelsDescription(original["description"], d, config)
		if err != nil {
			return nil, err
		} else if val := reflect.ValueOf(transformedDescription); val.IsValid() && !isEmptyValue(val) {
			transformed["description"] = transformedDescription
		}

		transformedValueType, err := expandLoggingMetricMetricDescriptorLabelsValueType(original["value_type"], d, config)
		if err != nil {
			return nil, err
		} else if val := reflect.ValueOf(transformedValueType); val.IsValid() && !isEmptyValue(val) {
			transformed["valueType"] = transformedValueType
		}

		req = append(req, transformed)
	}
	return req, nil
}

// expandLoggingMetricMetricDescriptorLabelsKey passes the configured value
// through unchanged.
func expandLoggingMetricMetricDescriptorLabelsKey(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandLoggingMetricMetricDescriptorLabelsDescription passes the configured
// value through unchanged.
func expandLoggingMetricMetricDescriptorLabelsDescription(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandLoggingMetricMetricDescriptorLabelsValueType passes the configured
// value through unchanged.
func expandLoggingMetricMetricDescriptorLabelsValueType(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandLoggingMetricLabelExtractors converts the label_extractors config map
// into the map[string]string the API expects. A nil input yields an empty
// (non-nil) map.
func expandLoggingMetricLabelExtractors(v interface{}, d TerraformResourceData, config *Config) (map[string]string, error) {
	result := make(map[string]string)
	if v == nil {
		return result, nil
	}
	for key, value := range v.(map[string]interface{}) {
		result[key] = value.(string)
	}
	return result, nil
}
// expandLoggingMetricValueExtractor passes the configured value through
// unchanged.
func expandLoggingMetricValueExtractor(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandLoggingMetricBucketOptions maps the single-element bucket_options
// config block onto the API's bucketOptions object, omitting empty
// sub-blocks.
func expandLoggingMetricBucketOptions(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	l := v.([]interface{})
	if len(l) == 0 || l[0] == nil {
		return nil, nil
	}
	raw := l[0]
	original := raw.(map[string]interface{})
	transformed := make(map[string]interface{})

	transformedLinearBuckets, err := expandLoggingMetricBucketOptionsLinearBuckets(original["linear_buckets"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedLinearBuckets); val.IsValid() && !isEmptyValue(val) {
		transformed["linearBuckets"] = transformedLinearBuckets
	}

	transformedExponentialBuckets, err := expandLoggingMetricBucketOptionsExponentialBuckets(original["exponential_buckets"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedExponentialBuckets); val.IsValid() && !isEmptyValue(val) {
		transformed["exponentialBuckets"] = transformedExponentialBuckets
	}

	transformedExplicit, err := expandLoggingMetricBucketOptionsExplicit(original["explicit"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedExplicit); val.IsValid() && !isEmptyValue(val) {
		transformed["explicit"] = transformedExplicit
	}

	return transformed, nil
}

// expandLoggingMetricBucketOptionsLinearBuckets maps the single-element
// linear_buckets config block onto the API's linearBuckets object.
func expandLoggingMetricBucketOptionsLinearBuckets(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	l := v.([]interface{})
	if len(l) == 0 || l[0] == nil {
		return nil, nil
	}
	raw := l[0]
	original := raw.(map[string]interface{})
	transformed := make(map[string]interface{})

	transformedNumFiniteBuckets, err := expandLoggingMetricBucketOptionsLinearBucketsNumFiniteBuckets(original["num_finite_buckets"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedNumFiniteBuckets); val.IsValid() && !isEmptyValue(val) {
		transformed["numFiniteBuckets"] = transformedNumFiniteBuckets
	}

	transformedWidth, err := expandLoggingMetricBucketOptionsLinearBucketsWidth(original["width"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedWidth); val.IsValid() && !isEmptyValue(val) {
		transformed["width"] = transformedWidth
	}

	transformedOffset, err := expandLoggingMetricBucketOptionsLinearBucketsOffset(original["offset"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedOffset); val.IsValid() && !isEmptyValue(val) {
		transformed["offset"] = transformedOffset
	}

	return transformed, nil
}

// expandLoggingMetricBucketOptionsLinearBucketsNumFiniteBuckets passes the
// configured value through unchanged.
func expandLoggingMetricBucketOptionsLinearBucketsNumFiniteBuckets(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandLoggingMetricBucketOptionsLinearBucketsWidth passes the configured
// value through unchanged.
func expandLoggingMetricBucketOptionsLinearBucketsWidth(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandLoggingMetricBucketOptionsLinearBucketsOffset passes the configured
// value through unchanged.
func expandLoggingMetricBucketOptionsLinearBucketsOffset(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandLoggingMetricBucketOptionsExponentialBuckets maps the single-element
// exponential_buckets config block onto the API's exponentialBuckets object.
func expandLoggingMetricBucketOptionsExponentialBuckets(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	l := v.([]interface{})
	if len(l) == 0 || l[0] == nil {
		return nil, nil
	}
	raw := l[0]
	original := raw.(map[string]interface{})
	transformed := make(map[string]interface{})

	transformedNumFiniteBuckets, err := expandLoggingMetricBucketOptionsExponentialBucketsNumFiniteBuckets(original["num_finite_buckets"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedNumFiniteBuckets); val.IsValid() && !isEmptyValue(val) {
		transformed["numFiniteBuckets"] = transformedNumFiniteBuckets
	}

	transformedGrowthFactor, err := expandLoggingMetricBucketOptionsExponentialBucketsGrowthFactor(original["growth_factor"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedGrowthFactor); val.IsValid() && !isEmptyValue(val) {
		transformed["growthFactor"] = transformedGrowthFactor
	}

	transformedScale, err := expandLoggingMetricBucketOptionsExponentialBucketsScale(original["scale"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedScale); val.IsValid() && !isEmptyValue(val) {
		transformed["scale"] = transformedScale
	}

	return transformed, nil
}

// expandLoggingMetricBucketOptionsExponentialBucketsNumFiniteBuckets passes
// the configured value through unchanged.
func expandLoggingMetricBucketOptionsExponentialBucketsNumFiniteBuckets(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandLoggingMetricBucketOptionsExponentialBucketsGrowthFactor passes the
// configured value through unchanged.
func expandLoggingMetricBucketOptionsExponentialBucketsGrowthFactor(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandLoggingMetricBucketOptionsExponentialBucketsScale passes the
// configured value through unchanged.
func expandLoggingMetricBucketOptionsExponentialBucketsScale(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandLoggingMetricBucketOptionsExplicit maps the single-element explicit
// config block onto the API's explicit-buckets object.
func expandLoggingMetricBucketOptionsExplicit(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	l := v.([]interface{})
	if len(l) == 0 || l[0] == nil {
		return nil, nil
	}
	raw := l[0]
	original := raw.(map[string]interface{})
	transformed := make(map[string]interface{})

	transformedBounds, err := expandLoggingMetricBucketOptionsExplicitBounds(original["bounds"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedBounds); val.IsValid() && !isEmptyValue(val) {
		transformed["bounds"] = transformedBounds
	}

	return transformed, nil
}

// expandLoggingMetricBucketOptionsExplicitBounds passes the configured bounds
// list through unchanged.
func expandLoggingMetricBucketOptionsExplicitBounds(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

View File

@ -0,0 +1,101 @@
// ----------------------------------------------------------------------------
//
// *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
//
// ----------------------------------------------------------------------------
//
// This file is automatically generated by Magic Modules and manual
// changes will be clobbered when the file is regenerated.
//
// Please read more about how to change this file in
// .github/CONTRIBUTING.md.
//
// ----------------------------------------------------------------------------
package google
import (
"fmt"
"strings"
"testing"
"github.com/hashicorp/terraform/helper/acctest"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
)
// TestAccLoggingMetric_loggingMetricBasicExample provisions the generated
// basic example config, then verifies the resource round-trips through
// import. Acceptance test: requires real GCP credentials.
func TestAccLoggingMetric_loggingMetricBasicExample(t *testing.T) {
	t.Parallel()

	// Random suffix keeps the metric name unique across concurrent runs.
	context := map[string]interface{}{
		"random_suffix": acctest.RandString(10),
	}

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckLoggingMetricDestroy,
		Steps: []resource.TestStep{
			{
				Config: testAccLoggingMetric_loggingMetricBasicExample(context),
			},
			{
				ResourceName:      "google_logging_metric.logging_metric",
				ImportState:       true,
				ImportStateVerify: true,
			},
		},
	})
}
// testAccLoggingMetric_loggingMetricBasicExample renders the basic example
// HCL, substituting %{...} keys from context via Nprintf. The metric name
// deliberately contains parentheses and a slash to exercise import of names
// with special characters.
func testAccLoggingMetric_loggingMetricBasicExample(context map[string]interface{}) string {
	return Nprintf(`
resource "google_logging_metric" "logging_metric" {
name = "my-(custom)/metric-%{random_suffix}"
filter = "resource.type=gae_app AND severity>=ERROR"
metric_descriptor {
metric_kind = "DELTA"
value_type = "DISTRIBUTION"
labels {
key = "mass"
value_type = "STRING"
description = "amount of matter"
}
}
value_extractor = "EXTRACT(jsonPayload.request)"
label_extractors = { "mass": "EXTRACT(jsonPayload.request)" }
bucket_options {
linear_buckets {
num_finite_buckets = 3
width = 1
offset = 1
}
}
}
`, context)
}
// testAccCheckLoggingMetricDestroy verifies that every google_logging_metric
// resource recorded in the state has really been deleted from the API: a GET
// on the metric URL must fail once the resource is destroyed.
func testAccCheckLoggingMetricDestroy(s *terraform.State) error {
	for name, rs := range s.RootModule().Resources {
		// Only managed logging-metric resources are relevant; skip
		// everything else, including data sources.
		if rs.Type != "google_logging_metric" || strings.HasPrefix(name, "data.") {
			continue
		}

		cfg := testAccProvider.Meta().(*Config)

		// {{%name}} URL-escapes the metric name, which may contain '/'.
		metricURL, err := replaceVarsForTest(rs, "https://logging.googleapis.com/v2/projects/{{project}}/metrics/{{%name}}")
		if err != nil {
			return err
		}

		// A successful GET means the metric still exists — that is a failure.
		if _, err := sendRequest(cfg, "GET", metricURL, nil); err == nil {
			return fmt.Errorf("LoggingMetric still exists at %s", metricURL)
		}
	}

	return nil
}

View File

@ -0,0 +1,53 @@
package google
import (
"fmt"
"testing"
"github.com/hashicorp/terraform/helper/acctest"
"github.com/hashicorp/terraform/helper/resource"
)
// TestAccLoggingMetric_update checks that the filter of an existing
// google_logging_metric can be changed in place, and that the resource
// imports cleanly both before and after the update.
func TestAccLoggingMetric_update(t *testing.T) {
	t.Parallel()

	suffix := acctest.RandString(10)
	initialFilter := "resource.type=gae_app AND severity>=ERROR"
	changedFilter := "resource.type=gae_app AND severity=ERROR"

	// The same import step is re-run after each config apply.
	importStep := resource.TestStep{
		ResourceName:      "google_logging_metric.logging_metric",
		ImportState:       true,
		ImportStateVerify: true,
	}

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckLoggingMetricDestroy,
		Steps: []resource.TestStep{
			{Config: testAccLoggingMetric_update(suffix, initialFilter)},
			importStep,
			{Config: testAccLoggingMetric_update(suffix, changedFilter)},
			importStep,
		},
	})
}
// testAccLoggingMetric_update renders a minimal DELTA/INT64 logs-based
// metric config. The filter is parameterized so the acceptance test can
// apply the same resource twice with different filters and exercise an
// in-place update.
func testAccLoggingMetric_update(suffix string, filter string) string {
	return fmt.Sprintf(`
resource "google_logging_metric" "logging_metric" {
name = "my-custom-metric-%s"
filter = "%s"
metric_descriptor {
metric_kind = "DELTA"
value_type = "INT64"
}
}`, suffix, filter)
}

View File

@ -120,7 +120,8 @@ func addQueryParams(rawurl string, params map[string]string) (string, error) {
}
func replaceVars(d TerraformResourceData, config *Config, linkTmpl string) (string, error) {
re := regexp.MustCompile("{{([[:word:]]+)}}")
// https://github.com/google/re2/wiki/Syntax
re := regexp.MustCompile("{{([%[:word:]]+)}}")
f, err := buildReplacementFunc(re, d, config, linkTmpl)
if err != nil {
return "", err
@ -128,6 +129,9 @@ func replaceVars(d TerraformResourceData, config *Config, linkTmpl string) (stri
return re.ReplaceAllStringFunc(linkTmpl, f), nil
}
// This function replaces references to Terraform properties (in the form of {{var}}) with their value in Terraform
// It also replaces {{project}}, {{region}}, and {{zone}} with their appropriate values
// This function supports URL-encoding the result by prepending '%' to the field name e.g. {{%var}}
func buildReplacementFunc(re *regexp.Regexp, d TerraformResourceData, config *Config, linkTmpl string) (func(string) string, error) {
var project, region, zone string
var err error
@ -164,9 +168,16 @@ func buildReplacementFunc(re *regexp.Regexp, d TerraformResourceData, config *Co
if m == "zone" {
return zone
}
v, ok := d.GetOkExists(m)
if ok {
return fmt.Sprintf("%v", v)
if string(m[0]) == "%" {
v, ok := d.GetOkExists(m[1:])
if ok {
return url.PathEscape(fmt.Sprintf("%v", v))
}
} else {
v, ok := d.GetOkExists(m)
if ok {
return fmt.Sprintf("%v", v)
}
}
return ""
}

View File

@ -0,0 +1,229 @@
---
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file in
# .github/CONTRIBUTING.md.
#
# ----------------------------------------------------------------------------
layout: "google"
page_title: "Google: google_logging_metric"
sidebar_current: "docs-google-logging-metric"
description: |-
Logs-based metric can also be used to extract values from logs and create a distribution
of the values.
---
# google\_logging\_metric
Logs-based metric can also be used to extract values from logs and create a distribution
of the values. The distribution records the statistics of the extracted values along with
an optional histogram of the values as specified by the bucket options.
To get more information about Metric, see:
* [API documentation](https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create)
* How-to Guides
* [Official Documentation](https://cloud.google.com/logging/docs/apis)
<div class = "oics-button" style="float: right; margin: 0 0 -15px">
<a href="https://console.cloud.google.com/cloudshell/open?cloudshell_git_repo=https%3A%2F%2Fgithub.com%2Fterraform-google-modules%2Fdocs-examples.git&cloudshell_working_dir=logging_metric_basic&cloudshell_image=gcr.io%2Fgraphite-cloud-shell-images%2Fterraform%3Alatest&open_in_editor=main.tf&cloudshell_print=.%2Fmotd&cloudshell_tutorial=.%2Ftutorial.md" target="_blank">
<img alt="Open in Cloud Shell" src="//gstatic.com/cloudssh/images/open-btn.svg" style="max-height: 44px; margin: 32px auto; max-width: 100%;">
</a>
</div>
## Example Usage - Logging Metric Basic
```hcl
resource "google_logging_metric" "logging_metric" {
name = "my-(custom)/metric"
filter = "resource.type=gae_app AND severity>=ERROR"
metric_descriptor {
metric_kind = "DELTA"
value_type = "DISTRIBUTION"
labels {
key = "mass"
value_type = "STRING"
description = "amount of matter"
}
}
value_extractor = "EXTRACT(jsonPayload.request)"
label_extractors = { "mass": "EXTRACT(jsonPayload.request)" }
bucket_options {
linear_buckets {
num_finite_buckets = 3
width = 1
offset = 1
}
}
}
```
## Argument Reference
The following arguments are supported:
* `name` -
(Required)
The client-assigned metric identifier. Examples - "error_count", "nginx/requests".
Metric identifiers are limited to 100 characters and can include only the following
characters A-Z, a-z, 0-9, and the special characters _-.,+!*',()%/. The forward-slash
character (/) denotes a hierarchy of name pieces, and it cannot be the first character
of the name.
* `filter` -
(Required)
An advanced logs filter (https://cloud.google.com/logging/docs/view/advanced-filters) which
is used to match log entries.
* `metric_descriptor` -
(Required)
The metric descriptor associated with the logs-based metric. Structure is documented below.
The `metric_descriptor` block supports:
* `value_type` -
(Required)
Whether the measurement is an integer, a floating-point number, etc.
Some combinations of metricKind and valueType might not be supported.
* `metric_kind` -
(Required)
Whether the metric records instantaneous values, changes to a value, etc.
Some combinations of metricKind and valueType might not be supported.
* `labels` -
(Optional)
The set of labels that can be used to describe a specific instance of this metric type. For
example, the appengine.googleapis.com/http/server/response_latencies metric type has a label
for the HTTP response code, response_code, so you can look at latencies for successful responses
or just for responses that failed. Structure is documented below.
The `labels` block supports:
* `key` -
(Required)
The label key.
* `description` -
(Optional)
A human-readable description for the label.
* `value_type` -
(Optional)
The type of data that can be assigned to the label.
- - -
* `description` -
(Optional)
A description of this metric, which is used in documentation. The maximum length of the
description is 8000 characters.
* `label_extractors` -
(Optional)
A map from a label key string to an extractor expression which is used to extract data from a log
entry field and assign as the label value. Each label key specified in the LabelDescriptor must
have an associated extractor expression in this map. The syntax of the extractor expression is
the same as for the valueExtractor field.
* `value_extractor` -
(Optional)
A valueExtractor is required when using a distribution logs-based metric to extract the values to
record from a log entry. Two functions are supported for value extraction - EXTRACT(field) or
REGEXP_EXTRACT(field, regex). The arguments are: 1. field - The name of the log entry field from which
the value is to be extracted. 2. regex - A regular expression using the Google RE2 syntax
(https://github.com/google/re2/wiki/Syntax) with a single capture group to extract data from the specified
log entry field. The value of the field is converted to a string before applying the regex. It is an
error to specify a regex that does not include exactly one capture group.
* `bucket_options` -
(Optional)
The bucketOptions are required when the logs-based metric is using a DISTRIBUTION value type and it
describes the bucket boundaries used to create a histogram of the extracted values. Structure is documented below.
* `project` - (Optional) The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
The `bucket_options` block supports:
* `linear_buckets` -
(Optional)
Specifies a linear sequence of buckets that all have the same width (except overflow and underflow).
Each bucket represents a constant absolute uncertainty on the specific value in the bucket. Structure is documented below.
* `exponential_buckets` -
(Optional)
Specifies an exponential sequence of buckets that have a width that is proportional to the value of
the lower bound. Each bucket represents a constant relative uncertainty on a specific value in the bucket. Structure is documented below.
* `explicit` -
(Optional)
Specifies a set of buckets with arbitrary widths. Structure is documented below.
The `linear_buckets` block supports:
* `num_finite_buckets` -
(Optional)
Must be greater than 0.
* `width` -
(Optional)
Must be greater than 0.
* `offset` -
(Optional)
Lower bound of the first bucket.
The `exponential_buckets` block supports:
* `num_finite_buckets` -
(Optional)
Must be greater than 0.
* `growth_factor` -
(Optional)
Must be greater than 1.
* `scale` -
(Optional)
Must be greater than 0.
The `explicit` block supports:
* `bounds` -
(Optional)
The values must be monotonically increasing.
## Timeouts
This resource provides the following
[Timeouts](/docs/configuration/resources.html#timeouts) configuration options:
- `create` - Default is 4 minutes.
- `update` - Default is 4 minutes.
- `delete` - Default is 4 minutes.
## Import
Metric can be imported using any of these accepted formats:
```
$ terraform import google_logging_metric.default {{name}}
```
-> If you're importing a resource with beta features, make sure to include `-provider=google-beta`
as an argument so that Terraform uses the correct provider to import your resource.