Add region choice to dataflow jobs (#1979)

Fixes #1600
This commit is contained in:
Mikaël Gibert 2018-09-07 19:48:33 +02:00 committed by Dana Hoffman
parent 62cee9bcea
commit af3e7375b9
2 changed files with 128 additions and 3 deletions

View File

@ -54,6 +54,12 @@ func resourceDataflowJob() *schema.Resource {
ForceNew: true,
},
"region": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"max_workers": &schema.Schema{
Type: schema.TypeInt,
Optional: true,
@ -100,6 +106,12 @@ func resourceDataflowJobCreate(d *schema.ResourceData, meta interface{}) error {
if err != nil {
return err
}
region, err := getRegion(d, config)
if err != nil {
return err
}
params := expandStringMap(d, "parameters")
env := dataflow.RuntimeEnvironment{
@ -115,7 +127,7 @@ func resourceDataflowJobCreate(d *schema.ResourceData, meta interface{}) error {
Environment: &env,
}
job, err := config.clientDataflow.Projects.Templates.Create(project, &request).Do()
job, err := createJob(config, project, region, &request)
if err != nil {
return err
}
@ -132,9 +144,14 @@ func resourceDataflowJobRead(d *schema.ResourceData, meta interface{}) error {
return err
}
region, err := getRegion(d, config)
if err != nil {
return err
}
id := d.Id()
job, err := config.clientDataflow.Projects.Jobs.Get(project, id).Do()
job, err := getJob(config, project, region, id)
if err != nil {
return handleNotFoundError(err, d, fmt.Sprintf("Dataflow job %s", id))
}
@ -161,6 +178,11 @@ func resourceDataflowJobDelete(d *schema.ResourceData, meta interface{}) error {
return err
}
region, err := getRegion(d, config)
if err != nil {
return err
}
id := d.Id()
requestedState, err := mapOnDelete(d.Get("on_delete").(string))
if err != nil {
@ -171,7 +193,7 @@ func resourceDataflowJobDelete(d *schema.ResourceData, meta interface{}) error {
RequestedState: requestedState,
}
_, err = config.clientDataflow.Projects.Jobs.Update(project, id, job).Do()
_, err = updateJob(config, project, region, id, job)
if err != nil {
if gerr, err_ok := err.(*googleapi.Error); !err_ok {
// If we have an error and it's not a google-specific error, we should go ahead and return.
@ -216,3 +238,24 @@ func mapOnDelete(policy string) (string, error) {
return "", fmt.Errorf("Invalid `on_delete` policy: %s", policy)
}
}
// createJob launches a Dataflow job from a template, dispatching to the
// regional (Locations) API when a region is supplied and to the legacy
// project-level endpoint otherwise.
func createJob(config *Config, project string, region string, request *dataflow.CreateJobFromTemplateRequest) (*dataflow.Job, error) {
	if region != "" {
		return config.clientDataflow.Projects.Locations.Templates.Create(project, region, request).Do()
	}
	return config.clientDataflow.Projects.Templates.Create(project, request).Do()
}
// getJob fetches a Dataflow job by ID, using the regional (Locations) API
// when a region is supplied and the legacy project-level endpoint otherwise.
func getJob(config *Config, project string, region string, id string) (*dataflow.Job, error) {
	if region != "" {
		return config.clientDataflow.Projects.Locations.Jobs.Get(project, region, id).Do()
	}
	return config.clientDataflow.Projects.Jobs.Get(project, id).Do()
}
// updateJob pushes an updated job state (e.g. a cancellation request),
// using the regional (Locations) API when a region is supplied and the
// legacy project-level endpoint otherwise.
func updateJob(config *Config, project string, region string, id string, job *dataflow.Job) (*dataflow.Job, error) {
	if region != "" {
		return config.clientDataflow.Projects.Locations.Jobs.Update(project, region, id, job).Do()
	}
	return config.clientDataflow.Projects.Jobs.Update(project, id, job).Do()
}

View File

@ -27,6 +27,24 @@ func TestAccDataflowJobCreate(t *testing.T) {
})
}
// TestAccDataflowJobRegionCreate verifies that a Dataflow job created with an
// explicit `region` attribute is provisioned through the regional API and can
// be found (and torn down) via that same regional endpoint.
func TestAccDataflowJobRegionCreate(t *testing.T) {
	t.Parallel()

	step := resource.TestStep{
		Config: testAccDataflowJobRegion,
		Check: resource.ComposeTestCheckFunc(
			testAccDataflowJobRegionExists(
				"google_dataflow_job.big_data"),
		),
	}

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckDataflowJobRegionDestroy,
		Steps:        []resource.TestStep{step},
	})
}
func testAccCheckDataflowJobDestroy(s *terraform.State) error {
for _, rs := range s.RootModule().Resources {
if rs.Type != "google_dataflow_job" {
@ -47,6 +65,26 @@ func testAccCheckDataflowJobDestroy(s *terraform.State) error {
return nil
}
// testAccCheckDataflowJobRegionDestroy asserts, via the regional (Locations)
// API, that every google_dataflow_job in the state has reached a terminal
// state after the test tears the configuration down. Dataflow jobs are
// cancelled rather than deleted, so the Get is expected to succeed.
func testAccCheckDataflowJobRegionDestroy(s *terraform.State) error {
	for _, rs := range s.RootModule().Resources {
		if rs.Type != "google_dataflow_job" {
			continue
		}

		config := testAccProvider.Meta().(*Config)
		// Region is hard-coded to match the region in testAccDataflowJobRegion.
		job, err := config.clientDataflow.Projects.Locations.Jobs.Get(config.Project, "us-central1", rs.Primary.ID).Do()
		if job == nil {
			// NOTE(review): a lookup error (even a 404) fails the check here —
			// presumably intentional since cancelled jobs remain retrievable;
			// confirm against the Dataflow API's behavior.
			if err != nil {
				return err
			}
			continue
		}
		if _, terminal := dataflowTerminalStatesMap[job.CurrentState]; !terminal {
			return fmt.Errorf("Job still present")
		}
	}

	return nil
}
func testAccDataflowJobExists(n string) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[n]
@ -67,6 +105,26 @@ func testAccDataflowJobExists(n string) resource.TestCheckFunc {
}
}
// testAccDataflowJobRegionExists returns a check that the named resource has
// an ID in state and that a job with that ID is visible through the regional
// (Locations) Dataflow API.
func testAccDataflowJobRegionExists(n string) resource.TestCheckFunc {
	return func(s *terraform.State) error {
		res, found := s.RootModule().Resources[n]
		if !found {
			return fmt.Errorf("Not found: %s", n)
		}
		if res.Primary.ID == "" {
			return fmt.Errorf("No ID is set")
		}

		config := testAccProvider.Meta().(*Config)
		// Region is hard-coded to match the region in testAccDataflowJobRegion.
		if _, err := config.clientDataflow.Projects.Locations.Jobs.Get(config.Project, "us-central1", res.Primary.ID).Do(); err != nil {
			return fmt.Errorf("Job does not exist")
		}

		return nil
	}
}
var testAccDataflowJob = fmt.Sprintf(`
resource "google_storage_bucket" "temp" {
name = "dfjob-test-%s-temp"
@ -89,3 +147,27 @@ resource "google_dataflow_job" "big_data" {
on_delete = "cancel"
}`, acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv())
// testAccDataflowJobRegion is an acceptance-test configuration that sets an
// explicit `region` on the dataflow job; the hard-coded "us-central1" must
// stay in sync with the region used by the region-aware check helpers.
var testAccDataflowJobRegion = fmt.Sprintf(`
resource "google_storage_bucket" "temp" {
	name = "dfjob-test-%s-temp"
	force_destroy = true
}
resource "google_dataflow_job" "big_data" {
	name = "dfjob-test-%s"
	template_gcs_path = "gs://dataflow-templates/wordcount/template_file"
	temp_gcs_location = "${google_storage_bucket.temp.url}"
	parameters {
		inputFile = "gs://dataflow-samples/shakespeare/kinglear.txt"
		output    = "${google_storage_bucket.temp.url}/output"
	}
	region = "us-central1"
	zone = "us-central1-c"
	project = "%s"
	on_delete = "cancel"
}`, acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv())