mirror of
https://github.com/opentofu/opentofu.git
synced 2025-02-11 08:05:33 -06:00
provider/google: BigQuery Dataset (#13436)
* Vendor BigQuery * Add resource * Add tests * Add documentation * Remove named import * Remove `retain_on_delete` * Fix formatting
This commit is contained in:
parent
e667411cc5
commit
9bd50a1219
@ -13,6 +13,7 @@ import (
|
||||
"golang.org/x/oauth2"
|
||||
"golang.org/x/oauth2/google"
|
||||
"golang.org/x/oauth2/jwt"
|
||||
"google.golang.org/api/bigquery/v2"
|
||||
"google.golang.org/api/cloudbilling/v1"
|
||||
"google.golang.org/api/cloudresourcemanager/v1"
|
||||
"google.golang.org/api/compute/v1"
|
||||
@ -42,6 +43,7 @@ type Config struct {
|
||||
clientSqlAdmin *sqladmin.Service
|
||||
clientIAM *iam.Service
|
||||
clientServiceMan *servicemanagement.APIService
|
||||
clientBigQuery *bigquery.Service
|
||||
}
|
||||
|
||||
func (c *Config) loadAndValidate() error {
|
||||
@ -169,6 +171,13 @@ func (c *Config) loadAndValidate() error {
|
||||
}
|
||||
c.clientBilling.UserAgent = userAgent
|
||||
|
||||
log.Printf("[INFO] Instantiating Google Cloud BigQuery Client...")
|
||||
c.clientBigQuery, err = bigquery.New(client)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c.clientBigQuery.UserAgent = userAgent
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
31
builtin/providers/google/import_bigquery_dataset_test.go
Normal file
31
builtin/providers/google/import_bigquery_dataset_test.go
Normal file
@ -0,0 +1,31 @@
|
||||
package google
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/acctest"
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
// TestAccBigQueryDataset_importBasic is an acceptance test for
// `terraform import`: step 1 creates a dataset, step 2 imports it and
// verifies the imported attributes match the created resource
// (ImportStateVerify). Runs against the live BigQuery API.
func TestAccBigQueryDataset_importBasic(t *testing.T) {
	resourceName := "google_bigquery_dataset.test"
	// Random suffix keeps concurrent test runs from colliding on dataset IDs.
	datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(10))

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckBigQueryDatasetDestroy,
		Steps: []resource.TestStep{
			{
				Config: testAccBigQueryDataset(datasetID),
			},

			{
				ResourceName:      resourceName,
				ImportState:       true,
				ImportStateVerify: true,
			},
		},
	})
}
|
@ -55,6 +55,7 @@ func Provider() terraform.ResourceProvider {
|
||||
},
|
||||
|
||||
ResourcesMap: map[string]*schema.Resource{
|
||||
"google_bigquery_dataset": resourceBigQueryDataset(),
|
||||
"google_compute_autoscaler": resourceComputeAutoscaler(),
|
||||
"google_compute_address": resourceComputeAddress(),
|
||||
"google_compute_backend_service": resourceComputeBackendService(),
|
||||
|
285
builtin/providers/google/resource_bigquery_dataset.go
Normal file
285
builtin/providers/google/resource_bigquery_dataset.go
Normal file
@ -0,0 +1,285 @@
|
||||
package google
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/helper/validation"
|
||||
"google.golang.org/api/bigquery/v2"
|
||||
"google.golang.org/api/googleapi"
|
||||
)
|
||||
|
||||
func resourceBigQueryDataset() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Create: resourceBigQueryDatasetCreate,
|
||||
Read: resourceBigQueryDatasetRead,
|
||||
Update: resourceBigQueryDatasetUpdate,
|
||||
Delete: resourceBigQueryDatasetDelete,
|
||||
Importer: &schema.ResourceImporter{
|
||||
State: schema.ImportStatePassthrough,
|
||||
},
|
||||
Schema: map[string]*schema.Schema{
|
||||
// DatasetId: [Required] A unique ID for this dataset, without the
|
||||
// project name. The ID must contain only letters (a-z, A-Z), numbers
|
||||
// (0-9), or underscores (_). The maximum length is 1,024 characters.
|
||||
"dataset_id": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
ForceNew: true,
|
||||
ValidateFunc: func(v interface{}, k string) (ws []string, errors []error) {
|
||||
value := v.(string)
|
||||
if !regexp.MustCompile(`^[0-9A-Za-z_]+$`).MatchString(value) {
|
||||
errors = append(errors, fmt.Errorf(
|
||||
"%q must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_)", k))
|
||||
}
|
||||
|
||||
if len(value) > 1024 {
|
||||
errors = append(errors, fmt.Errorf(
|
||||
"%q cannot be greater than 1,024 characters", k))
|
||||
}
|
||||
|
||||
return
|
||||
},
|
||||
},
|
||||
|
||||
// ProjectId: [Optional] The ID of the project containing this dataset.
|
||||
"project": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
|
||||
// FriendlyName: [Optional] A descriptive name for the dataset.
|
||||
"friendly_name": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
|
||||
// Description: [Optional] A user-friendly description of the dataset.
|
||||
"description": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
|
||||
// Location: [Experimental] The geographic location where the dataset
|
||||
// should reside. Possible values include EU and US. The default value
|
||||
// is US.
|
||||
"location": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
Default: "US",
|
||||
ValidateFunc: validation.StringInSlice([]string{"US", "EU"}, false),
|
||||
},
|
||||
|
||||
// DefaultTableExpirationMs: [Optional] The default lifetime of all
|
||||
// tables in the dataset, in milliseconds. The minimum value is 3600000
|
||||
// milliseconds (one hour). Once this property is set, all newly-created
|
||||
// tables in the dataset will have an expirationTime property set to the
|
||||
// creation time plus the value in this property, and changing the value
|
||||
// will only affect new tables, not existing ones. When the
|
||||
// expirationTime for a given table is reached, that table will be
|
||||
// deleted automatically. If a table's expirationTime is modified or
|
||||
// removed before the table expires, or if you provide an explicit
|
||||
// expirationTime when creating a table, that value takes precedence
|
||||
// over the default expiration time indicated by this property.
|
||||
"default_table_expiration_ms": {
|
||||
Type: schema.TypeInt,
|
||||
Optional: true,
|
||||
ValidateFunc: func(v interface{}, k string) (ws []string, errors []error) {
|
||||
value := v.(int)
|
||||
if value < 3600000 {
|
||||
errors = append(errors, fmt.Errorf("%q cannot be shorter than 3600000 milliseconds (one hour)", k))
|
||||
}
|
||||
|
||||
return
|
||||
},
|
||||
},
|
||||
|
||||
// Labels: [Experimental] The labels associated with this dataset. You
|
||||
// can use these to organize and group your datasets. You can set this
|
||||
// property when inserting or updating a dataset.
|
||||
"labels": &schema.Schema{
|
||||
Type: schema.TypeMap,
|
||||
Optional: true,
|
||||
Elem: schema.TypeString,
|
||||
},
|
||||
|
||||
// SelfLink: [Output-only] A URL that can be used to access the resource
|
||||
// again. You can use this URL in Get or Update requests to the
|
||||
// resource.
|
||||
"self_link": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
// Etag: [Output-only] A hash of the resource.
|
||||
"etag": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
// CreationTime: [Output-only] The time when this dataset was created,
|
||||
// in milliseconds since the epoch.
|
||||
"creation_time": {
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
// LastModifiedTime: [Output-only] The date when this dataset or any of
|
||||
// its tables was last modified, in milliseconds since the epoch.
|
||||
"last_modified_time": {
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func resourceDataset(d *schema.ResourceData, meta interface{}) (*bigquery.Dataset, error) {
|
||||
config := meta.(*Config)
|
||||
|
||||
project, err := getProject(d, config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
dataset := &bigquery.Dataset{
|
||||
DatasetReference: &bigquery.DatasetReference{
|
||||
DatasetId: d.Get("dataset_id").(string),
|
||||
ProjectId: project,
|
||||
},
|
||||
}
|
||||
|
||||
if v, ok := d.GetOk("friendly_name"); ok {
|
||||
dataset.FriendlyName = v.(string)
|
||||
}
|
||||
|
||||
if v, ok := d.GetOk("description"); ok {
|
||||
dataset.Description = v.(string)
|
||||
}
|
||||
|
||||
if v, ok := d.GetOk("location"); ok {
|
||||
dataset.Location = v.(string)
|
||||
}
|
||||
|
||||
if v, ok := d.GetOk("default_table_expiration_ms"); ok {
|
||||
dataset.DefaultTableExpirationMs = int64(v.(int))
|
||||
}
|
||||
|
||||
if v, ok := d.GetOk("labels"); ok {
|
||||
labels := map[string]string{}
|
||||
|
||||
for k, v := range v.(map[string]interface{}) {
|
||||
labels[k] = v.(string)
|
||||
}
|
||||
|
||||
dataset.Labels = labels
|
||||
}
|
||||
|
||||
return dataset, nil
|
||||
}
|
||||
|
||||
func resourceBigQueryDatasetCreate(d *schema.ResourceData, meta interface{}) error {
|
||||
config := meta.(*Config)
|
||||
|
||||
project, err := getProject(d, config)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
dataset, err := resourceDataset(d, meta)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Printf("[INFO] Creating BigQuery dataset: %s", dataset.DatasetReference.DatasetId)
|
||||
|
||||
res, err := config.clientBigQuery.Datasets.Insert(project, dataset).Do()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Printf("[INFO] BigQuery dataset %s has been created", res.Id)
|
||||
|
||||
d.SetId(res.Id)
|
||||
|
||||
return resourceBigQueryDatasetRead(d, meta)
|
||||
}
|
||||
|
||||
// resourceBigQueryDatasetParseID splits a dataset resource ID of the form
// "projectID:datasetID" (the format of the API's Dataset.Id field) into its
// two components.
//
// If the separator is missing — e.g. a hand-typed `terraform import` ID,
// which reaches this function unvalidated via ImportStatePassthrough — the
// whole string is returned as the dataset ID with an empty project, instead
// of panicking with an index-out-of-range as the previous implementation did.
func resourceBigQueryDatasetParseID(id string) (string, string) {
	// projectID, datasetID
	parts := strings.Split(id, ":")
	if len(parts) < 2 {
		return "", id
	}
	return parts[0], parts[1]
}
|
||||
|
||||
func resourceBigQueryDatasetRead(d *schema.ResourceData, meta interface{}) error {
|
||||
config := meta.(*Config)
|
||||
|
||||
log.Printf("[INFO] Reading BigQuery dataset: %s", d.Id())
|
||||
|
||||
projectID, datasetID := resourceBigQueryDatasetParseID(d.Id())
|
||||
|
||||
res, err := config.clientBigQuery.Datasets.Get(projectID, datasetID).Do()
|
||||
if err != nil {
|
||||
if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 {
|
||||
log.Printf("[WARN] Removing BigQuery dataset %q because it's gone", datasetID)
|
||||
// The resource doesn't exist anymore
|
||||
d.SetId("")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
d.Set("etag", res.Etag)
|
||||
d.Set("labels", res.Labels)
|
||||
d.Set("location", res.Location)
|
||||
d.Set("self_link", res.SelfLink)
|
||||
d.Set("description", res.Description)
|
||||
d.Set("friendly_name", res.FriendlyName)
|
||||
d.Set("creation_time", res.CreationTime)
|
||||
d.Set("last_modified_time", res.LastModifiedTime)
|
||||
d.Set("dataset_id", res.DatasetReference.DatasetId)
|
||||
d.Set("default_table_expiration_ms", res.DefaultTableExpirationMs)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func resourceBigQueryDatasetUpdate(d *schema.ResourceData, meta interface{}) error {
|
||||
config := meta.(*Config)
|
||||
|
||||
dataset, err := resourceDataset(d, meta)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Printf("[INFO] Updating BigQuery dataset: %s", d.Id())
|
||||
|
||||
projectID, datasetID := resourceBigQueryDatasetParseID(d.Id())
|
||||
|
||||
if _, err = config.clientBigQuery.Datasets.Update(projectID, datasetID, dataset).Do(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return resourceBigQueryDatasetRead(d, meta)
|
||||
}
|
||||
|
||||
func resourceBigQueryDatasetDelete(d *schema.ResourceData, meta interface{}) error {
|
||||
config := meta.(*Config)
|
||||
|
||||
log.Printf("[INFO] Deleting BigQuery dataset: %s", d.Id())
|
||||
|
||||
projectID, datasetID := resourceBigQueryDatasetParseID(d.Id())
|
||||
|
||||
if err := config.clientBigQuery.Datasets.Delete(projectID, datasetID).Do(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
d.SetId("")
|
||||
return nil
|
||||
}
|
112
builtin/providers/google/resource_bigquery_dataset_test.go
Normal file
112
builtin/providers/google/resource_bigquery_dataset_test.go
Normal file
@ -0,0 +1,112 @@
|
||||
package google
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/acctest"
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
// TestAccBigQueryDataset_basic is an acceptance test covering create and
// in-place update: step 1 applies the initial configuration, step 2 applies
// an updated one, and each step checks via the API that the dataset exists.
func TestAccBigQueryDataset_basic(t *testing.T) {
	// Random suffix keeps concurrent test runs from colliding on dataset IDs.
	datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(10))

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckBigQueryDatasetDestroy,
		Steps: []resource.TestStep{
			{
				Config: testAccBigQueryDataset(datasetID),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckBigQueryDatasetExists(
						"google_bigquery_dataset.test"),
				),
			},

			{
				Config: testAccBigQueryDatasetUpdated(datasetID),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckBigQueryDatasetExists(
						"google_bigquery_dataset.test"),
				),
			},
		},
	})
}
|
||||
|
||||
// testAccCheckBigQueryDatasetDestroy verifies that no dataset tracked in
// Terraform state survives the destroy phase: a successful Datasets.Get for
// any tracked dataset_id fails the check.
//
// NOTE(review): any error from Datasets.Get — not just a 404 — is treated
// as "destroyed", so an auth or transient API failure could mask a leaked
// dataset; consider checking specifically for a googleapi 404.
func testAccCheckBigQueryDatasetDestroy(s *terraform.State) error {
	config := testAccProvider.Meta().(*Config)

	for _, rs := range s.RootModule().Resources {
		// Skip anything that isn't a BigQuery dataset (e.g. other resources
		// created by the same configuration).
		if rs.Type != "google_bigquery_dataset" {
			continue
		}

		_, err := config.clientBigQuery.Datasets.Get(config.Project, rs.Primary.Attributes["dataset_id"]).Do()
		if err == nil {
			return fmt.Errorf("Dataset still exists")
		}
	}

	return nil
}
|
||||
|
||||
// testAccCheckBigQueryDatasetExists returns a check asserting that the named
// resource is present in Terraform state with a non-empty ID, and that a
// dataset with the stored dataset_id exists in the configured project and
// matches the state ID.
func testAccCheckBigQueryDatasetExists(n string) resource.TestCheckFunc {
	return func(s *terraform.State) error {
		rs, ok := s.RootModule().Resources[n]
		if !ok {
			return fmt.Errorf("Not found: %s", n)
		}

		if rs.Primary.ID == "" {
			return fmt.Errorf("No ID is set")
		}

		config := testAccProvider.Meta().(*Config)

		found, err := config.clientBigQuery.Datasets.Get(config.Project, rs.Primary.Attributes["dataset_id"]).Do()
		if err != nil {
			return err
		}

		// The API's Dataset.Id must equal the ID recorded in state.
		if found.Id != rs.Primary.ID {
			return fmt.Errorf("Dataset not found")
		}

		return nil
	}
}
|
||||
|
||||
// testAccBigQueryDataset returns an HCL configuration declaring a single
// google_bigquery_dataset named "test" with the given dataset ID, "foo"
// metadata, an EU location, and a one-hour default table expiration.
//
// NOTE(review): the `default_table_expiration_ms` entry inside `labels`
// looks like a copy-paste of the top-level argument — presumably a second
// plain label was intended; confirm before relying on it.
func testAccBigQueryDataset(datasetID string) string {
	return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
  dataset_id                  = "%s"
  friendly_name               = "foo"
  description                 = "This is a foo description"
  location                    = "EU"
  default_table_expiration_ms = 3600000

  labels {
    env                         = "foo"
    default_table_expiration_ms = 3600000
  }
}`, datasetID)
}
|
||||
|
||||
// testAccBigQueryDatasetUpdated returns the second-step HCL configuration
// for the same dataset, changing every updatable field ("bar" metadata, a
// two-hour default table expiration) while keeping the ForceNew arguments
// (dataset_id, location) fixed so the update happens in place.
//
// NOTE(review): as in testAccBigQueryDataset, the
// `default_table_expiration_ms` entry inside `labels` looks like a
// copy-paste of the top-level argument; confirm before relying on it.
func testAccBigQueryDatasetUpdated(datasetID string) string {
	return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
  dataset_id                  = "%s"
  friendly_name               = "bar"
  description                 = "This is a bar description"
  location                    = "EU"
  default_table_expiration_ms = 7200000

  labels {
    env                         = "bar"
    default_table_expiration_ms = 7200000
  }
}`, datasetID)
}
|
2787
vendor/google.golang.org/api/bigquery/v2/bigquery-api.json
generated
vendored
Normal file
2787
vendor/google.golang.org/api/bigquery/v2/bigquery-api.json
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
6690
vendor/google.golang.org/api/bigquery/v2/bigquery-gen.go
generated
vendored
Normal file
6690
vendor/google.golang.org/api/bigquery/v2/bigquery-gen.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
6
vendor/vendor.json
vendored
6
vendor/vendor.json
vendored
@ -3266,6 +3266,12 @@
|
||||
"revision": "4687d739464a2d0af89a25be0318456e0776f3ef",
|
||||
"revisionTime": "2017-02-23T06:09:55Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "FEzQdhqmb6aqGL1lKnjOcUHIGSY=",
|
||||
"path": "google.golang.org/api/bigquery/v2",
|
||||
"revision": "16ab375f94503bfa0d19db78e96bffbe1a34354f",
|
||||
"revisionTime": "2017-03-20T22:51:23Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "I9nlJJGeNBvWlH7FLtRscT6NJhw=",
|
||||
"path": "google.golang.org/api/cloudbilling/v1",
|
||||
|
@ -0,0 +1,80 @@
|
||||
---
|
||||
layout: "google"
|
||||
page_title: "Google: google_bigquery_dataset"
|
||||
sidebar_current: "docs-google-bigquery-dataset"
|
||||
description: |-
|
||||
Creates a dataset resource for Google BigQuery.
|
||||
---
|
||||
|
||||
# google_bigquery_dataset
|
||||
|
||||
Creates a dataset resource for Google BigQuery. For more information see
|
||||
[the official documentation](https://cloud.google.com/bigquery/docs/) and
|
||||
[API](https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets).
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```hcl
|
||||
resource "google_bigquery_dataset" "default" {
|
||||
dataset_id = "test"
|
||||
friendly_name = "test"
|
||||
description = "This is a test description"
|
||||
location = "EU"
|
||||
default_table_expiration_ms = 3600000
|
||||
|
||||
labels {
|
||||
env = "default"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Argument Reference
|
||||
|
||||
The following arguments are supported:
|
||||
|
||||
* `dataset_id` - (Required) A unique ID for the dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_).
|
||||
Changing this forces a new resource to be created.
|
||||
|
||||
* `project` - (Optional) The project in which the resource belongs. If it
|
||||
is not provided, the provider project is used.
|
||||
|
||||
* `friendly_name` - (Optional) A descriptive name for the dataset.
|
||||
|
||||
* `description` - (Optional) A user-friendly description of the dataset.
|
||||
|
||||
* `location` - (Optional) The geographic location where the dataset should reside.
|
||||
|
||||
Possible values include `EU` and `US`. The default value is `US`.
|
||||
|
||||
Changing this forces a new resource to be created.
|
||||
|
||||
* `default_table_expiration_ms` - (Optional) The default lifetime of all
|
||||
tables in the dataset, in milliseconds. The minimum value is 3600000
|
||||
milliseconds (one hour).
|
||||
|
||||
Once this property is set, all newly-created
|
||||
tables in the dataset will have an expirationTime property set to the
|
||||
creation time plus the value in this property, and changing the value
|
||||
will only affect new tables, not existing ones. When the
|
||||
expirationTime for a given table is reached, that table will be
|
||||
deleted automatically. If a table's expirationTime is modified or
|
||||
removed before the table expires, or if you provide an explicit
|
||||
expirationTime when creating a table, that value takes precedence
|
||||
over the default expiration time indicated by this property.
|
||||
|
||||
* `labels` - (Optional) A mapping of labels to assign to the resource.
|
||||
|
||||
## Attributes Reference
|
||||
|
||||
In addition to the arguments listed above, the following computed attributes are
|
||||
exported:
|
||||
|
||||
* `self_link` - The URI of the created resource.
|
||||
|
||||
* `etag` - A hash of the resource.
|
||||
|
||||
* `creation_time` - The time when this dataset was created, in milliseconds since the epoch.
|
||||
|
||||
* `last_modified_time` - The date when this dataset or any of its tables was last modified,
|
||||
in milliseconds since the epoch.
|
@ -10,6 +10,15 @@
|
||||
<a href="/docs/providers/google/index.html">Google Provider</a>
|
||||
</li>
|
||||
|
||||
<li<%= sidebar_current("docs-google-bigquery") %>>
|
||||
<a href="#">Google BigQuery Resources</a>
|
||||
<ul class="nav nav-visible">
|
||||
<li<%= sidebar_current("docs-google-bigquery-dataset") %>>
|
||||
<a href="/docs/providers/google/r/bigquery_dataset.html">google_bigquery_dataset</a>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
|
||||
<li<%= sidebar_current("docs-google-datasource") %>>
|
||||
<a href="#">Google Cloud Platform Data Sources</a>
|
||||
<ul class="nav nav-visible">
|
||||
|
Loading…
Reference in New Issue
Block a user