diff --git a/README.md b/README.md index 96b766c..dd3ad8b 100644 --- a/README.md +++ b/README.md @@ -101,8 +101,10 @@ No modules. | Name | Description | Type | Default | Required | |------|-------------|------|---------|:--------:| -| [external\_locations](#input\_external\_locations) | List of object with external location configuration attributes |
list(object({| `[]` | no | -| [storage\_credential](#input\_storage\_credential) | Object with storage credentials configuration attributes |
index = string # Index of instance, for example short name, used later to access exact external location in output map
name = string # Custom whole name of resource
url = string # Path URL in cloud storage
credentials_name = optional(string)
owner = optional(string) # Owner of resource
skip_validation = optional(bool, true) # Suppress validation errors if any & force save the external location
read_only = optional(bool, false) # Indicates whether the external location is read-only.
force_destroy = optional(bool, true)
force_update = optional(bool, true)
comment = optional(string, "External location provisioned by Terraform")
permissions = optional(set(object({
principal = string
privileges = list(string)
})), [])
isolation_mode = optional(string, "ISOLATION_MODE_OPEN")
}))
object({| n/a | yes | +| [cloud](#input\_cloud) | Cloud (azure, aws or gcp) | `string` | n/a | yes | +| [create\_storage\_credential](#input\_create\_storage\_credential) | Boolean flag that determines whether to create storage credential or use the existing one | `bool` | `true` | no | +| [external\_locations](#input\_external\_locations) | List of objects with external location configuration attributes |
azure_access_connector_id = optional(string, null) # Azure Databricks Access Connector Id
cloud = optional(string, "")
name = optional(string, null) # Custom whole name of resource
owner = optional(string) # Owner of resource
force_destroy = optional(bool, true)
comment = optional(string, "Managed identity credential provisioned by Terraform")
permissions = optional(set(object({
principal = string
privileges = list(string)
})), [])
isolation_mode = optional(string, "ISOLATION_MODE_OPEN")
})
list(object({| `[]` | no | +| [storage\_credential](#input\_storage\_credential) | Object with storage credentials configuration attributes |
index = string # Index of instance, for example short name, used later to access exact external location in output map
name = string # Custom whole name of resource
url = string # Path URL in cloud storage
credentials_name = optional(string) # If create_storage_credential is set to false, provide id of existing storage credential here
owner = optional(string) # Owner of resource
skip_validation = optional(bool, true) # Suppress validation errors if any & force save the external location
read_only = optional(bool, false) # Indicates whether the external location is read-only.
force_destroy = optional(bool, true)
force_update = optional(bool, true)
comment = optional(string, "External location provisioned by Terraform")
permissions = optional(set(object({
principal = string
privileges = list(string)
})), [])
isolation_mode = optional(string, "ISOLATION_MODE_OPEN")
}))
object({| `{}` | no | ## Outputs diff --git a/main.tf b/main.tf index ebe0b5c..06f5fb2 100644 --- a/main.tf +++ b/main.tf @@ -13,14 +13,14 @@ locals { } resource "databricks_storage_credential" "this" { - count = var.storage_credential.cloud != "" ? 1 : 0 + count = var.create_storage_credential == true ? 1 : 0 name = var.storage_credential.name owner = var.storage_credential.owner # Dynamic block for Azure dynamic "azure_managed_identity" { - for_each = var.storage_credential.cloud == "azure" ? [1] : [] + for_each = var.cloud == "azure" ? [1] : [] content { access_connector_id = var.storage_credential.azure_access_connector_id } @@ -28,17 +28,17 @@ resource "databricks_storage_credential" "this" { # Dynamic block for GCP dynamic "databricks_gcp_service_account" { - for_each = var.storage_credential.cloud == "gcp" ? [1] : [] + for_each = var.cloud == "gcp" ? [1] : [] content {} } force_destroy = var.storage_credential.force_destroy comment = var.storage_credential.comment - isolation_mode = var.storage_credential.cloud == "azure" ? var.storage_credential.isolation_mode : null + isolation_mode = var.cloud == "azure" ? var.storage_credential.isolation_mode : null } resource "databricks_grants" "credential" { - count = var.storage_credential.cloud != "" ? 1 : 0 + count = var.create_storage_credential == true ? (length(var.storage_credential.permissions) != 0 ? 
1 : 0) : 0 storage_credential = try(databricks_storage_credential.this[0].id, null) dynamic "grant" { diff --git a/variables.tf b/variables.tf index b0ad60b..40c447f 100644 --- a/variables.tf +++ b/variables.tf @@ -1,8 +1,7 @@ variable "storage_credential" { type = object({ azure_access_connector_id = optional(string, null) # Azure Databricks Access Connector Id - cloud = optional(string, "") - name = optional(string, null) # Custom whole name of resource + name = optional(string, null) # Custom whole name of resource owner = optional(string) # Owner of resource force_destroy = optional(bool, true) comment = optional(string, "Managed identity credential provisioned by Terraform") @@ -13,14 +12,26 @@ variable "storage_credential" { isolation_mode = optional(string, "ISOLATION_MODE_OPEN") }) description = "Object with storage credentials configuration attributes" + default = {} +} + +variable "cloud" { + type = string + description = "Cloud (azure, aws or gcp)" +} + +variable "create_storage_credential" { + type = bool + default = true + description = "Boolean flag that determines whether to create storage credential or use the existing one" } variable "external_locations" { type = list(object({ - index = string # Index of instance, for example short name, used later to access exact external location in output map - name = string # Custom whole name of resource - url = string # Path URL in cloud storage - credentials_name = optional(string) + index = string # Index of instance, for example short name, used later to access exact external location in output map + name = string # Custom whole name of resource + url = string # Path URL in cloud storage + credentials_name = optional(string) # If create_storage_credential is set to false, provide id of existing storage credential here owner = optional(string) # Owner of resource skip_validation = optional(bool, true) # Suppress validation errors if any & force save the external location read_only = optional(bool, false) # 
Indicates whether the external location is read-only.
azure_access_connector_id = optional(string, null) # Azure Databricks Access Connector Id
name = optional(string, null) # Custom whole name of resource
owner = optional(string) # Owner of resource
force_destroy = optional(bool, true)
comment = optional(string, "Managed identity credential provisioned by Terraform")
permissions = optional(set(object({
principal = string
privileges = list(string)
})), [])
isolation_mode = optional(string, "ISOLATION_MODE_OPEN")
})