error:
Error: cannot create cluster: authentication is not configured for provider.. Please check https://registry.terraform.io/providers/databrickslabs/databricks/latest/docs#authentication for details
│
│ with module.databricks.databricks_cluster.cluster1,
│ on ..\Modules\DataBricks\databricks.tf line 34, in resource "databricks_cluster" "cluster1":
│ 34: resource "databricks_cluster" "cluster1" {
│
╵
╷
│ Error: cannot create notebook: authentication is not configured for provider.. Please check https://registry.terraform.io/providers/databrickslabs/databricks/latest/docs#authentication for details
│
│ with module.databricks.databricks_notebook.notebook1,
│ on ..\Modules\DataBricks\databricks.tf line 56, in resource "databricks_notebook" "notebook1":
│ 56: resource "databricks_notebook" "notebook1" {
│
╵
╷
│ Error: cannot create notebook: authentication is not configured for provider.. Please check https://registry.terraform.io/providers/databrickslabs/databricks/latest/docs#authentication for details
│
│ with module.databricks.databricks_notebook.notebook2,
│ on ..\Modules\DataBricks\databricks.tf line 62, in resource "databricks_notebook" "notebook2":
│ 62: resource "databricks_notebook" "notebook2" {
main.tf
# Root module provider requirements.
terraform {
  required_providers {
    azurerm = {
      source  = "hashicorp/azurerm"
      version = "~> 2.33"
    }
    databricks = {
      # FIX: the provider moved from the deprecated "databrickslabs"
      # namespace to the official "databricks" namespace on the registry;
      # the old address no longer receives releases.
      source  = "databricks/databricks"
      version = "~> 0.5"
    }
    random = {
      source  = "hashicorp/random"
      version = "~> 2.2"
    }
  }
}
# Service-principal credentials for the azurerm provider.
# SECURITY FIX: the client secret was hard-coded in source control. Supply
# these values via -var / *.tfvars, or omit the arguments entirely and use
# the ARM_CLIENT_ID / ARM_CLIENT_SECRET / ARM_TENANT_ID / ARM_SUBSCRIPTION_ID
# environment variables instead. Rotate the leaked secret immediately.
variable "client_id" {
  type        = string
  description = "Azure AD application (client) ID of the service principal."
}
variable "client_secret" {
  type        = string
  description = "Client secret of the service principal."
  sensitive   = true # requires Terraform >= 0.14
}
variable "tenant_id" {
  type        = string
  description = "Azure AD tenant ID."
}
variable "subscription_id" {
  type        = string
  description = "Target Azure subscription ID."
}
provider "azurerm" {
  features {}
  client_id       = var.client_id
  client_secret   = var.client_secret
  tenant_id       = var.tenant_id
  subscription_id = var.subscription_id
}
# FIX for the reported errors: this block was empty, so the Databricks
# provider had no workspace to authenticate against, producing
# "cannot create cluster/notebook: authentication is not configured for
# provider". On Azure, pointing the provider at the workspace's Azure
# resource ID lets it reuse the azurerm credentials for AAD authentication.
provider "databricks" {
  # NOTE(review): requires the DataBricks module to export the workspace's
  # Azure resource ID, e.g.
  #   output "workspace_id" { value = azurerm_databricks_workspace.myworkspace.id }
  # — confirm the output name against ../Modules/DataBricks before applying.
  azure_workspace_resource_id = module.databricks.workspace_id
}
modules.tf
# Shared naming constants for the dev environment, referenced by the
# module blocks below.
locals {
# Environment tag used as a prefix in resource names.
env = "dev"
# Azure subscription ID used to build fully-qualified resource IDs.
sub = "302af5ae-a1ad-4b48-bac2-c0181e86705d"
# Resource group name/location every module deploys into.
rgname = "${local.env}-freetrail-rg"
rglocation = "eastus"
# Network names (note: some module blocks below repeat these as literals).
vnetname = "freetrailvnet"
subnetname1 = "subnet1"
}
# Resource group that every other module deploys into.
module "rg" {
  source = "../Modules/ResourceGroup"

  rgname     = local.rgname
  rglocation = local.rglocation
}
# Virtual network with two subnets and an NSG.
module "vnet" {
  source     = "../Modules/VNet"
  rgname     = local.rgname
  rglocation = local.rglocation

  # CONSISTENCY FIX: use the shared locals instead of repeating the string
  # literals, so the names cannot drift from the rest of the configuration
  # (e.g. the subnet_id built in the "adf" block's comments).
  vnetname      = local.vnetname
  address_space = ["10.0.0.0/8"]
  subnetname1   = local.subnetname1
  subnetprefix1 = ["10.0.1.0/24"]
  subnetname2   = "subnet2"
  subnetprefix2 = ["10.0.2.0/24"]
  nsgname       = "freetrailnsg"

  depends_on = [
    module.rg
  ]
}
# Azure Data Factory instance with a named integration runtime.
module "adf" {
source = "../Modules/ADF"
rgname = local.rgname
rglocation = local.rglocation
adfname = "dev-freetrail-adf"
irname = "IntegrationRuntime01"
# Retained earlier experiment: wiring the ADF into the VNet via a
# subnet-scoped integration runtime (see the disabled "irvm" module below).
# prefix = "${local.env}-adf"
# vnetname = "freetrailvnet" #module.vnet.virtual_network_name
# subnetname1 = "subnet1" #module.vnet.subnetname1
# subnet_id = "/subscriptions/${local.sub}/resourceGroups/${local.rgname}/providers/Microsoft.Network/virtualNetworks/${local.vnetname}/subnets/${local.subnetname1}"
depends_on = [module.rg,module.keyvault]
}
# module "irvm" {
# source = "../Modules/IntegrationRuntimeVM"
# prefix = "${local.env}-adf"
# rgname = local.rgname
# rglocation = local.rglocation
# vnetname = "freetrailvnet" #module.vnet.virtual_network_name
# subnetname1 = "subnet1" #module.vnet.subnetname1
# subnet_id = "/subscriptions/${local.sub}/resourceGroups/${local.rgname}/providers/Microsoft.Network/virtualNetworks/${local.vnetname}/subnets/${local.subnetname1}"
# depends_on = [module.vnet,module.adf]
# }
# Storage account used by the Data Factory.
module "storage" {
  source = "../Modules/Storage"

  strname    = "adfstorage2345"
  rgname     = local.rgname
  rglocation = local.rglocation

  depends_on = [module.rg, module.adf]
}
# Key Vault for secrets consumed by ADF and SQL.
module "keyvault" {
  source = "../Modules/KeyVault"

  keyvaultname = "devkvasdfasdf"
  rgname       = local.rgname
  rglocation   = local.rglocation

  depends_on = [module.rg]
}
# Azure SQL server plus a single database.
module "sqlserver" {
  source = "../Modules/Sql"

  sqlservername = "devsqlsrvasdfasdf"
  sqldbname     = "devdbasdfasdf"
  rgname        = local.rgname
  rglocation    = local.rglocation

  depends_on = [module.rg, module.keyvault]
}
# Databricks workspace (see ../Modules/DataBricks and workspace.tf).
module "databricks" {
  source = "../Modules/DataBricks"

  dtbrickname = "${local.env}-databricks-testsdfasf"
  rgname      = local.rgname
  rglocation  = local.rglocation

  depends_on = [module.rg]
}
# Databricks cluster and two starter notebooks, created inside the
# workspace provisioned by module.databricks.
module "cluster" {
  source = "../Modules/Cluster"

  cluster1name  = "Shared Autoscaling"
  node_type_id  = "Standard_DS3_v2"
  spark_version = "7.3.x-scala2.12"

  notebook_path1 = "/Shared/App1/python_notebook1"
  notebook_path2 = "/Shared/App1/python_notebook2"

  depends_on = [module.databricks]
}
workspace.tf
# Exposes the identity Terraform is currently authenticated as
# (client/tenant/object IDs) for use by resources in this module.
data "azurerm_client_config" "current" {
}
# Azure Databricks workspace on the trial SKU.
resource "azurerm_databricks_workspace" "myworkspace" {
  name                = var.dtbrickname
  resource_group_name = var.rgname
  location            = var.rglocation
  sku                 = "trial"

  # Name of the resource group Azure creates to hold the workspace's
  # managed resources.
  managed_resource_group_name = "${var.dtbrickname}-workspace-rg"
}
cluster.tf
# Provider requirements for the Cluster module.
terraform {
  required_providers {
    azurerm = {
      source  = "hashicorp/azurerm"
      version = "~> 2.33"
    }
    databricks = {
      # FIX: the provider moved from the deprecated "databrickslabs"
      # namespace to the official "databricks" namespace on the registry.
      source  = "databricks/databricks"
      version = "~> 0.5"
    }
    random = {
      source  = "hashicorp/random"
      version = "~> 2.2"
    }
  }
}
# data "databricks_node_type" "smallest" {
# local_disk = true
# }
# data "databricks_spark_version" "latest_lts" {
# long_term_support = true
# }
# resource "databricks_cluster" "shared_autoscaling" {
# cluster_name = var.cluster_name #"Shared Autoscaling"
# spark_version = data.databricks_spark_version.latest_lts.id
# node_type_id = data.databricks_node_type.smallest.id
# autotermination_minutes = 15
# autoscale {
# min_workers = 1
# max_workers = 3
# }
# }
# Autoscaling all-purpose cluster. Name, runtime version and node size come
# in via module variables (set from the root "cluster" module block).
resource "databricks_cluster" "this" {
cluster_name = var.cluster1name #"Cluster-1"
spark_version = var.spark_version #data.databricks_spark_version.latest_lts.id
node_type_id = var.node_type_id #data.databricks_node_type.smallest.id
# Shut the cluster down after 15 idle minutes to limit spend.
autotermination_minutes = 15
autoscale {
min_workers = 1
max_workers = 3
}
# PyPI libraries installed on the cluster at start-up.
library {
pypi {
package = "pyodbc"
}
}
library {
pypi {
package = "databricks-cli"
}
}
}
# First starter notebook, placed at the workspace path supplied by the
# caller (notebook_path1).
resource "databricks_notebook" "notebook1" {
  path     = var.notebook_path1
  language = "PYTHON"
  format   = "SOURCE"

  # Inline notebook source; the provider requires it base64-encoded.
  content_base64 = base64encode("print('Welcome to your Python notebook1 App1')")
}
# Second starter notebook, placed at the workspace path supplied by the
# caller (notebook_path2).
resource "databricks_notebook" "notebook2" {
  path     = var.notebook_path2
  language = "PYTHON"
  format   = "SOURCE"

  # Inline notebook source; the provider requires it base64-encoded.
  content_base64 = base64encode("print('Welcome to your Python notebook2 App2')")
}