Problem configuring the databricks provider block in Terraform (Azure Databricks)

dhxwm5r4 · posted 2023-08-07 in Other

I have two Terraform pipelines in ADO (Azure DevOps). The Terraform version is v1.5.3.
First pipeline - creates the Azure Databricks workspace, an Azure Data Lake storage account, and the Azure Databricks Access Connector with Terraform. It also assigns the "Storage Blob Data Contributor" role on the Data Lake storage account to the Access Connector (Terraform stores its state file in the storage container defined in main.tf).
Second pipeline - the second ADO pipeline is supposed to log in to Databricks and create the metastore using the resources created by the first pipeline. Because the first pipeline's state file is stored in that container, I can read and reuse all of its resources through remote_state.tf. My problem is with the second pipeline: I cannot configure the provider "databricks" {} block correctly.

Here is the Terraform code for the first pipeline:

### create Resource Group
module "resource_group" {
  source = "./modules/Module-ResourceGroup-v1"

  resource_group_name = var.resource_group_name
  location            = var.location
}

# create Azure Data Bricks Service
module "adb" {
  
  source = "./modules/Module-DataBrickWorkSpace-v1"
  
  databrick_workspace_name    = var.databrick_workspace_name
  resource_group_name         = module.resource_group.resource_group_name
  resource_group_location     = module.resource_group.resource_group_location
  managed_resource_group_name = var.managed_resource_group_name
}

# create Azure Data Lake
module "adc-uc-storage" {
  source                      = "./modules/Module-StorageForUnityCatalog-v1"

  uc_storage_account_name     = var.uc_storage_account_name
  resource_group_name         = module.resource_group.resource_group_name
  resource_group_location     = module.resource_group.resource_group_location
  uc_container_name           = var.uc_container_name
}

# creating access connector
module "adb-access-connector" {
  source                      = "./modules/Module-DataBrickAccessConnector-v1"

  access_connector_name       = var.access_connector_name
  location                    = module.resource_group.resource_group_location 
  resource_group_name         = module.resource_group.resource_group_name
  storage_account_id          = module.adc-uc-storage.storage_account_id
}

output "databricks_workspace_resource_id" {
  description = " Resource ID of databrick workspace"
  value = module.adb.databricks_workspace_resource_id
}

output "databrick_workspace_url" {
  description = "URL of databrick workspace"
  value = module.adb.databricks_workspace_url
}

output "databrick_host" {
  description = "The host part of URL of the Databricks workspace"
  value = module.adb.databricks_host
}

output "databricks_workspace_id" {
  description = "The ID of the Databricks workspace"
  value = module.adb.databricks_workspace_id
}
######################################################
output "resource_group_name" {
  description = "name of the adb resource group"
  value = module.resource_group.resource_group_name
}

output "resource_group_id" {
  description = "id of resource group"
  value = module.resource_group.resource_group_id
}

output "resource_group_location" {
 description = "location of resource group"
 value = module.resource_group.resource_group_location
}
###################################################
output "storage_account_id" {
  description = "The ID of the storage account"
  value       = module.adc-uc-storage.storage_account_id
}

output "storage_account_name" {
  description = "The name of the storage account"
  value       = module.adc-uc-storage.storage_account_name
}

output "storage_container_name" {
  description = "The name of the storage container"
  value       = module.adc-uc-storage.storage_container_name
}
#
output "storage_account_principal_id" {
  description = "The principal ID for the storage account"
  value       = module.adc-uc-storage.storage_account_principal_id
}
#
#############################################################
#
output "access_connector_unity_catalog_principal_id" {
  description = "The identity of the Databricks Access Connector"
  value       = module.adb-access-connector.access_connector_unity_catalog_principal_id
}
#
output "access_connector_unity_catalog_resource_id" {
  description = "The ID of the Databricks Access Connector"
  value       = module.adb-access-connector.access_connector_unity_catalog_resource_id
}
#
output "access_connector_name" {
  description = "The name of the Databricks Access Connector"
  value       = module.adb-access-connector.access_connector_name
}
#
output "access_connector_resource_group_name" {
  description = "The name of the resource group where the Databricks Access Connector is created"
  value       = module.adb-access-connector.access_connector_resource_group_name
}
#
output "access_connector_location" {
  description = "The location of the Databricks Access Connector"
  value       = module.adb-access-connector.access_connector_location

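For reference, the "Storage Blob Data Contributor" assignment mentioned above happens inside Module-DataBrickAccessConnector-v1. Below is only a minimal sketch of what that module presumably contains; the resource names are assumptions, not the actual module code.

# Assumed module internals: access connector with a system-assigned managed identity.
resource "azurerm_databricks_access_connector" "this" {
  name                = var.access_connector_name
  resource_group_name = var.resource_group_name
  location            = var.location

  identity {
    type = "SystemAssigned"
  }
}

# Grant that identity "Storage Blob Data Contributor" on the Unity Catalog storage account.
resource "azurerm_role_assignment" "uc_storage" {
  scope                = var.storage_account_id
  role_definition_name = "Storage Blob Data Contributor"
  principal_id         = azurerm_databricks_access_connector.this.identity[0].principal_id
}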

Here is the Terraform code for the second pipeline:

#remote_state.tf
data "terraform_remote_state" "dev" {
  backend = "azurerm"
  config = {
    resource_group_name  = "pipeline1-storageblob-resourcegroup"
    storage_account_name = "pipeline1storageblob"
    access_key           = "SECRET"
    # this is primary access key of storage account

    container_name = "pipeline1-storageblob-container"
    key            = "pipeline1-state-file"
  }
}

#main.tf 
terraform {
  required_providers {
    azurerm = {
      source = "hashicorp/azurerm"
      version = "~>3.0"
    }
    databricks = {
      source = "databricks/databricks"
       version = "1.21.0"
    }
  }
}

provider "azurerm" {
  features {}
  subscription_id   = "XXXXXXXXXXXXXXXXXXXXXXX"
  tenant_id         = "YYYYYYYYYYYYYYYYYYYYYYY"
  client_id         = "CCCCCCCCCCCCCCCCCCCCCCC"
  client_secret     = "SSSSSSSSSSSSSSSSSSSSSSS"
}

provider "databricks" {
  azure_workspace_resource_id = data.terraform_remote_state.dev.outputs.databricks_workspace_resource_id # read from the outputs of pipeline 1
  azure_client_id             = "CCCCCCCCCCCCCCCCCCCCCCC"
  azure_client_secret         = "SSSSSSSSSSSSSSSSSSSSSSS"
  azure_tenant_id             = "YYYYYYYYYYYYYYYYYYYYYYY"
}
#resource.tf
resource "databricks_metastore" "metastore" {
  name          = "metastore-allenv"
  storage_root  = format(
                          "abfss://%s@%s.dfs.core.windows.net/",
                          data.terraform_remote_state.dev.outputs.storage_container_name,
                          data.terraform_remote_state.dev.outputs.storage_account_name
                         )
  force_destroy = true
}

This is the error I get:

│ Error: cannot create metastore: Only account admin can create metastores.
│
│   with databricks_metastore.metastore,
│   on resources.tf line 1, in resource "databricks_metastore" "metastore":
│    1: resource "databricks_metastore" "metastore" {
│
╵

For verification:

My service principal is a member of the Global Administrator role in Azure Active Directory.
The service principal also has the following roles assigned at the subscription level:
1. Owner
2. Storage Blob Data Owner
3. User Access Administrator

Related links:

https://learn.microsoft.com/en-us/azure/databricks/data-governance/unity-catalog/automate
https://registry.terraform.io/providers/databricks/databricks/latest/docs/guides/unity-catalog-azure


How do I create the metastore?

c8ib6hqw 1#

Error: cannot create metastore: Only account admin can create metastores.
│   with databricks_metastore.metastore,
│   on resources.tf line 1, in resource "databricks_metastore" "metastore":
│    1: resource "databricks_metastore" "metastore" {
│
╵

You must have the account admin role to create metastores.
Follow the MS documentation on how to establish your first account admin.
To fix the error, assign the account admin role to the service principal as follows:
1. Sign in to accounts.azuredatabricks.net with the Global Administrator role.
2. Go to User management -> Service principals -> Add service principal (provide your SP name and application ID).
3. Navigate to Roles and assign the Account admin role.
(A Terraform-based alternative is sketched below.)
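If you prefer to do this through Terraform instead of the account console UI, something along the following lines can work. This is only a sketch: the account_id, names, and credentials are placeholders, and the configuration must itself be applied by an identity that is already a Databricks account admin (for the very first account admin you still need the Global Administrator and the UI steps above).

# Account-level provider pointing at the Databricks account console (all values are placeholders).
provider "databricks" {
  alias               = "accounts"
  host                = "https://accounts.azuredatabricks.net"
  account_id          = "DDDDDDDD-DDDD-DDDD-DDDD-DDDDDDDDDDDD" # Databricks account ID
  azure_client_id     = "CCCCCCCCCCCCCCCCCCCCCCC"
  azure_client_secret = "SSSSSSSSSSSSSSSSSSSSSSS"
  azure_tenant_id     = "YYYYYYYYYYYYYYYYYYYYYYY"
}

# Register the Azure service principal in the Databricks account (UI step 2).
resource "databricks_service_principal" "pipeline_sp" {
  provider       = databricks.accounts
  application_id = "CCCCCCCCCCCCCCCCCCCCCCC" # Azure client ID of the pipeline SP
  display_name   = "terraform-pipeline-sp"   # placeholder name
}

# Grant it the account admin role (UI step 3).
resource "databricks_service_principal_role" "account_admin" {
  provider             = databricks.accounts
  service_principal_id = databricks_service_principal.pipeline_sp.id
  role                 = "account_admin"
}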

Terraform code to create the metastore:

terraform {
  required_providers {
    databricks = {
      source = "databricks/databricks"
    }
  }
}
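
# NOTE: no provider "databricks" block is configured in this snippet. It is assumed the
# databricks provider authenticates through environment variables (for example ARM_CLIENT_ID,
# ARM_CLIENT_SECRET, ARM_TENANT_ID and DATABRICKS_AZURE_RESOURCE_ID) or through a provider
# block like the one shown in the question.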

provider "azurerm" {
  features {}
  subscription_id   = "xxxx-7af2-43f1-bd66-12e77ac05818"
  tenant_id         = "xxxxx-b0e9-cxxxx-a944-627017451367"
  client_id         = "b2ba5bf1djdjdjdjdd-8aae-ed3a4b006e70"
  client_secret     = "gchdhdhdbjdjdjdjdnnj"
}

data "azurerm_resource_group" "this" {
  name = "Sri"
}

resource "azurerm_storage_account" "unity_catalog" {
  name                     = "storagedatabrickss"
  resource_group_name      = data.azurerm_resource_group.this.name
  location                 = data.azurerm_resource_group.this.location
  tags                     = data.azurerm_resource_group.this.tags
  account_tier             = "Standard"
  account_replication_type = "GRS"
  is_hns_enabled           = true
}

resource "azurerm_storage_container" "unity_catalog" {
  name                  = "sample-container"
  storage_account_name  = azurerm_storage_account.unity_catalog.name
  container_access_type = "private"
}

resource "databricks_metastore" "metastore" {
  name          = "samplestore"
  storage_root  = format("abfss://%s@%s.dfs.core.windows.net/",
    azurerm_storage_container.unity_catalog.name,
    azurerm_storage_account.unity_catalog.name)
  force_destroy = true
}
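Once the service principal is an account admin, the provider "databricks" block from your second pipeline should get past this error. After the metastore is created you will usually also want to attach it to the workspace. A minimal sketch for your second pipeline, assuming the databricks_workspace_id output from pipeline 1 is the numeric workspace ID expected by the assignment resource:

# Attach the new metastore to the workspace created by pipeline 1 (sketch only).
resource "databricks_metastore_assignment" "this" {
  metastore_id = databricks_metastore.metastore.id
  workspace_id = data.terraform_remote_state.dev.outputs.databricks_workspace_id
}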


See also: "Creating metastore for Azure Databricks Unity Catalog via Terraform fails", answered on Stack Overflow by Kombajn zbożowy.
