diff --git a/hooks/postprovision.ps1 b/hooks/postprovision.ps1
index 0caabb4..49f1f35 100644
--- a/hooks/postprovision.ps1
+++ b/hooks/postprovision.ps1
@@ -64,6 +64,7 @@ az account set -s $env:AZURE_SUBSCRIPTION_ID
 # =====================================================
 # Configuration
 # =====================================================
 $gpuOperatorVersion = if ($env:GPU_OPERATOR_VERSION) { $env:GPU_OPERATOR_VERSION } else { "v25.10.01" }
+$cognitiveServicesRoleAssignmentFailed = $false
 # =====================================================
 # Step 1: Get AKS credentials
@@ -325,7 +326,9 @@ az deployment group create `
     deepstreamNodeSelectorValue="$env:AZURE_DEEPSTREAM_NODE_SELECTOR_VALUE" `
     inferenceNodeSelectorValue="$env:AZURE_INFERENCE_NODE_SELECTOR_VALUE" `
     inferenceAgentEnabled="$inferenceAgentEnabled" `
-    mediaStreamerEnabled="$mediaStreamerEnabled"
+    mediaStreamerEnabled="$mediaStreamerEnabled" `
+    agentsRuntimeAzureOpenAIBaseUrl="$env:AGENTS_RUNTIME_AZURE_OPENAI_BASE_URL" `
+    agentsRuntimeAzureOpenAIModel="$env:AGENTS_RUNTIME_AZURE_OPENAI_MODEL"
 
 Log-Success "Video Indexer Arc extension deployed"
 
@@ -338,6 +341,14 @@ $principalId = (az k8s-extension show `
     --query "identity.principalId" -o tsv 2>$null)
 
 $accountResourceId = $env:AZURE_VIDEO_INDEXER_ACCOUNT_RESOURCE_ID
+$foundryAccountResourceId = $env:AI_FOUNDRY_ACCOUNT_RESOURCE_ID
+
+if (-not $foundryAccountResourceId -and $env:AI_FOUNDRY_ACCOUNT_NAME) {
+    $foundryAccountResourceId = (az cognitiveservices account show `
+        --name "$env:AI_FOUNDRY_ACCOUNT_NAME" `
+        --resource-group "$env:AZURE_RESOURCE_GROUP" `
+        --query "id" -o tsv 2>$null)
+}
 
 if (-not $principalId) {
     Log-Error "Extension managed identity principalId not found. Cannot assign permissions."
@@ -377,6 +388,42 @@ else {
             Log-Success "Permissions assigned to Arc extension managed identity"
         }
     }
+
+    if (-not $foundryAccountResourceId) {
+        Log-Info "AI Foundry account resource ID not found. Skipping 'Cognitive Services OpenAI Contributor' role assignment."
+    }
+    else {
+        Log-Info "Adding 'Cognitive Services OpenAI Contributor' role assignment on AI Foundry account..."
+
+        $existingOpenAiAssignment = (az role assignment list `
+            --assignee $principalId `
+            --role "Cognitive Services OpenAI Contributor" `
+            --scope $foundryAccountResourceId `
+            --query "[0].id" -o tsv 2>$null)
+
+        if ($existingOpenAiAssignment) {
+            Log-Success "Cognitive Services OpenAI Contributor role assignment already exists. Skipping."
+        }
+        else {
+            $openAiRoleErr = $null
+            az role assignment create `
+                --assignee-object-id $principalId `
+                --assignee-principal-type ServicePrincipal `
+                --role "Cognitive Services OpenAI Contributor" `
+                --scope $foundryAccountResourceId 2>&1 | ForEach-Object {
+                if ($_ -match 'ERROR|WARN') { $openAiRoleErr = $_ }
+            }
+
+            if ($LASTEXITCODE -ne 0) {
+                Log-Error "Failed to create Cognitive Services OpenAI Contributor role assignment: $openAiRoleErr"
+                Log-Error "Agent inference scenarios may not function correctly without this permission."
+                $cognitiveServicesRoleAssignmentFailed = $true
+            }
+            else {
+                Log-Success "Cognitive Services OpenAI Contributor role assigned on AI Foundry account"
+            }
+        }
+    }
 }
 
 # =====================================================
@@ -603,6 +650,15 @@ if ($env:AI_FOUNDRY_ACCOUNT_NAME) {
     Write-KeyValue "AI Foundry Hub" $env:AI_FOUNDRY_ACCOUNT_NAME
     Write-KeyValue "AI Foundry Model" $env:AI_FOUNDRY_MODEL_DEPLOYMENT
     Write-KeyValue "AI Endpoint" $env:AI_FOUNDRY_AI_SERVICES_ENDPOINT
+    if ($foundryAccountResourceId) {
+        Write-KeyValue "AI Foundry Resource ID" $foundryAccountResourceId
+    }
+    if ($cognitiveServicesRoleAssignmentFailed) {
+        Write-Host ""
+        Write-Host "⚠️ WARNING: Cognitive Services OpenAI Contributor role assignment failed." -ForegroundColor Yellow
+        Write-Host " Please manually grant this role to the VI extension managed identity on the AI Foundry account." -ForegroundColor Yellow
+        Write-Host " Agent inference scenarios will not function without this permission." -ForegroundColor Yellow
+    }
 }
 if ($principalId -and $cameraId) {
     $portalUrl = "https://www.videoindexer.ai/accounts/$env:AZURE_VIDEO_INDEXER_ACCOUNT_ID/extensions/$principalId/cameras/$cameraId/live-stream?feature.VideoAssistant=true&feature.LiveActivity=true"
diff --git a/hooks/postprovision.sh b/hooks/postprovision.sh
index e7c0be8..19ba146 100755
--- a/hooks/postprovision.sh
+++ b/hooks/postprovision.sh
@@ -295,7 +295,9 @@ az deployment group create \
     deepstreamNodeSelectorValue="$AZURE_DEEPSTREAM_NODE_SELECTOR_VALUE" \
     inferenceNodeSelectorValue="$AZURE_INFERENCE_NODE_SELECTOR_VALUE" \
     inferenceAgentEnabled="$INFERENCE_AGENT_ENABLED" \
-    mediaStreamerEnabled="$MEDIA_STREAMER_ENABLED"
+    mediaStreamerEnabled="$MEDIA_STREAMER_ENABLED" \
+    agentsRuntimeAzureOpenAIBaseUrl="$AGENTS_RUNTIME_AZURE_OPENAI_BASE_URL" \
+    agentsRuntimeAzureOpenAIModel="$AGENTS_RUNTIME_AZURE_OPENAI_MODEL"
 
 log_success "Video Indexer Arc extension deployed"
 log_info "Assigning permissions to Arc extension managed identity..."
@@ -307,6 +309,14 @@ PRINCIPAL_ID=$(az k8s-extension show \
   --query "identity.principalId" -o tsv 2>/dev/null | tr -d '\r' || true)
 
 ACCOUNT_RESOURCE_ID="$AZURE_VIDEO_INDEXER_ACCOUNT_RESOURCE_ID"
+FOUNDRY_ACCOUNT_RESOURCE_ID="${AI_FOUNDRY_ACCOUNT_RESOURCE_ID:-}"
+
+if [ -z "$FOUNDRY_ACCOUNT_RESOURCE_ID" ] && [ -n "${AI_FOUNDRY_ACCOUNT_NAME:-}" ]; then
+  FOUNDRY_ACCOUNT_RESOURCE_ID=$(az cognitiveservices account show \
+    --name "$AI_FOUNDRY_ACCOUNT_NAME" \
+    --resource-group "$AZURE_RESOURCE_GROUP" \
+    --query "id" -o tsv 2>/dev/null || true)
+fi
 
 if [ -z "$PRINCIPAL_ID" ]; then
   log_error "Extension managed identity principalId not found. Cannot assign permissions."
@@ -339,6 +349,34 @@ else
       log_error "The VI extension may not function correctly without this permission."
     fi
   fi
+
+  if [ -z "$FOUNDRY_ACCOUNT_RESOURCE_ID" ]; then
+    log_info "AI Foundry account resource ID not found. Skipping 'Cognitive Services OpenAI Contributor' role assignment."
+  else
+    log_info "Adding 'Cognitive Services OpenAI Contributor' role assignment on AI Foundry account..."
+
+    EXISTING_OPENAI_ASSIGNMENT=$(az role assignment list \
+      --assignee "$PRINCIPAL_ID" \
+      --role "Cognitive Services OpenAI Contributor" \
+      --scope "$FOUNDRY_ACCOUNT_RESOURCE_ID" \
+      --query "[0].id" -o tsv 2>/dev/null || true)
+
+    if [ -n "$EXISTING_OPENAI_ASSIGNMENT" ]; then
+      log_success "Cognitive Services OpenAI Contributor role assignment already exists. Skipping."
+    else
+      OPENAI_ROLE_ERR=""
+      if OPENAI_ROLE_ERR=$(az role assignment create \
+        --assignee-object-id "$PRINCIPAL_ID" \
+        --assignee-principal-type ServicePrincipal \
+        --role "Cognitive Services OpenAI Contributor" \
+        --scope "$FOUNDRY_ACCOUNT_RESOURCE_ID" 2>&1); then
+        log_success "Cognitive Services OpenAI Contributor role assigned on AI Foundry account"
+      else
+        log_error "Failed to create Cognitive Services OpenAI Contributor role assignment: $OPENAI_ROLE_ERR"
+        log_error "Agent inference scenarios may not function correctly without this permission."
+      fi
+    fi
+  fi
 fi
 
 # =====================================================
@@ -503,6 +541,9 @@ if [ -n "${AI_FOUNDRY_ACCOUNT_NAME:-}" ]; then
   write_key_value "AI Foundry Hub" "$AI_FOUNDRY_ACCOUNT_NAME"
   write_key_value "AI Foundry Model" "${AI_FOUNDRY_MODEL_DEPLOYMENT:-n/a}"
   write_key_value "AI Endpoint" "${AI_FOUNDRY_AI_SERVICES_ENDPOINT:-n/a}"
+  if [ -n "${FOUNDRY_ACCOUNT_RESOURCE_ID:-}" ]; then
+    write_key_value "AI Foundry Resource ID" "$FOUNDRY_ACCOUNT_RESOURCE_ID"
+  fi
 fi
 if [ -n "${PRINCIPAL_ID:-}" ] && [ -n "${CAMERA_ID:-}" ]; then
   PORTAL_URL="https://www.videoindexer.ai/accounts/${AZURE_VIDEO_INDEXER_ACCOUNT_ID}/extensions/${PRINCIPAL_ID}/cameras/${CAMERA_ID}/live-stream?feature.VideoAssistant=true&feature.LiveActivity=true"
diff --git a/infra/main.bicep b/infra/main.bicep
index 0018425..6daf9cb 100644
--- a/infra/main.bicep
+++ b/infra/main.bicep
@@ -198,7 +198,10 @@ output AZURE_DEEPSTREAM_NODE_SELECTOR_VALUE string = aks.outputs.deepstreamWorkl
 output AZURE_INFERENCE_NODE_SELECTOR_VALUE string = aks.outputs.inferenceWorkloadLabelValue
 output AI_FOUNDRY_ENDPOINT string = createFoundryProject ? aiFoundry.outputs.endpoint : ''
 output AI_FOUNDRY_AI_SERVICES_ENDPOINT string = createFoundryProject ? aiFoundry.outputs.aiServicesEndpoint : ''
 output AI_FOUNDRY_MODEL_DEPLOYMENT string = createFoundryProject ? aiFoundry.outputs.modelDeploymentName : ''
 output AI_FOUNDRY_ACCOUNT_NAME string = createFoundryProject ? aiFoundry.outputs.accountName : ''
 output AI_FOUNDRY_PROJECT_NAME string = createFoundryProject ? aiFoundry.outputs.projectName : ''
+output AGENTS_RUNTIME_AZURE_OPENAI_BASE_URL string = createFoundryProject ? aiFoundry.outputs.agentsRuntimeAzureOpenAIBaseUrl : ''
+output AGENTS_RUNTIME_AZURE_OPENAI_MODEL string = createFoundryProject ? aiFoundry.outputs.agentsRuntimeAzureOpenAIModel : ''
+output AI_MODEL_NAME string = aiModelName
 output MEDIA_STREAMER_ENABLED bool = mediaStreamerEnabled
diff --git a/infra/modules/ai-foundry.bicep b/infra/modules/ai-foundry.bicep
index c3e831c..8bb07bf 100644
--- a/infra/modules/ai-foundry.bicep
+++ b/infra/modules/ai-foundry.bicep
@@ -142,3 +142,9 @@ output modelDeploymentName string = modelDeployment.name
 
 @description('AI Services account resource ID')
 output accountId string = aiAccount.id
+
+@description('Azure OpenAI base URL for agents runtime')
+output agentsRuntimeAzureOpenAIBaseUrl string = 'https://${name}.cognitiveservices.azure.com/'
+
+@description('Azure OpenAI model for agents runtime')
+output agentsRuntimeAzureOpenAIModel string = modelName
diff --git a/infra/modules/vi-extension.bicep b/infra/modules/vi-extension.bicep
index cee6841..dc17fc2 100644
--- a/infra/modules/vi-extension.bicep
+++ b/infra/modules/vi-extension.bicep
@@ -56,6 +56,12 @@ param liveSummarizationEnabled bool = false
 
 @description('Enable the inference agent (should be disabled when a Foundry project handles model serving)')
 param inferenceAgentEnabled bool = false
+@description('Azure OpenAI base URL for agents runtime')
+param agentsRuntimeAzureOpenAIBaseUrl string = ''
+
+@description('Azure OpenAI model for agents runtime')
+param agentsRuntimeAzureOpenAIModel string = ''
+
 // Base config properties
 @description('Storage class for persistent volumes')
 param storageClass string = 'azurefile-csi-premium'
@@ -78,6 +84,8 @@ var baseConfigProperties = {
   'ViAi.deepstream.nodeSelector.workload': deepstreamNodeSelectorValue
   'ViAi.inference.nodeSelector.workload': inferenceNodeSelectorValue
   'ViAi.LiveSummarization.enabled': string(liveSummarizationEnabled)
+  'agentsRuntime.azureOpenAI.baseUrl': agentsRuntimeAzureOpenAIBaseUrl
+  'agentsRuntime.azureOpenAI.model': agentsRuntimeAzureOpenAIModel
 }
 
 resource connectedCluster 'Microsoft.Kubernetes/connectedClusters@2024-01-01' existing = {