From 4d02d3722feb028854571d0abe185b72a8e8cbfd Mon Sep 17 00:00:00 2001
From: Max Dymond
Date: Thu, 11 Apr 2024 09:06:57 +0100
Subject: [PATCH] Add managed identity / no-shared-key support

---
 README.md                       |  24 ++++-
 create_resources.sh             |   4 +
 create_resources_nosharedkey.sh | 137 ++++++++++++++++++++++++
 function_app.py                 |  43 +++-----
 requirements.txt                |   1 +
 rg.bicep                        | 147 +++++++++++--------------
 rg_funcapp.bicep                | 178 ++++++++++++++++++++++++++++++++
 waitfortrigger.sh               |  19 ++++
 8 files changed, 437 insertions(+), 116 deletions(-)
 create mode 100755 create_resources_nosharedkey.sh
 create mode 100644 rg_funcapp.bicep
 create mode 100755 waitfortrigger.sh

diff --git a/README.md b/README.md
index ebafbfb..1f893dc 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,8 @@ an Azure Function App to keep it up to date. For use with
 
 # Getting Started
 
+## Basic usage
+
 To create a new Debian package repository with an Azure Function App, run
 
 ```bash
@@ -30,20 +32,38 @@ overridden by passing the `-l` parameter:
 ./create_resources.sh -l uksouth
 ```
 
+## No shared-key access / Managed Identities
+
+By default, the storage account that is created has shared-key access enabled.
+You can instead create a deployment that uses Managed Identities, but this
+requires Docker (as the function application and its dependencies must be
+compiled and packed appropriately).
+
+To create a new Debian package repository which uses Managed Identities, run
+
+```bash
+./create_resources_nosharedkey.sh [-s <suffix>] [-l <location>] <resource_group_name>
+```
+
+This creates an additional blob container (`python`) in the storage account to
+hold the compiled function application zip file; the function application is
+run directly from that zip file.
+
 # Design
 
 The function app works as follows:
 
 - It is triggered whenever a `.deb` file is uploaded to the monitored blob
   storage container
-  - It can be triggered by both blob storage triggers and by Event Grid triggers
+  - It is triggered by an Event Grid subscription rather than a blob storage trigger.
 - It iterates over all `.deb` files and looks for a matching `.package` file.
   - If that file does not exist, it is created
   - The `.deb` file is downloaded and the control information is extracted
   - The hash values for the file are calculated (MD5sum, SHA1, SHA256)
   - All of this information is added to the `.package` file
 - All `.package` files are iterated over, downloaded, and combined into a
-  single `Package` file, which is then uploaded.
+  single `Packages` file, which is then uploaded. A `Packages.xz` file is also
+  created.
 
 As the function app works on a Consumption plan it may take up to 10 minutes
 for the function app to trigger and regenerate the package information. In practice,

diff --git a/create_resources.sh b/create_resources.sh
index a97db85..0041831 100755
--- a/create_resources.sh
+++ b/create_resources.sh
@@ -56,6 +56,7 @@ az deployment group create \
   --name "${DEPLOYMENT_NAME}" \
   --resource-group "${RESOURCE_GROUP_NAME}" \
   --template-file ./rg.bicep \
+  --parameter use_shared_keys=true \
   ${PARAMETERS} \
   --output none
 echo "Resources created"
@@ -84,6 +85,9 @@ echo "Function app code deployed"
 # Clean up
 rm -f build/function_app.zip
 
+# Wait for the event trigger to exist
+./waitfortrigger.sh "${FUNCTION_APP_NAME}" "${RESOURCE_GROUP_NAME}"
+
 # Now run the second deployment script to create the eventgrid subscription.
 # This must be run after the function app is deployed, because the ARM ID of the
 # eventGridTrigger function doesn't exist until after deployment.
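A quick aside on the Design bullets above: the hashing step is plain `hashlib` work. The sketch below is illustrative only — it is not part of this patch, and the `package_hashes` helper is hypothetical; the real function app computes the same three digests and extracts control information with `pydpkg` inside `RepoManager`.

```python
# Sketch of the per-.deb hashing described in the README's Design section.
import hashlib


def package_hashes(path: str) -> dict[str, str]:
    """Compute the Packages-index hash fields for one .deb file."""
    md5, sha1, sha256 = hashlib.md5(), hashlib.sha1(), hashlib.sha256()
    with open(path, "rb") as f:
        # Stream in 1 MiB chunks so large packages need not fit in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            for h in (md5, sha1, sha256):
                h.update(chunk)
    return {
        "MD5sum": md5.hexdigest(),
        "SHA1": sha1.hexdigest(),
        "SHA256": sha256.hexdigest(),
    }
```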
diff --git a/create_resources_nosharedkey.sh b/create_resources_nosharedkey.sh
new file mode 100755
index 0000000..6cf7fee
--- /dev/null
+++ b/create_resources_nosharedkey.sh
@@ -0,0 +1,137 @@
+#!/bin/bash
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+set -euo pipefail
+
+# This script uses Bicep scripts to create a function app and a storage account,
+# then uses the Azure CLI to deploy the function code to that app.
+# Uses managed identities.
+# Requires Docker to be installed and running.
+
+LOCATION="eastus"
+
+function usage()
+{
+    echo "Usage: $0 [-l <location>] [-s <suffix>] <resource_group_name>"
+    echo
+    echo "By default, location is '${LOCATION}'"
+    echo "A list of location names can be obtained by running 'az account list-locations --query \"[].name\"'"
+}
+
+PARAMETERS=""
+
+while getopts ":l:s:" opt; do
+    case "${opt}" in
+        l)
+            LOCATION=${OPTARG}
+            ;;
+        s)
+            PARAMETERS="${PARAMETERS} --parameter suffix=${OPTARG}"
+            ;;
+        *)
+            usage
+            exit 1
+            ;;
+    esac
+done
+shift $((OPTIND-1))
+
+# Takes the resource group name as a positional parameter.
+RESOURCE_GROUP_NAME=${1:-}
+
+if [[ -z ${RESOURCE_GROUP_NAME} ]]
+then
+    echo "Requires a resource group name"
+    echo
+    usage
+    exit 1
+fi
+
+# Pack the application using the core-tools tooling.
+# Should generate a file called function_app.zip.
+docker run -it \
+    --rm \
+    -v /var/run/docker.sock:/var/run/docker.sock \
+    -v "${PWD}":/function_app \
+    -w /function_app \
+    mcr.microsoft.com/azure-functions/python:4-python3.11-core-tools \
+    bash -c "func pack --python --build-native-deps"
+
+echo "Ensuring resource group ${RESOURCE_GROUP_NAME} exists"
+az group create --name "${RESOURCE_GROUP_NAME}" --location "${LOCATION}" --output none
+
+# Create the resources
+DEPLOYMENT_NAME="${RESOURCE_GROUP_NAME}"
+echo "Creating resources in resource group ${RESOURCE_GROUP_NAME}"
+az deployment group create \
+    --name "${DEPLOYMENT_NAME}" \
+    --resource-group "${RESOURCE_GROUP_NAME}" \
+    --template-file ./rg.bicep \
+    --parameter use_shared_keys=false \
+    ${PARAMETERS} \
+    --output none
+echo "Resources created"
+
+# There's some output in the deployment that we need.
+APT_SOURCES=$(az deployment group show -n "${DEPLOYMENT_NAME}" -g "${RESOURCE_GROUP_NAME}" --output tsv --query properties.outputs.apt_sources.value)
+STORAGE_ACCOUNT=$(az deployment group show -n "${DEPLOYMENT_NAME}" -g "${RESOURCE_GROUP_NAME}" --output tsv --query properties.outputs.storage_account.value)
+PACKAGE_CONTAINER=$(az deployment group show -n "${DEPLOYMENT_NAME}" -g "${RESOURCE_GROUP_NAME}" --output tsv --query properties.outputs.package_container.value)
+PYTHON_CONTAINER=$(az deployment group show -n "${DEPLOYMENT_NAME}" -g "${RESOURCE_GROUP_NAME}" --output tsv --query properties.outputs.python_container.value)
+
+# Upload the function app code to the python container
+echo "Uploading function app code to ${PYTHON_CONTAINER}"
+az storage blob upload \
+    --auth-mode login \
+    --account-name "${STORAGE_ACCOUNT}" \
+    --container-name "${PYTHON_CONTAINER}" \
+    --file function_app.zip \
+    --name function_app.zip \
+    --overwrite \
+    --output none
+
+# Create the function app
+echo "Creating function app in resource group ${RESOURCE_GROUP_NAME}"
+az deployment group create \
+    --name "${DEPLOYMENT_NAME}_func" \
+    --resource-group "${RESOURCE_GROUP_NAME}" \
+    --template-file ./rg_funcapp.bicep \
+    --parameter use_shared_keys=false \
+    ${PARAMETERS} \
+    --output none
+echo "Function App created"
+
+# Get the generated function app name
+FUNCTION_APP_NAME=$(az deployment group show -n "${DEPLOYMENT_NAME}_func" -g "${RESOURCE_GROUP_NAME}" --output tsv --query properties.outputs.function_app_name.value)
+
+# Clean up
+rm -f function_app.zip
+
+# Wait for the event trigger to exist
+./waitfortrigger.sh "${FUNCTION_APP_NAME}" "${RESOURCE_GROUP_NAME}"
+
+# Now run the second deployment script to create the eventgrid subscription.
+# This must be run after the function app is deployed, because the ARM ID of the
+# eventGridTrigger function doesn't exist until after deployment.
+az deployment group create \
+    --name "${DEPLOYMENT_NAME}_eg" \
+    --resource-group "${RESOURCE_GROUP_NAME}" \
+    --template-file ./rg_add_eventgrid.bicep \
+    ${PARAMETERS} \
+    --output none
+
+# Report to the user how to use this repository
+echo "The repository has been created!"
+echo "You can upload packages to the container '${PACKAGE_CONTAINER}' in the storage account '${STORAGE_ACCOUNT}'."
+echo "The function app '${FUNCTION_APP_NAME}' will be triggered by new packages"
+echo "in that container and regenerate the repository."
+echo
+echo "To download packages, you need to have apt-transport-blob installed on your machine."
+echo "Next, add this line to /etc/apt/sources.list:"
+echo
+echo "  ${APT_SOURCES}"
+echo
+echo "Ensure that you have a valid Azure credential (either by logging in with 'az login' or"
+echo "by setting the AZURE_CLIENT_ID, AZURE_CLIENT_SECRET, and AZURE_TENANT_ID environment variables)."
+echo "That credential must have 'Storage Blob Data Reader' access to the storage account."
+echo "Then you can use apt-get update and apt-get install as usual."
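For readers unfamiliar with `--auth-mode login`: the upload step above authenticates with the caller's Microsoft Entra identity instead of an account key. A rough SDK equivalent, assuming the standard public-cloud account URL shape (the account name below is a placeholder mirroring the script's `STORAGE_ACCOUNT` and `PYTHON_CONTAINER` variables):

```python
# Sketch only: SDK equivalent of `az storage blob upload --auth-mode login`.
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobClient

STORAGE_ACCOUNT = "debianrepoexample"  # placeholder account name
PYTHON_CONTAINER = "python"

blob = BlobClient(
    account_url=f"https://{STORAGE_ACCOUNT}.blob.core.windows.net",
    container_name=PYTHON_CONTAINER,
    blob_name="function_app.zip",
    credential=DefaultAzureCredential(),
)
with open("function_app.zip", "rb") as data:
    blob.upload_blob(data, overwrite=True)
```

Either way, the caller needs a data-plane role such as 'Storage Blob Data Contributor' on the storage account; the Bicep templates below assign exactly that role to the deployment-script UAMI and to the function app.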
diff --git a/function_app.py b/function_app.py
index c4761df..f9e94fa 100644
--- a/function_app.py
+++ b/function_app.py
@@ -13,6 +13,7 @@ import azure.functions as func
 import pydpkg
 from azure.storage.blob import ContainerClient
+from azure.identity import DefaultAzureCredential
 
 app = func.FunctionApp()
 log = logging.getLogger("apt-package-function")
@@ -128,10 +129,21 @@ class RepoManager:
 
     def __init__(self) -> None:
         """Create a RepoManager object."""
-        self.connection_string = os.environ["AzureWebJobsStorage"]
-        self.container_client = ContainerClient.from_connection_string(
-            self.connection_string, CONTAINER_NAME
-        )
+        if "AzureWebJobsStorage" in os.environ:
+            # Use a connection string to access the storage account
+            self.connection_string = os.environ["AzureWebJobsStorage"]
+            self.container_client = ContainerClient.from_connection_string(
+                conn_str=self.connection_string, container_name=CONTAINER_NAME
+            )
+        else:
+            # Use credentials to access the container. Used when shared-key
+            # access is disabled.
+            self.credential = DefaultAzureCredential()
+            self.container_client = ContainerClient.from_container_url(
+                container_url=os.environ["BLOB_CONTAINER_URL"],
+                credential=self.credential,
+            )
+
         self.package_file = self.container_client.get_blob_client("Packages")
         self.package_file_xz = self.container_client.get_blob_client("Packages.xz")
 
@@ -184,29 +196,6 @@ def create_packages(self) -> None:
         log.info("Created Packages.xz file")
 
 
-@app.blob_trigger(
-    arg_name="newfile",
-    path=f"{CONTAINER_NAME}/{{name}}.deb",
-    connection="AzureWebJobsStorage",
-)
-def blob_trigger(newfile: func.InputStream):
-    """Process a new blob in the container."""
-    # Have to use %s for the length because .length is optional
-    log.info(
-        "Python blob trigger function processed blob; Name: %s, Blob Size: %s bytes",
-        newfile.name,
-        newfile.length,
-    )
-    if not newfile.name or not newfile.name.endswith(".deb"):
-        log.info("Not a Debian package: %s", newfile.name)
-        return
-
-    rm = RepoManager()
-    rm.check_metadata()
-    rm.create_packages()
-    log.info("Done processing %s", newfile.name)
-
-
 @app.function_name(name="eventGridTrigger")
 @app.event_grid_trigger(arg_name="event")
 def event_grid_trigger(event: func.EventGridEvent):

diff --git a/requirements.txt b/requirements.txt
index fa47559..4ea1cb7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,5 +6,6 @@
 # Manually managing azure-functions-worker may cause unexpected issues
 
 azure-functions
+azure-identity
 azure-storage-blob
 pydpkg

diff --git a/rg.bicep b/rg.bicep
index ecb9cbf..0d06482 100644
--- a/rg.bicep
+++ b/rg.bicep
@@ -13,6 +13,9 @@ param location string = resourceGroup().location
 @description('The name of the function app to use')
 param appName string = 'debfnapp${suffix}'
 
+@description('Whether to use shared keys (true) or managed identity (false)')
+param use_shared_keys bool = true
+
 // Storage account names must be between 3 and 24 characters, and unique, so
 // generate a unique name.
 @description('The name of the storage account to use')
 param storage_account_name string = 'debianrepo${suffix}'
 
 // Choose the package container name. This will be passed to the function app.
 var package_container_name = 'packages'
 
-// The version of Python to run with
-var python_version = '3.11'
+// Create a container for the Python code
+var python_container_name = 'python'
 
-// The name of the hosting plan, application insights, and function app
-var functionAppName = appName
-var hostingPlanName = appName
-var applicationInsightsName = appName
+// Create a UAMI for the deployment script to access the storage account
+resource uami 'Microsoft.ManagedIdentity/userAssignedIdentities@2023-01-31' = {
+  name: 'uami${suffix}'
+  location: location
+}
 
 // Create a storage account for both package storage and function app storage
+var common_storage_properties = {
+  publicNetworkAccess: 'Enabled'
+  allowBlobPublicAccess: false
+}
+var storage_properties = use_shared_keys ? common_storage_properties : union(common_storage_properties, {
+  allowSharedKeyAccess: false
+})
 resource storageAccount 'Microsoft.Storage/storageAccounts@2023-01-01' = {
   name: storage_account_name
   location: location
@@ -37,10 +48,7 @@ resource storageAccount 'Microsoft.Storage/storageAccounts@2023-01-01' = {
   sku: {
     name: 'Standard_LRS'
   }
-  properties: {
-    publicNetworkAccess: 'Enabled'
-    allowBlobPublicAccess: false
-  }
+  properties: storage_properties
 }
 
 // Create a container for the packages
@@ -54,12 +62,41 @@ resource packageContainer 'Microsoft.Storage/storageAccounts/blobServices/contai
   properties: {
   }
 }
 
+resource pythonContainer 'Microsoft.Storage/storageAccounts/blobServices/containers@2023-01-01' = if (!use_shared_keys) {
+  parent: defBlobServices
+  name: python_container_name
+  properties: {
+  }
+}
+
+// Grant the UAMI Storage Blob Data Contributor on the storage account
+@description('This is the built-in Storage Blob Data Contributor role. See https://learn.microsoft.com/en-gb/azure/role-based-access-control/built-in-roles#storage-blob-data-contributor')
+resource storageBlobDataContributor 'Microsoft.Authorization/roleDefinitions@2022-04-01' existing = {
+  scope: subscription()
+  name: 'ba92f5b4-2d11-453d-a403-e96b0029c9fe'
+}
+resource storageBlobDataContributorRoleAssignment 'Microsoft.Authorization/roleAssignments@2022-04-01' = {
+  name: guid(storageAccount.id, uami.id, storageBlobDataContributor.id)
+  scope: storageAccount
+  properties: {
+    principalId: uami.properties.principalId
+    roleDefinitionId: storageBlobDataContributor.id
+    principalType: 'ServicePrincipal'
+  }
+}
 
 // Create a default Packages file if it doesn't exist using a deployment script
 resource deploymentScript 'Microsoft.Resources/deploymentScripts@2023-08-01' = {
   name: 'createPackagesFile${suffix}'
+  dependsOn: [storageBlobDataContributorRoleAssignment]
   location: location
   kind: 'AzureCLI'
+  identity: {
+    type: 'UserAssigned'
+    userAssignedIdentities: {
+      '${uami.id}': {}
+    }
+  }
   properties: {
     azCliVersion: '2.28.0'
     retentionInterval: 'PT1H'
@@ -72,97 +109,33 @@ resource deploymentScript 'Microsoft.Resources/deploymentScripts@2023-08-01' = {
       {
         name: 'AZURE_BLOB_CONTAINER'
        value: packageContainer.name
       }
-      {
-        name: 'AZURE_STORAGE_KEY'
-        secureValue: storageAccount.listKeys().keys[0].value
-      }
     ]
     // This script preserves the Packages file if it exists and creates it
     // if it does not.
     scriptContent: '''
-az storage blob download -f Packages -c "${AZURE_BLOB_CONTAINER}" -n Packages || echo "No existing file"
+az storage blob download --auth-mode login -f Packages -c "${AZURE_BLOB_CONTAINER}" -n Packages || echo "No existing file"
 touch Packages
-az storage blob upload -f Packages -c "${AZURE_BLOB_CONTAINER}" -n Packages
+az storage blob upload --auth-mode login -f Packages -c "${AZURE_BLOB_CONTAINER}" -n Packages
     '''
     cleanupPreference: 'OnSuccess'
   }
 }
 
-// Create a hosting plan for the function app
-resource hostingPlan 'Microsoft.Web/serverfarms@2023-01-01' = {
-  name: hostingPlanName
-  location: location
-  sku: {
-    name: 'Y1'
-    tier: 'Dynamic'
-  }
-  properties: {
-    reserved: true
-  }
-}
-
-// Create application insights
-resource applicationInsights 'Microsoft.Insights/components@2020-02-02' = {
-  name: applicationInsightsName
-  location: location
-  kind: 'web'
-  properties: {
-    Application_Type: 'web'
-    Request_Source: 'rest'
-  }
-}
-
-// Create the function app.
-resource functionApp 'Microsoft.Web/sites@2023-01-01' = {
-  name: functionAppName
-  location: location
-  kind: 'functionapp,linux'
-  properties: {
-    serverFarmId: hostingPlan.id
-    siteConfig: {
-      linuxFxVersion: 'Python|${python_version}'
-      pythonVersion: python_version
-      appSettings: [
-        {
-          name: 'AzureWebJobsStorage'
-          value: 'DefaultEndpointsProtocol=https;AccountName=${storageAccount.name};EndpointSuffix=${environment().suffixes.storage};AccountKey=${storageAccount.listKeys().keys[0].value}'
-        }
-        {
-          name: 'WEBSITE_CONTENTAZUREFILECONNECTIONSTRING'
-          value: 'DefaultEndpointsProtocol=https;AccountName=${storageAccount.name};EndpointSuffix=${environment().suffixes.storage};AccountKey=${storageAccount.listKeys().keys[0].value}'
-        }
-        {
-          name: 'WEBSITE_CONTENTSHARE'
-          value: toLower(functionAppName)
-        }
-        {
-          name: 'FUNCTIONS_EXTENSION_VERSION'
-          value: '~4'
-        }
-        {
-          name: 'APPINSIGHTS_INSTRUMENTATIONKEY'
-          value: applicationInsights.properties.InstrumentationKey
-        }
-        {
-          name: 'FUNCTIONS_WORKER_RUNTIME'
-          value: 'python'
-        }
-        // Pass the blob container name to the function app - this is the
-        // container which is monitored for new packages.
-        {
-          name: 'BLOB_CONTAINER'
-          value: packageContainer.name
-        }
-      ]
-      ftpsState: 'FtpsOnly'
-      minTlsVersion: '1.2'
-    }
-    httpsOnly: true
+// Create the function app directly, if shared key support is enabled
+module funcapp 'rg_funcapp.bicep' = if (use_shared_keys) {
+  name: 'funcapp${suffix}'
+  params: {
+    location: location
+    storage_account_name: storageAccount.name
+    appName: appName
+    use_shared_keys: true
+    suffix: suffix
   }
 }
 
 // Create the apt sources string for using apt-transport-blob
 output apt_sources string = 'deb [trusted=yes] blob://${storageAccount.name}.blob.core.windows.net/${packageContainer.name} /'
-output function_app_name string = functionApp.name
+output function_app_name string = use_shared_keys ? funcapp.outputs.function_app_name : ''
 output storage_account string = storageAccount.name
 output package_container string = packageContainer.name
+output python_container string = use_shared_keys ? '' : pythonContainer.name

diff --git a/rg_funcapp.bicep b/rg_funcapp.bicep
new file mode 100644
index 0000000..06747af
--- /dev/null
+++ b/rg_funcapp.bicep
@@ -0,0 +1,178 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+// This file creates a function app
+targetScope = 'resourceGroup'
+
+@description('Unique suffix')
+param suffix string = uniqueString(resourceGroup().id)
+
+@description('The location of the resources')
+param location string = resourceGroup().location
+
+@description('The name of the function app to use')
+param appName string = 'debfnapp${suffix}'
+
+@description('Whether to use shared keys (true) or managed identity (false)')
+param use_shared_keys bool = true
+
+// Storage account names must be between 3 and 24 characters, and unique, so
+// generate a unique name.
+@description('The name of the storage account to use')
+param storage_account_name string = 'debianrepo${suffix}'
+
+// Choose the package container name. This will be passed to the function app.
+var package_container_name = 'packages'
+
+// Create a container for the Python code
+var python_container_name = 'python'
+
+// The version of Python to run with
+var python_version = '3.11'
+
+// The name of the hosting plan, application insights, and function app
+var functionAppName = appName
+var hostingPlanName = appName
+var applicationInsightsName = appName
+
+// Existing resources
+resource uami 'Microsoft.ManagedIdentity/userAssignedIdentities@2023-01-31' existing = {
+  name: 'uami${suffix}'
+}
+resource storageAccount 'Microsoft.Storage/storageAccounts@2023-01-01' existing = {
+  name: storage_account_name
+}
+resource defBlobServices 'Microsoft.Storage/storageAccounts/blobServices@2023-01-01' existing = {
+  parent: storageAccount
+  name: 'default'
+}
+resource packageContainer 'Microsoft.Storage/storageAccounts/blobServices/containers@2023-01-01' existing = {
+  parent: defBlobServices
+  name: package_container_name
+}
+resource pythonContainer 'Microsoft.Storage/storageAccounts/blobServices/containers@2023-01-01' existing = if (!use_shared_keys) {
+  parent: defBlobServices
+  name: python_container_name
+}
+@description('This is the built-in Storage Blob Data Contributor role. See https://learn.microsoft.com/en-gb/azure/role-based-access-control/built-in-roles#storage-blob-data-contributor')
+resource storageBlobDataContributor 'Microsoft.Authorization/roleDefinitions@2022-04-01' existing = {
+  scope: subscription()
+  name: 'ba92f5b4-2d11-453d-a403-e96b0029c9fe'
+}
+resource storageBlobDataContributorRoleAssignment 'Microsoft.Authorization/roleAssignments@2022-04-01' existing = {
+  name: guid(storageAccount.id, uami.id, storageBlobDataContributor.id)
+  scope: storageAccount
+}
+
+// Create a hosting plan for the function app
+resource hostingPlan 'Microsoft.Web/serverfarms@2023-01-01' = {
+  name: hostingPlanName
+  location: location
+  sku: {
+    name: 'Y1'
+    tier: 'Dynamic'
+  }
+  properties: {
+    reserved: true
+  }
+}
+
+// Create application insights
+resource applicationInsights 'Microsoft.Insights/components@2020-02-02' = {
+  name: applicationInsightsName
+  location: location
+  kind: 'web'
+  properties: {
+    Application_Type: 'web'
+    Request_Source: 'rest'
+  }
+}
+
+// Construct the app settings common to both modes
+var common_settings = [
+  {
+    name: 'FUNCTIONS_EXTENSION_VERSION'
+    value: '~4'
+  }
+  {
+    name: 'APPINSIGHTS_INSTRUMENTATIONKEY'
+    value: applicationInsights.properties.InstrumentationKey
+  }
+  {
+    name: 'FUNCTIONS_WORKER_RUNTIME'
+    value: 'python'
+  }
+  // Pass the blob container name to the function app - this is the
+  // container which is monitored for new packages.
+  {
+    name: 'BLOB_CONTAINER'
+    value: packageContainer.name
+  }
+]
+// Construct the final application settings.
+// If using shared keys, include the shared key settings.
+// Otherwise, include the managed identity settings.
+var app_settings = use_shared_keys ? concat(common_settings, [
+  {
+    name: 'AzureWebJobsStorage'
+    value: 'DefaultEndpointsProtocol=https;AccountName=${storageAccount.name};EndpointSuffix=${environment().suffixes.storage};AccountKey=${storageAccount.listKeys().keys[0].value}'
+  }
+  {
+    name: 'WEBSITE_CONTENTAZUREFILECONNECTIONSTRING'
+    value: 'DefaultEndpointsProtocol=https;AccountName=${storageAccount.name};EndpointSuffix=${environment().suffixes.storage};AccountKey=${storageAccount.listKeys().keys[0].value}'
+  }
+  {
+    name: 'WEBSITE_CONTENTSHARE'
+    value: toLower(functionAppName)
+  }
+]) : concat(common_settings, [
+  {
+    name: 'AzureWebJobsStorage__accountName'
+    value: storageAccount.name
+  }
+  {
+    name: 'WEBSITE_RUN_FROM_PACKAGE'
+    value: 'https://${storageAccount.name}.blob.${environment().suffixes.storage}/${pythonContainer.name}/function_app.zip'
+  }
+  // Pass the container URL to the function app for the `from_container_url` call.
+  {
+    name: 'BLOB_CONTAINER_URL'
+    value: 'https://${storageAccount.name}.blob.${environment().suffixes.storage}/${packageContainer.name}/'
+  }
+])
+
+// Create the function app.
+resource functionApp 'Microsoft.Web/sites@2023-01-01' = {
+  name: functionAppName
+  dependsOn: [storageBlobDataContributorRoleAssignment]
+  location: location
+  identity: {
+    type: 'SystemAssigned'
+  }
+  kind: 'functionapp,linux'
+  properties: {
+    serverFarmId: hostingPlan.id
+    siteConfig: {
+      linuxFxVersion: 'Python|${python_version}'
+      pythonVersion: python_version
+      appSettings: app_settings
+      ftpsState: 'FtpsOnly'
+      minTlsVersion: '1.2'
+    }
+    httpsOnly: true
+  }
+}
+
+// Grant the Function App Storage Blob Data Contributor on the storage account
+// so it can access the package. Only necessary when using managed identity.
+resource funcAppRole 'Microsoft.Authorization/roleAssignments@2022-04-01' = if (!use_shared_keys) {
+  name: guid(storageAccount.id, functionApp.id, storageBlobDataContributor.id)
+  scope: storageAccount
+  properties: {
+    principalId: functionApp.identity.principalId
+    roleDefinitionId: storageBlobDataContributor.id
+    principalType: 'ServicePrincipal'
+  }
+}
+
+// Output useful values
+output function_app_name string = functionApp.name

diff --git a/waitfortrigger.sh b/waitfortrigger.sh
new file mode 100755
index 0000000..d33ac26
--- /dev/null
+++ b/waitfortrigger.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+set -euo pipefail
+
+FUNCTION_APP_NAME=$1
+RESOURCE_GROUP_NAME=$2
+
+echo "Waiting for event trigger to exist for ${FUNCTION_APP_NAME}"
+
+FUNCTIONS=$(az functionapp function list -n "${FUNCTION_APP_NAME}" -g "${RESOURCE_GROUP_NAME}" --query "[].name" --output tsv)
+echo "App functions (${FUNCTION_APP_NAME}): ${FUNCTIONS}"
+while [[ "${FUNCTIONS}" != *"eventGridTrigger"* ]]
+do
+    sleep 5
+    FUNCTIONS=$(az functionapp function list -n "${FUNCTION_APP_NAME}" -g "${RESOURCE_GROUP_NAME}" --query "[].name" --output tsv)
+    echo "App functions (${FUNCTION_APP_NAME}): ${FUNCTIONS}"
+done
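`waitfortrigger.sh` simply polls until the `eventGridTrigger` function is visible, because the Event Grid subscription cannot be created before the trigger's ARM ID exists. A rough Python rendering of the same loop (assumes the `az` CLI is installed and logged in; `wait_for_trigger` is a hypothetical helper, not part of this patch):

```python
# Sketch of waitfortrigger.sh's polling loop.
import json
import subprocess
import time


def wait_for_trigger(app_name: str, resource_group: str, interval: float = 5.0) -> None:
    """Block until the eventGridTrigger function shows up on the function app."""
    while True:
        result = subprocess.run(
            ["az", "functionapp", "function", "list",
             "-n", app_name, "-g", resource_group,
             "--query", "[].name", "--output", "json"],
            check=True, capture_output=True, text=True,
        )
        names = json.loads(result.stdout)
        print(f"App functions ({app_name}): {names}")
        # Substring match, as the shell script does.
        if any("eventGridTrigger" in name for name in names):
            return
        time.sleep(interval)
```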
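Finally, before pointing `apt-transport-blob` at the repository you can sanity-check the no-shared-key access path locally. This sketch reuses the same `ContainerClient.from_container_url` / `DefaultAzureCredential` combination that `function_app.py` now uses; the URL below is a placeholder in the same shape as the `BLOB_CONTAINER_URL` app setting:

```python
# Sketch only: verify the current credential can read the package container.
from azure.identity import DefaultAzureCredential
from azure.storage.blob import ContainerClient

container_url = "https://debianrepoexample.blob.core.windows.net/packages/"  # placeholder

client = ContainerClient.from_container_url(
    container_url=container_url,
    credential=DefaultAzureCredential(),
)
for blob in client.list_blobs():
    print(blob.name)
```

If this lists `Packages`, the credential has the 'Storage Blob Data Reader' access that the README's instructions require.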