diff --git a/.azuredevops/pipelineTemplates/jobs.getModuleTestFiles.yml b/.azuredevops/pipelineTemplates/jobs.getModuleTestFiles.yml index 46229ad481..3f5650fc65 100644 --- a/.azuredevops/pipelineTemplates/jobs.getModuleTestFiles.yml +++ b/.azuredevops/pipelineTemplates/jobs.getModuleTestFiles.yml @@ -39,13 +39,20 @@ jobs: $deploymentTestPaths = Get-ModuleTestFileList @functionInput -Verbose $testTable = @{} - foreach($deploymentTestPath in $deploymentTestPaths) { - $deploymentTestFileName = Split-Path $deploymentTestPath -Leaf - $testTable[$deploymentTestFileName] = @{ - moduleTestFilePath = $deploymentTestPath - } + foreach ($deploymentTestPath in $deploymentTestPaths) { + if((Split-Path (Split-Path $deploymentTestPath) -Leaf) -ne '.test') { + # Using test files in subfolders (e.g. '.test/deploy.test.json') + $deploymentTestFileName = Split-Path (Split-Path $deploymentTestPath -Parent) -Leaf + } else { + # Using parameter files (e.g. '.test/parameters.json') + $deploymentTestFileName = Split-Path $deploymentTestPath -Leaf + } + $testTable[$deploymentTestFileName] += @{ + moduleTestFilePath = $deploymentTestPath + } } $deploymentTestPathsOutput = $testTable | ConvertTo-Json -Compress + Write-Host ('##vso[task.setVariable variable=moduleTests;isOutput=true]{0}' -f ($testTable | ConvertTo-Json -Compress)) Write-Verbose "Module test files: $deploymentTestPathsOutput" -Verbose diff --git a/.azuredevops/pipelineTemplates/jobs.validateModuleDeployment.yml b/.azuredevops/pipelineTemplates/jobs.validateModuleDeployment.yml index c9186bc3df..7297d60866 100644 --- a/.azuredevops/pipelineTemplates/jobs.validateModuleDeployment.yml +++ b/.azuredevops/pipelineTemplates/jobs.validateModuleDeployment.yml @@ -217,6 +217,7 @@ jobs: $moduleTemplateFilePath = Join-Path '$(System.DefaultWorkingDirectory)' '${{ parameters.templateFilePath }}' $moduleTestFilePath = Join-Path '$(System.DefaultWorkingDirectory)' '$(modulePath)' '$(moduleTestFilePath)' + # Determine possible parameters depending on file type if ((Split-Path $moduleTemplateFilePath -Extension) -eq '.bicep') { $moduleTemplateContent = az bicep build --file $moduleTemplateFilePath --stdout | ConvertFrom-Json -AsHashtable $moduleTemplatePossibleParameters = $moduleTemplateContent.parameters.Keys @@ -230,13 +231,18 @@ jobs: # Building input object $functionInput = @{ - templateFilePath = $moduleTemplateFilePath - parameterFilePath = $moduleTestFilePath - location = $location - resourceGroupName = $resourceGroupName - subscriptionId = $subscriptionId - managementGroupId = $managementGroupId - additionalParameters = @{} + TemplateFilePath = ($moduleTestFilePath -like '*parameters.json') ? $moduleTemplateFilePath : $moduleTestFilePath + Location = $location + ResourceGroupName = $resourceGroupName + SubscriptionId = $subscriptionId + ManagementGroupId = $managementGroupId + AdditionalParameters = @{} + } + + # Handle template file + if ($moduleTestFilePath -like '*parameters.json') { + # Original parameter file + $functionInput['parameterFilePath'] = $moduleTestFilePath } # Handle additional parameters @@ -267,15 +273,16 @@ jobs: # Load used functions . 
(Join-Path '$(System.DefaultWorkingDirectory)' '$(pipelineFunctionsPath)' 'resourceDeployment' 'New-TemplateDeployment.ps1') + # Fetching parameters $location = '${{ parameters.location }}' $resourceGroupName = '${{ parameters.resourceGroupName }}' $subscriptionId = '${{ parameters.subscriptionId }}' $managementGroupId = '${{ parameters.managementGroupId }}' $moduleTemplateFilePath = Join-Path '$(System.DefaultWorkingDirectory)' '${{ parameters.templateFilePath }}' $moduleTestFilePath = Join-Path '$(System.DefaultWorkingDirectory)' '$(modulePath)' '$(moduleTestFilePath)' + # Determine possible parameters depending on file type if ((Split-Path $moduleTemplateFilePath -Extension) -eq '.bicep') { $moduleTemplateContent = az bicep build --file $moduleTemplateFilePath --stdout | ConvertFrom-Json -AsHashtable $moduleTemplatePossibleParameters = $moduleTemplateContent.parameters.Keys } else { $moduleTemplatePossibleParameters = ((Get-Content $moduleTemplateFilePath -Raw) | ConvertFrom-Json -AsHashtable).parameters.keys @@ -289,13 +296,19 @@ jobs: # Building input object $functionInput = @{ - templateFilePath = $moduleTemplateFilePath - parameterFilePath = $moduleTestFilePath - location = $location - resourceGroupName = $resourceGroupName - subscriptionId = $subscriptionId - managementGroupId = $managementGroupId - additionalParameters = @{} + TemplateFilePath = ($moduleTestFilePath -like '*parameters.json') ? $moduleTemplateFilePath : $moduleTestFilePath + Location = $location + ResourceGroupName = $resourceGroupName + SubscriptionId = $subscriptionId + ManagementGroupId = $managementGroupId + DoNotThrow = $true + AdditionalParameters = @{} + } + + # Handle template file + if ($moduleTestFilePath -like '*parameters.json') { + # Original parameter file + $functionInput['parameterFilePath'] = $moduleTestFilePath } # Handle additional parameters @@ -352,9 +365,11 @@ jobs: . (Join-Path '$(System.DefaultWorkingDirectory)' '$(pipelineFunctionsPath)' 'resourceRemoval' 'Initialize-DeploymentRemoval.ps1') $functionInput = @{ - TemplateFilePath = Join-Path '$(System.DefaultWorkingDirectory)' '${{ parameters.templateFilePath }}' + TemplateFilePath = ('$(moduleTestFilePath)' -like '*parameters.json') ? + (Join-Path '$(System.DefaultWorkingDirectory)' '${{ parameters.templateFilePath }}') : + (Join-Path '$(System.DefaultWorkingDirectory)' '$(modulePath)' '$(moduleTestFilePath)') ResourceGroupName = '${{ parameters.resourceGroupName }}' - subscriptionId = '${{ parameters.subscriptionId }}' + SubscriptionId = '${{ parameters.subscriptionId }}' ManagementGroupId = '${{ parameters.managementGroupId }}' DeploymentName = '$(deploymentName)' Verbose = $true diff --git a/.github/actions/templates/getModuleTestFiles/action.yml b/.github/actions/templates/getModuleTestFiles/action.yml index c3c96538ba..f842952d39 100644 --- a/.github/actions/templates/getModuleTestFiles/action.yml +++ b/.github/actions/templates/getModuleTestFiles/action.yml @@ -22,13 +22,17 @@ runs: Write-Output '::group::Get parameter files' # Load used functions . 
(Join-Path $env:GITHUB_WORKSPACE 'utilities' 'pipelines' 'sharedScripts' 'Get-ModuleTestFileList.ps1') + $functionInput = @{ ModulePath = Join-Path $env:GITHUB_WORKSPACE '${{ inputs.modulePath }}' } + Write-Verbose "Invoke task with" -Verbose Write-Verbose ($functionInput | ConvertTo-Json | Out-String) -Verbose + # Get the list of parameter file paths $testFilePaths = Get-ModuleTestFileList @functionInput -Verbose + # Output values to be accessed by next jobs $compressedOutput = $testFilePaths | ConvertTo-Json -Compress if($compressedOutput -notmatch "\[.*\]") { diff --git a/.github/actions/templates/validateModuleDeployment/action.yml b/.github/actions/templates/validateModuleDeployment/action.yml index 496812ac9b..6d5e975869 100644 --- a/.github/actions/templates/validateModuleDeployment/action.yml +++ b/.github/actions/templates/validateModuleDeployment/action.yml @@ -211,29 +211,57 @@ runs: # Load used functions . (Join-Path $env:GITHUB_WORKSPACE 'utilities' 'pipelines' 'resourceDeployment' 'Test-TemplateDeployment.ps1') + # Prepare general parameters + # -------------------------- + # Fetching parameters + $location = '${{ inputs.location }}' + $resourceGroupName = '${{ inputs.resourceGroupName }}' + $subscriptionId = '${{ inputs.subscriptionId }}' + $managementGroupId = '${{ inputs.managementGroupId }}' + + # Determine template & test file path + if ('${{ inputs.templateFilePath }}' -like '*.test*') { + Write-Verbose 'Using template test file [${{ inputs.templateFilePath }}]' -Verbose + $moduleTestFilePath = Join-Path $env:GITHUB_WORKSPACE '${{ inputs.templateFilePath }}' + $moduleTemplateFilePath = Resolve-Path ('{0}/deploy.*' -f ($moduleTestFilePath | Split-Path | Split-Path | Split-Path)) + } else { + Write-Verbose 'Using parameter test file [${{ inputs.parameterFilePath }}]' -Verbose + $moduleTestFilePath = Join-Path $env:GITHUB_WORKSPACE '${{ inputs.parameterFilePath }}' + $moduleTemplateFilePath = Join-Path $env:GITHUB_WORKSPACE '${{ inputs.templateFilePath }}' + } + + # Determine possible parameters depending on file type + if ((Split-Path $moduleTemplateFilePath -Extension) -eq '.bicep') { + $moduleTemplateContent = az bicep build --file $moduleTemplateFilePath --stdout | ConvertFrom-Json -AsHashtable + $moduleTemplatePossibleParameters = $moduleTemplateContent.parameters.Keys + } else { + $moduleTemplatePossibleParameters = ((Get-Content $moduleTemplateFilePath -Raw) | ConvertFrom-Json -AsHashtable).parameters.keys + } + # ----------------- # # Invoke validation # # ----------------- # $functionInput = @{ - templateFilePath = '${{ inputs.templateFilePath }}' - location = '${{ inputs.location }}' - resourceGroupName = '${{ inputs.resourceGroupName }}' - subscriptionId = '${{ inputs.subscriptionId }}' - managementGroupId = '${{ inputs.managementGroupId }}' - additionalParameters = @{} + TemplateFilePath = ($moduleTestFilePath -like '*parameters.json') ? 
$moduleTemplateFilePath : $moduleTestFilePath + Location = $location + ResourceGroupName = $resourceGroupName + SubscriptionId = $subscriptionId + ManagementGroupId = $managementGroupId + AdditionalParameters = @{} } - if(-not [String]::IsNullOrEmpty('${{ inputs.parameterFilePath }}')) { - $functionInput['parameterFilePath'] = Join-Path $env:GITHUB_WORKSPACE '${{ inputs.parameterFilePath }}' + # Handle template file + if ($moduleTestFilePath -like '*parameters.json') { + $functionInput['parameterFilePath'] = $moduleTestFilePath } - if (-not [System.Convert]::ToBoolean('${{ env.enableDefaultTelemetry }}') -and (Get-Content -Path $functionInput.templateFilePath -Raw) -like '*param enableDefaultTelemetry*') { + if (-not [System.Convert]::ToBoolean('${{ env.enableDefaultTelemetry }}') -and ($moduleTemplatePossibleParameters -contains 'enableDefaultTelemetry')) { $functionInput['additionalParameters'] += @{ enableDefaultTelemetry = [System.Convert]::ToBoolean('${{ env.enableDefaultTelemetry }}') } } - Write-Verbose "Invoke task with" -Verbose + Write-Verbose 'Invoke task with' -Verbose Write-Verbose ($functionInput | ConvertTo-Json | Out-String) -Verbose Test-TemplateDeployment @functionInput -Verbose @@ -254,30 +282,55 @@ runs: # Load used functions . (Join-Path $env:GITHUB_WORKSPACE 'utilities' 'pipelines' 'resourceDeployment' 'New-TemplateDeployment.ps1') + # Prepare general parameters + # -------------------------- + $location = '${{ inputs.location }}' + $resourceGroupName = '${{ inputs.resourceGroupName }}' + $subscriptionId = '${{ inputs.subscriptionId }}' + $managementGroupId = '${{ inputs.managementGroupId }}' + + # Determine template & test file path + if ('${{ inputs.templateFilePath }}' -like '*.test*') { + $moduleTestFilePath = Join-Path $env:GITHUB_WORKSPACE '${{ inputs.templateFilePath }}' + $moduleTemplateFilePath = Resolve-Path ('{0}/deploy.*' -f ($moduleTestFilePath | Split-Path | Split-Path | Split-Path)) + } else { + $moduleTestFilePath = Join-Path $env:GITHUB_WORKSPACE '${{ inputs.parameterFilePath }}' + $moduleTemplateFilePath = Join-Path $env:GITHUB_WORKSPACE '${{ inputs.templateFilePath }}' + } + + # Determine possible parameters depending on file type + if ((Split-Path $moduleTemplateFilePath -Extension) -eq '.bicep') { + $moduleTemplateContent = az bicep build --file $moduleTemplateFilePath --stdout | ConvertFrom-Json -AsHashtable + $moduleTemplatePossibleParameters = $moduleTemplateContent.parameters.Keys + } else { + $moduleTemplatePossibleParameters = ((Get-Content $moduleTemplateFilePath -Raw) | ConvertFrom-Json -AsHashtable).parameters.keys + } + # ----------------- # # Invoke deployment # # ----------------- # $functionInput = @{ - templateFilePath = Join-Path $env:GITHUB_WORKSPACE '${{ inputs.templateFilePath }}' - location = '${{ inputs.location }}' - resourceGroupName = '${{ inputs.resourceGroupName }}' - subscriptionId = '${{ inputs.subscriptionId }}' - managementGroupId = '${{ inputs.managementGroupId }}' - doNotThrow = $true - additionalParameters = @{} + TemplateFilePath = ($moduleTestFilePath -like '*parameters.json') ? 
$moduleTemplateFilePath : $moduleTestFilePath + Location = $location + ResourceGroupName = $resourceGroupName + SubscriptionId = $subscriptionId + ManagementGroupId = $managementGroupId + DoNotThrow = $true + AdditionalParameters = @{} } - if(-not [String]::IsNullOrEmpty('${{ inputs.parameterFilePath }}')) { - $functionInput['parameterFilePath'] = Join-Path $env:GITHUB_WORKSPACE '${{ inputs.parameterFilePath }}' + # Handle template file + if ($moduleTestFilePath -like '*parameters.json') { + $functionInput['parameterFilePath'] = $moduleTestFilePath } - if (-not [System.Convert]::ToBoolean('${{ env.enableDefaultTelemetry }}') -and (Get-Content -Path $functionInput.templateFilePath -Raw) -like '*param enableDefaultTelemetry*') { + if (-not [System.Convert]::ToBoolean('${{ env.enableDefaultTelemetry }}') -and ($moduleTemplatePossibleParameters -contains 'enableDefaultTelemetry')) { $functionInput['additionalParameters'] += @{ enableDefaultTelemetry = [System.Convert]::ToBoolean('${{ env.enableDefaultTelemetry }}') } } - Write-Verbose "Invoke task with" -Verbose + Write-Verbose 'Invoke task with' -Verbose Write-Verbose ($functionInput | ConvertTo-Json | Out-String) -Verbose # Invoke deployment @@ -287,11 +340,11 @@ runs: Write-Output ('::set-output name={0}::{1}' -f 'deploymentName', $res.deploymentName) # Populate further outputs - $deploymentOutputHash=@{} + $deploymentOutputHash = @{} foreach ($outputKey in $res.deploymentOutput.Keys) { Write-Output ('::set-output name={0}::{1}' -f $outputKey, $res.deploymentOutput[$outputKey].Value) - $deploymentOutputHash.add($outputKey,$res.deploymentOutput[$outputKey].Value) + $deploymentOutputHash.add($outputKey, $res.deploymentOutput[$outputKey].Value) } $deploymentOutput = $deploymentOutputHash | ConvertTo-Json -Compress -Depth 100 @@ -319,19 +372,17 @@ runs: # Load used function . (Join-Path $env:GITHUB_WORKSPACE 'utilities' 'pipelines' 'resourceRemoval' 'Initialize-DeploymentRemoval.ps1') - if (-not [String]::IsNullOrEmpty('${{ steps.deploy_step.outputs.deploymentName }}')) { - $functionInput = @{ - DeploymentName = '${{ steps.deploy_step.outputs.deploymentName }}' - TemplateFilePath = Join-Path $env:GITHUB_WORKSPACE '${{ inputs.templateFilePath }}' - ResourceGroupName = '${{ inputs.resourceGroupName }}' - ManagementGroupId = '${{ inputs.managementGroupId }}' - Verbose = $true - } + $functionInput = @{ + TemplateFilePath = Join-Path $env:GITHUB_WORKSPACE '${{ inputs.templateFilePath }}' + DeploymentName = '${{ steps.deploy_step.outputs.deploymentName }}' + ResourceGroupName = '${{ inputs.resourceGroupName }}' + ManagementGroupId = '${{ inputs.managementGroupId }}' + Verbose = $true + } - Write-Verbose 'Invoke task with' -Verbose - Write-Verbose ($functionInput | ConvertTo-Json | Out-String) -Verbose + Write-Verbose 'Invoke task with' -Verbose + Write-Verbose ($functionInput | ConvertTo-Json | Out-String) -Verbose - Initialize-DeploymentRemoval @functionInput - } + Initialize-DeploymentRemoval @functionInput Write-Output '::endgroup::' diff --git a/docs/wiki/Contribution guide - Validate module locally.md b/docs/wiki/Contribution guide - Validate module locally.md index 2d53990eff..8c086c62d7 100644 --- a/docs/wiki/Contribution guide - Validate module locally.md +++ b/docs/wiki/Contribution guide - Validate module locally.md @@ -1,4 +1,4 @@ -Use this script to test a module from your PC locally, without a CI environment. 
You can use it to run only the static validation (Pester tests), a deployment validation (dryRun) or an actual deployment to Azure. In the latter cases the script also takes care to replace placeholder tokens in the used parameter file for you. +Use this script to test a module from your PC locally, without a CI environment. You can use it to run only the static validation (Pester tests), a deployment validation (dryRun) or an actual deployment to Azure. In the latter cases, the script also takes care of replacing placeholder tokens in the used module test & template files for you. --- @@ -19,14 +19,14 @@ If the switch for Pester tests (`-PesterTest`) is provided the script will 1. Invoke the module test for the provided template file path and run all tests for it. If the switch for either the validation test (`-ValidationTest`) or deployment test (`-DeploymentTest`) is provided alongside a HashTable for the token replacement (`-ValidateOrDeployParameters`), the script will -1. Either fetch all parameter files of the module's parameter folder (default) or you can specify a single parameter file by leveraging the `parameterFilePath` parameter instead. -1. Create a dictionary to replace all tokens in these parameter files with actual values. This dictionary will consist +1. Either fetch all module test files of the module's `.test` folder (default), or specify a single module test file by leveraging the `ModuleTestFilePath` parameter instead. +1. Create a dictionary to replace all tokens in these module test files with actual values. This dictionary will consist - of the subscriptionID & managementGroupID of the provided `ValidateOrDeployParameters` object, - add all key-value pairs of the `-AdditionalTokens` object to it, - and optionally also add all key-value pairs specified in the `settings.yml`, under the 'local tokens settings'. 1. If the `-ValidationTest` parameter was set, it runs a deployment validation using the `Test-TemplateDeployment` script. 1. If the `-DeploymentTest` parameter was set, it runs a deployment using the `New-TemplateDeployment` script (with no retries). -1. As a final step, it rolls the parameter files back to their original state if either the `-ValidationTest` or `-DeploymentTest` parameters were provided. +1. As a final step, it rolls the module test files back to their original state if either the `-ValidationTest` or `-DeploymentTest` parameters were provided. # How to use it diff --git a/docs/wiki/Fetching latest changes - Scenario 2 Module library and CI environment.md b/docs/wiki/Fetching latest changes - Scenario 2 Module library and CI environment.md index 17e2969c21..0ae06aa333 100644 --- a/docs/wiki/Fetching latest changes - Scenario 2 Module library and CI environment.md +++ b/docs/wiki/Fetching latest changes - Scenario 2 Module library and CI environment.md @@ -10,7 +10,7 @@ The update process is the following: - [2. Apply specific settings to files](#2-apply-specific-settings-to-files) - [3. (Optional) Re-apply your customizations](#3-optional-re-apply-your-customizations) - [4. Run dependencies pipeline](#4-run-dependencies-pipeline) -- [5. Update module parameter files](#5-update-module-parameter-files) +- [5. Update module test files](#5-update-module-test-files) - [6. (Optional) Convert library to ARM](#6-optional-convert-library-to-arm) - [7. Push updated code](#7-push-updated-code) - [8. 
Test and publish modules](#8-test-and-publish-modules) @@ -72,13 +72,13 @@ This process can be automated, by script or CI, if customization tasks are repea Run the '*dependencies pipeline*' to update dependencies configuration that can be updated on the downloaded CARML release. Follow the [Deploy dependencies](./Fetching%20latest%20changes%20-%20Scenario%202%20Module%20library%20only#4-deploy-dependencies) section in the Getting started - Scenario 2 Onboard module library and CI environment documentation to do this. -# 5. Update module parameter files +# 5. Update module test files -Follow the [Update module parameter files](./Getting%20started%20-%20Scenario%202%20Onboard%20module%20library%20and%20CI%20environment#5-update-module-parameter-files) procedure +Follow the [Update module test files](./Getting%20started%20-%20Scenario%202%20Onboard%20module%20library%20and%20CI%20environment#5-update-module-test-files) procedure # 6. (Optional) Convert library to ARM -Follow istructions in [(Optional) Convert library to ARM](./Fetching%20latest%20changes%20-%20Scenario%202%20Module%20library%20only#6-optional-convert-library-to-arm) +Follow instructions in [(Optional) Convert library to ARM](./Fetching%20latest%20changes%20-%20Scenario%202%20Module%20library%20only#6-optional-convert-library-to-arm) # 7. Push updated code diff --git a/docs/wiki/Getting started - Check NamePrefix availability.md b/docs/wiki/Getting started - Check NamePrefix availability.md index 98aca48592..e3592f27e9 100644 --- a/docs/wiki/Getting started - Check NamePrefix availability.md +++ b/docs/wiki/Getting started - Check NamePrefix availability.md @@ -17,11 +17,11 @@ You can find the script under [`utilities/tools/Test-NamePrefixAvailability.ps1` When invoked, the script -1. Fetches all parameter files for modules that require unique names. For example +1. Fetches all module test files for modules that require unique names. For example - `'Microsoft.Storage/storageAccounts'` - `'Microsoft.ContainerRegistry/registries'` - `'Microsoft.KeyVault/vaults'` -1. Replace any tokens contained in the parameter files with the key-value pairs provided in the `Tokens` input parameter. +1. Replaces any tokens contained in the module test files with the key-value pairs provided in the `Tokens` input parameter. 1. Searches, for each resource type, whether the final name is already taken. 1. Returns the result for each resource alongside a final recommendation to use / not use the chosen `'namePrefix'`. diff --git a/docs/wiki/Getting started - Scenario 2 Onboard module library and CI environment.md b/docs/wiki/Getting started - Scenario 2 Onboard module library and CI environment.md index 96e98f3bed..02be9ae6bc 100644 --- a/docs/wiki/Getting started - Scenario 2 Onboard module library and CI environment.md +++ b/docs/wiki/Getting started - Scenario 2 Onboard module library and CI environment.md @@ -6,7 +6,7 @@ This requires several steps: 1. [Fork/clone the repository into your DevOps environment](#2-forkclone-the-repository-into-your-devops-environment) 1. [Configure the CI environment](#3-configure-the-ci-environment) 1. [Deploy dependencies](#4-deploy-dependencies) -1. [Update module parameter files](#5-update-module-parameter-files) +1. [Update module test files](#5-update-module-test-files) 1. [(Optional) Convert library to ARM](#6-optional-convert-library-to-arm) Depending on the DevOps environment you choose (GitHub or Azure DevOps), make sure you also account for the specific requirements outlined below. 
@@ -98,7 +98,7 @@ To lower the barrier to entry and allow users to easily define their own naming > **Note:** This prefix is only used by the CI environment you validate your modules in, and doesn't affect the naming of any resources you deploy as part of any multi-module solutions (applications/workloads) based on the modules. -Each pipeline in CARML deploying resources uses a logic that automatically replaces "tokens" (i.e., placeholders) in any parameter file. Tokens are stored in only a few central locations to facilitate maintenance (e.g., local `settings.yml`, repository secrets or variable groups). +Each pipeline in CARML deploying resources uses a logic that automatically replaces "tokens" (i.e., placeholders) in any module test file. Tokens are stored in only a few central locations to facilitate maintenance (e.g., local `settings.yml`, repository secrets or variable groups). To update the `namePrefix`, perform the following steps: @@ -185,7 +185,7 @@ The primary pipeline settings file ([`settings.yml`](https://github.com/Azure/Re | Variable Name | Example Value | Description | | - | - | - | -| `location` | `"WestEurope"` | The default location to deploy resources to and store deployment metadata at. If no location is specified in the deploying parameter file, this location is used. | +| `location` | `"WestEurope"` | The default location to deploy resources to and store deployment metadata at. If no location is specified in the deploying module test file, this location is used. | | `resourceGroupName` | `"validation-rg"` | The resource group to deploy all resources for validation to. | @@ -295,7 +295,7 @@ The primary pipeline settings file ([`settings.yml`](https://github.com/Azure/Re | Variable Name | Example Value | Description | | - | - | - | -| `location` | `'WestEurope'` | The default location to deploy resources to. If no location is specified in the deploying parameter file, this location is used. | +| `location` | `'WestEurope'` | The default location to deploy resources to. If no location is specified in the deploying module test file, this location is used. | | `resourceGroupName` | `'validation-rg'` | The resource group to deploy all resources for validation into. | | `serviceConnection` | `'Contoso-Connection'` | The service connection that points to the subscription to test in and publish to. | @@ -383,7 +383,7 @@ In special cases, manual actions may be required to provision certain resources To successfully deploy the sites module using the `fa.parameters.json` parameter file, you need to create an Azure Active Directory App with its API endpoint enabled (e.g., `api://`) and add a secret. The secret value then needs to be stored in a Key Vault secret. -# 5. Update module parameter files +# 5. Update module test files Once the required dependencies are deployed, there is one more step left to get as many module pipelines running as possible. 
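To illustrate the kind of change this step involves, the following is a hedged sketch of a tokenized value inside a Bicep module test file — the token, module path and resource name are examples only, not prescribed values:

```bicep
// Hypothetical excerpt of a module test file. The '<<namePrefix>>' token is a
// placeholder the CI environment replaces with your actual prefix (e.g. 'cntso')
// before the file is handed to the deployment.
module testDeployment '../../deploy.bicep' = {
  name: 'sa-min-test'
  params: {
    name: '<<namePrefix>>azsaminx001' // resolves to e.g. 'cntsoazsaminx001'
  }
}
```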
diff --git a/docs/wiki/Known issues.md b/docs/wiki/Known issues.md index 2bd7b4a51c..b3eff50726 100644 --- a/docs/wiki/Known issues.md +++ b/docs/wiki/Known issues.md @@ -16,7 +16,7 @@ This section provides an overview of the most impactful limitations and known is - [CI environment specific](#ci-environment-specific) - [Static validation](#static-validation) - [Deployment validation](#deployment-validation) - - [Limited parameter file set](#limited-parameter-file-set) + - [Limited module test file set](#limited-module-test-file-set) - [Publishing](#publishing) --- @@ -89,11 +89,11 @@ This section outlines known issues that currently affect the CI environment stat This section outlines known issues that currently affect the CI environment deployment validation step. -### Limited parameter file set +### Limited module test file set -The deployment validation step aims to validate multiple configurations for each module. This is done by providing multiple parameter files to be leveraged by the same resource module, each covering a specific scenario. +The deployment validation step aims to validate multiple configurations for each module. This is done by providing multiple module test files to be leveraged by the same resource module, each covering a specific scenario. -The first planned step for each module is to provide a 'minimum-set' parameter file, limited to the top-level resource required parameters, vs. a 'maximum-set' parameter file, including all possible properties, child resources and extension resources. Some of the modules are still tested through one parameter file only. This is tracked by issue [#401](https://github.com/Azure/ResourceModules/issues/401). +The first planned step for each module is to provide a 'minimum-set' module test file, limited to the top-level resource required parameters, vs. a 'maximum-set' module test file, including all possible properties, child resources and extension resources. Some of the modules are still tested through one module test file only. This is tracked by issue [#401](https://github.com/Azure/ResourceModules/issues/401). ## Publishing diff --git a/docs/wiki/The CI environment - Deployment validation.md b/docs/wiki/The CI environment - Deployment validation.md index 1f4ccef177..fa741f7f82 100644 --- a/docs/wiki/The CI environment - Deployment validation.md +++ b/docs/wiki/The CI environment - Deployment validation.md @@ -21,19 +21,21 @@ The deployment validation phase can be divided into three steps, running in sequ # Template validation -The template validation step performs a dry-run with each parameter file in the module's `'.test'` folder +The template validation step performs a dry-run with each module test file in the module's `'.test'` folder (and its subfolders). -In particular, the step runs a `Test-AzDeployment` cmdlet (_the command may vary based on the template schema_) for each provided module parameter file to verify if the template could be deployed using them. +In particular, the step runs a `Test-AzDeployment` cmdlet (_the command may vary based on the template schema_) for each provided module test file to verify if the template could be deployed using them. -The intention of this test is to **fail fast**, before getting to the later deployment step. 
The template validation could fail either because the template is invalid, or because any of the parameter files is configured incorrectly. +The intention of this test is to **fail fast**, before getting to the later deployment step. The template validation could fail either because the template is invalid, or because any of the module test files is configured incorrectly. # Azure deployment validation -This step performs the actual Azure deployments using each available & configured module parameter file. The purpose of this step is to prove the module can be deployed in different configurations based on the different parameters provided. Deployments for the different variants happen in parallel. +This step performs the actual Azure deployments using each available & configured module test file. The purpose of this step is to prove the module can be deployed in different configurations based on the different parameters provided. Deployments for the different variants happen in parallel. If any of these parallel deployments require multiple/different/specific resource instances already present, these resources are deployed by the [dependencies pipeline](./The%20CI%20environment%20-%20Pipeline%20design#dependencies-pipeline). E.g., for the Azure Firewall to be tested with multiple configurations, the dependencies pipeline deploys multiple VNET instances, with a dedicated "AzureFirewallSubnet" in each. -The parameter files used in this stage should ideally cover as many configurations as possible to validate the template flexibility, i.e., to verify that the module can cover multiple scenarios in which the given Azure resource may be used. Using the example of the CosmosDB module, we may want to have one parameter file for the minimum amount of required parameters, one parameter file for each CosmosDB type to test individual configurations, and at least one parameter file testing the supported extension resources such as RBAC & diagnostic settings. +> NOTE: Once issue [1583](https://github.com/Azure/ResourceModules/issues/1583) is resolved, the deployment of these dependencies will be moved into the module test files. You can find additional information about this effort [here](./The%20library%20-%20Module%20design#module-test-files). + +The module test files used in this stage should ideally cover as many configurations as possible to validate the template flexibility, i.e., to verify that the module can cover multiple scenarios in which the given Azure resource may be used. Using the example of the CosmosDB module, we may want to have one module test file for the minimum amount of required parameters, one module test file for each CosmosDB type to test individual configurations, and at least one module test file testing the supported extension resources such as RBAC & diagnostic settings. > **Note:** Since every customer environment might be different due to applied Azure Policies or security policies, modules might behave differently and naming conventions need to be verified beforehand. diff --git a/docs/wiki/The CI environment - Pipeline design.md b/docs/wiki/The CI environment - Pipeline design.md index 426d097c92..9b8bd774b0 100644 --- a/docs/wiki/The CI environment - Pipeline design.md +++ b/docs/wiki/The CI environment - Pipeline design.md @@ -46,7 +46,7 @@ The following paragraphs provide an overview of the different phases and shared This paragraph provides an overview of the three phases performed by each module pipeline. Further details about the implementation and design of each phase are provided on the dedicated pages linked below. 1. **Static Validation**: Runs a set of static Pester tests on the module and its templates to ensure they comply with the design principles of CARML. 
Further details for this phase are provided on the corresponding wiki page - see the [Static validation](./The%20CI%20environment%20-%20Static%20validation) section. -1. **Deployment Validation**: An actual Azure deployment is run in a sandbox subscription leveraging a predefined set of parameter files, each validating a different configuration of the same Azure resource in parallel. The test suite is cleaned up by default, removing all test resources post-deployment. Further details for this phase are provided on the corresponding wiki page - see the [Deployment validation](./The%20CI%20environment%20-%20Deployment%20validation) section. +1. **Deployment Validation**: An actual Azure deployment is run in a sandbox subscription leveraging a predefined set of module test files, each validating a different configuration of the same Azure resource in parallel. The test suite is cleaned up by default, removing all test resources post-deployment. Further details for this phase are provided on the corresponding wiki page - see the [Deployment validation](./The%20CI%20environment%20-%20Deployment%20validation) section. 1. **Publishing**: Runs only if the previous steps are successful. A new module version is published to all configured target locations such as template specs, private Bicep registry and Azure DevOps Universal Packages. Published module versions can then be referenced by solutions using them. Further details for this phase are provided on the corresponding wiki page - see the [Publishing](./The%20CI%20environment%20-%20Publishing) page. Pipeline phases @@ -118,6 +118,8 @@ In addition to module pipelines, the repository includes several platform pipeli ## Dependencies pipeline +> NOTE: The dependencies deployed as part of this pipeline will be moved to the individual modules that depend on them once issue [1583](https://github.com/Azure/ResourceModules/issues/1583) is resolved. You can find further information about this effort [here](./The%20library%20-%20Module%20design#module-test-files). + In order to successfully run module pipelines to validate and publish CARML modules to the target environment, certain Azure resources may need to be deployed beforehand. For example, any instance of the \[Virtual Machine] module needs an existing virtual network to be connected to and a Key Vault hosting its required local admin credentials to be referenced. diff --git a/docs/wiki/The CI environment - Pipeline usage.md b/docs/wiki/The CI environment - Pipeline usage.md index b80157ea72..e270ab982b 100644 --- a/docs/wiki/The CI environment - Pipeline usage.md +++ b/docs/wiki/The CI environment - Pipeline usage.md @@ -22,7 +22,7 @@ This section provides a guideline on how to use the CARML CI environment pipelin To validate updates to a module template, you can perform the following steps: -1. (Optionally) Update the module's parameter files to reflect your changes. +1. (Optionally) Update the module's test files to reflect your changes. 1. Push the local changes to the repository (using a branch that is not `main|master`). 1. On the DevOps platform, navigate to your pipelines and select the pipeline that was registered for the module you updated. 1. Select the branch with your updated template. 
diff --git a/docs/wiki/The CI environment - Static validation.md b/docs/wiki/The CI environment - Static validation.md index 5bdd9ccaa8..3c97c59a88 100644 --- a/docs/wiki/The CI environment - Static validation.md +++ b/docs/wiki/The CI environment - Static validation.md @@ -30,11 +30,11 @@ The following activities are performed by the [`utilities/pipelines/staticValida - describes all the parameters - describes all outputs - describes all cross-references -- **Parameter Files**, e.g.: - - at least one `*parameters.json` exists - - files should be valid JSON - - contains all required parameters - - (if tokens are used) Tests that no token values (e.g., `11111111-1111-1111-1111-11111111111`) from the specified token list (i.e., `deploymentSpId`, `subscriptionId`, `managementGroupId`, `tenantId`) are used in the parameter files. Instead, the token itself should be referenced. +- **Module Test Files**, e.g.: + - at least one `*parameters.json`/`deploy.test.bicep` exists + - JSON files must be valid JSON + - must contain all required parameters + - (if tokens are used) Tests that no token values (e.g., `11111111-1111-1111-1111-11111111111`) from the specified token list (i.e., `deploymentSpId`, `subscriptionId`, `managementGroupId`, `tenantId`) are used in the module test files. Instead, the token itself should be referenced. ## Output example diff --git a/docs/wiki/The CI environment - Token replacement.md b/docs/wiki/The CI environment - Token replacement.md index 115e46ee96..7a6dcc668f 100644 --- a/docs/wiki/The CI environment - Token replacement.md +++ b/docs/wiki/The CI environment - Token replacement.md @@ -1,4 +1,4 @@ -This section provides details on the tokens replacement functionality that enables the use of tokens inside template parameter files instead of plain text strings. +This section provides details on the token replacement functionality that enables the use of tokens inside module test files instead of plain text strings. --- @@ -9,7 +9,7 @@ This section provides details on the tokens replacement functionality that enabl - [Default Tokens](#default-tokens) - [(Optional) Local Custom Tokens](#optional-local-custom-tokens) - [How it works](#how-it-works) - - [How tokens are replaced in a parameter file](#how-tokens-are-replaced-in-a-parameter-file) + - [How tokens are replaced in a module test file](#how-tokens-are-replaced-in-a-module-test-file) --- @@ -17,7 +17,7 @@ This section provides details on the tokens replacement functionality that enabl Tokens allow you to test deploying modules in your own environment (i.e., using tokens for your naming conventions), or apply other customizations to your resources (i.e., injecting a subscription ID inside a Resource ID string). -The [module pipelines](./The%20CI%20environment%20-%20Pipeline%20design#module-pipelines) leverage a token replacement function that enables parameter files to contain tokens (i.e., `<<subscriptionId>>`, `<<namePrefix>>`) instead of using static values. This helps with the following: +The [module pipelines](./The%20CI%20environment%20-%20Pipeline%20design#module-pipelines) leverage a token replacement function that enables module test files to contain tokens (i.e., `<<subscriptionId>>`, `<<namePrefix>>`) instead of using static values. This helps with the following: - Allows the repository to be portable without having static values from where it was cloned. - Enables dynamic updates of the tokens from single locations without having to modify all files. 
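As a hedged illustration — the parameter name and token values below are invented for the example — a module test file can embed tokens inside otherwise static strings:

```bicep
// Hypothetical test-file parameter: the '<<subscriptionId>>' and '<<namePrefix>>'
// tokens keep the repository portable and are replaced with real values by the CI.
param diagnosticWorkspaceId string = '/subscriptions/<<subscriptionId>>/resourceGroups/validation-rg/providers/Microsoft.OperationalInsights/workspaces/<<namePrefix>>-law-001'
```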
@@ -25,7 +25,7 @@ The [module pipelines](./The%20CI%20environment%20-%20Pipeline%20design#module-p # Token Types -There are 2 types of tokens that can be applied on a parameter file: +There are 2 types of tokens that can be applied on a module test file: ## Default Tokens @@ -49,7 +49,7 @@ localToken_tokenB: 'bar' > **Note:** The CI pipelines automatically remove the `localToken_` prefix from the name when processing the token replacement. This means that your actual token name is `tokenA` and NOT `localToken_tokenA`. -Let's say you'd want to use this token inside a Key Vault parameter file, to deploy the Key Vault with a name that contains this token: +Let's say you'd want to use this token inside a Key Vault module test file, to deploy the Key Vault with a name that contains this token: ```json "parameters": { @@ -76,19 +76,19 @@ When validating modules through the CI environment, you must update it to a cust # How it works -The below image compares the different token types that can be used for parameter file tokens: +The below image compares the different token types that can be used for module test file tokens: tokenTypes -## How tokens are replaced in a parameter file +## How tokens are replaced in a module test file The below diagram illustrates the Token Replacement Functionality via the [validate module deployment](https://github.com/Azure/ResourceModules/blob/main/.github/actions/templates/validateModuleDeployment/action.yml) Action/Template. tokenReplacement - **1A.** The user creates default tokens as [GitHub Secrets](https://docs.github.com/en/actions/security-guides/encrypted-secrets#creating-encrypted-secrets-for-a-repository) or [Azure DevOps Pipeline Variables](https://docs.microsoft.com/en-us/azure/devops/pipelines/library/?view=azure-devops), that are injected as environment variables. -- **1B.** The user can also create local custom Parameter File Tokens in the [settings.yml](https://github.com/Azure/ResourceModules/blob/main/settings.yml). Tokens start with `localTokens_` and then followed by the actual token name (e.g. `tokenA`). This prefix gets removed by the CI leaving the original token name -- **2.** The parameter files can now be tokenized as per required value. And the token format can look like `<<tokenName>>`. Example: +- **1B.** The user can also create local custom Tokens in the [settings.yml](https://github.com/Azure/ResourceModules/blob/main/settings.yml). Tokens start with `localToken_`, followed by the actual token name (e.g. `tokenA`). This prefix gets removed by the CI, leaving the original token name. +- **2.** The module test files can now be tokenized as required. The token format looks like `<<tokenName>>`. Example: ```json "adminPassword": { @@ -106,10 +106,10 @@ The below diagram illustrates the Token Replacement Functionality via the [valid - **3B.** The Replace Tokens function gets local custom tokens from the [settings.yml](https://github.com/Azure/ResourceModules/blob/main/settings.yml). > Local Tokens are easier to scale as you just need to define them in this file without adding new environment variables or modifying workflows or tasks. -- **3C.** The Replace Tokens function gets the Module Parameter file (tokenized and not deployable) and then all tokens are processed for replacement. +- **3C.** The Replace Tokens function gets the module test file (tokenized and not deployable) and then all tokens are processed for replacement. -- **3D.** The updated Module Parameter file is then saved, replacing the tokenized version. 
This file is now 'deployable'. +- **3D.** The updated module test file is then saved, replacing the tokenized version. This file is now 'deployable'. -- **4A.** The Validate/Deploy function retrieves the latest updated module Parameter file. +- **4A.** The Validate/Deploy function retrieves the latest updated module test file. - **4B.** The Validate/Deploy function validates the deployment artifacts for the module before deploying it to the Azure Sandbox Subscription. diff --git a/docs/wiki/The context - CARML CI environment.md b/docs/wiki/The context - CARML CI environment.md index 5701964290..afc38c0107 100644 --- a/docs/wiki/The context - CARML CI environment.md +++ b/docs/wiki/The context - CARML CI environment.md @@ -22,7 +22,7 @@ This paragraph provides an overview of the standard development-to-deployment fl This flow generally covers 3 phases: -1. In the **Develop modules** phase modules are first implemented/updated and then validated using one or multiple test-parameter files, testing their successful deployment to a sandbox subscription to prove their correctness. +1. In the **Develop modules** phase, modules are first implemented/updated and then validated using one or multiple module test files, testing their successful deployment to a sandbox subscription to prove their correctness. 1. The next phase, **Publish modules**, packages and publishes the tested and approved modules to a target location for later consumption. The target location (also known as package store or artifact store) should support versioning to allow referencing a specific module version and to avoid breaking changes when referencing them. diff --git a/docs/wiki/The context - CARML library.md b/docs/wiki/The context - CARML library.md index b01a07f626..50a883782f 100644 --- a/docs/wiki/The context - CARML library.md +++ b/docs/wiki/The context - CARML library.md @@ -20,7 +20,7 @@ Using configuration & template files that represent the deployed infrastructure - Repeatability: You can deploy your infrastructure in a repeatable fashion, hence minimizing the chance of manual errors. - Reusability: You can reuse your automation to deploy the same infrastructure to different environments. For example, leveraging a multi-stage deployment from a sandbox environment, via integration to production using the same code. -In the context of Bicep or ARM/JSON templates, we usually leverage a combination of flexible templates that are deployed using multiple parameter files mapped to different scenarios. +In the context of Bicep or ARM/JSON templates, we usually leverage a combination of flexible templates that are deployed using multiple module test files mapped to different scenarios. # A module in CARML @@ -28,7 +28,7 @@ In the context of _CARML_, we define a module as a reusable, template-based **bu Each module is generalized for maximum flexibility. Each template should be able to cover as many resource-specific scenarios as possible and not restrict the user by making assumptions on the user's behalf. Eventually, the injected parameters should decide what the template deploys. -Furthermore, each module comes with default values for its optional parameters, a detailed documentation for its usage and one or multiple parameter files to prove its correctness. +Furthermore, each module comes with default values for its optional parameters, detailed documentation for its usage, and one or multiple module test files to prove its correctness. 
## CARML module features A CARML module should comply with the following characteristics: > For example, a virtual machine module also deploys related OS disks and network interfaces. - **Reusable**: Several modules can be combined together to create & orchestrate more complex architectures (i.e., multi-module solutions) like workloads/applications or single services. > For example, the resource group, the network security group and the virtual network modules can be combined to create a resource group hosting a virtual network with multiple subnets associated to specific NSGs. -- **Multi-purpose**: Each module aims to cover most of the main resource's capabilities, without the need to maintain multiple module instances for different use cases. Instead, a generalized module can be consumed through parameter files. +- **Multi-purpose**: Each module aims to cover most of the main resource's capabilities, without the need to maintain multiple module instances for different use cases. Instead, a generalized module can be consumed through module test files. > For example, the same virtual machine module can deploy a Windows OS VM or a Linux-based VM depending on input parameters. - **Integrates child resources**: Each module can deploy **_one_** instance of a resource and optionally **_n_** instances of its child resources. > For example, the Key Vault module can deploy **_one_** Key Vault and optionally **_n_** Key Vault access policies. @@ -53,7 +53,7 @@ CARML can be considered "*opinionated*" as its code strictly follows a set of de This section illustrates the previously described module features applied to the storage account module. -Leveraging five different parameter files, the same storage account module is able to deploy five different storage account configurations. +Leveraging five different module test files, the same storage account module is able to deploy five different storage account configurations. Library: storage variants diff --git a/docs/wiki/The library - Module design.md b/docs/wiki/The library - Module design.md index 673299cde8..a2bbe007e0 100644 --- a/docs/wiki/The library - Module design.md +++ b/docs/wiki/The library - Module design.md @@ -25,7 +25,7 @@ This section details the design principles followed by the CARML Bicep modules. - [Deployment names](#deployment-names) - [Outputs](#outputs) - [ReadMe](#readme) -- [Parameter files](#parameter-files) +- [Module test files](#module-test-files) - [Telemetry](#telemetry) --- @@ -60,12 +60,13 @@ They can be deployed in different configurations just by changing the input para A **CARML module** consists of - The Bicep template deployment file (`deploy.bicep`). -- One or multiple template parameters files (`*parameters.json`) that will be used for testing, located in the `.test` subfolder. +- One or multiple template parameter files (`*parameters.json`) or module test files (`deploy.test.bicep`) that will be used for testing, located in the `.test` folder and its subfolders. - A `readme.md` file which describes the module itself. A module usually represents a single resource or a set of closely related resources. For example, a storage account and the associated lock, or a virtual machine and its network interfaces. Modules are located in the `modules` folder. Also, each module should be implemented with all capabilities it and its children support. 
This includes + - `Locks` - `Role assignments (RBAC)` - `Diagnostic Settings` @@ -104,28 +105,29 @@ Use the following naming standard for module files and folders: - Module folders are in camelCase and their name reflects the main resource type of the Bicep module they are hosting (e.g., `storageAccounts`, `virtualMachines`). - Extension resource modules are placed in the `.bicep` subfolder and named `nested_<extensionResourceType>.bicep` - ``` txt + ```txt Microsoft.<Provider> └─ <resourceType> ├─ .bicep | ├─ nested_extensionResource1.bicep ├─ .test - | └─ parameters.json + | └─ ... ├─ deploy.bicep └─ readme.md ``` - >**Example**: `nested_roleAssignments.bicep` in the `Microsoft.Web\sites\.bicep` folder contains the `site` resource RBAC implementation. - >``` txt - >Microsoft.Web - >└─ sites + > **Example**: `nested_roleAssignments.bicep` in the `Microsoft.Web\sites\.bicep` folder contains the `site` resource RBAC implementation. + > + > ```txt + > Microsoft.Web + > └─ sites > ├─ .bicep > | └─ nested_roleAssignments.bicep > ├─ .test - > | └─ parameters.json + > | └─ ... > ├─ deploy.bicep > └─ readme.md - >``` + > ``` ## Patterns @@ -158,10 +160,11 @@ resource <resourceName>_lock 'Microsoft.Authorization/locks@2017-04-01' = if (!empty(lock)) { ``` > **Note:** How locks are passed to other resource templates depends on the type of module relationship: + > > - Child and extension resources > - Locks are not automatically passed down, as they are inherited by default in Azure > - The reference of the child/extension template should look similar to: `lock: contains(<childObject>, 'lock') ? <childObject>.lock : ''` - > - Using this implementation, a lock is only deployed to the child/extension resource if explicitly specified in the module's parameter file + > - Using this implementation, a lock is only deployed to the child/extension resource if explicitly specified in the module's test file > - For example, the lock of a Storage Account module is not automatically passed to a Storage Container child-deployment. Instead, the Storage Container resource is automatically locked by Azure together with a locked Storage Account > - Cross-referenced resources > - All cross-referenced resources share the lock with the main resource to prevent depending resources to be changed or deleted @@ -181,6 +184,7 @@ The RBAC deployment has 2 elements. A module that contains the implementation, and a module reference in the parent resource. Details #### 1st Element in main resource + ```bicep @description('Optional. Array of role assignment objects that contain the \'roleDefinitionIdOrName\' and \'principalId\' to define RBAC role assignments on this resource. In the roleDefinitionIdOrName attribute, you can provide either the display name of the role definition, or its fully qualified ID in the following format: \'/providers/Microsoft.Authorization/roleDefinitions/c2f4ef07-c644-48eb-af81-4b1b4947fb11\'.') param roleAssignments array = [] module <resourceName>_roleAssignments '.bicep/nested_roleAssignments.bicep' = [for (roleAssignment, index) in roleAssignments: { @@ -200,7 +204,8 @@ Here, you specify the platform roles available for the main resource. The `builtInRoleNames` variable contains the list of applicable roles for the specific resource to which the `nested_roleAssignments.bicep` template applies. ->**Note**: You use the helper script [Get-FormattedRBACRoles.ps1](./Contribution%20guide%20-%20Get%20formatted%20RBAC%20roles) to extract a formatted list of RBAC roles used in the CARML modules based on the RBAC lists in Azure. 
+ > **Note**: You use the helper script [Get-FormattedRBACRoles.ps1](./Contribution%20guide%20-%20Get%20formatted%20RBAC%20roles) to extract a formatted list of RBAC roles used in the CARML modules based on the RBAC lists in Azure. The element requires you to provide both the `principalIds` & `roleDefinitionOrIdName` to assign to the principal IDs. Also, the `resourceId` is the target resource's resource ID that allows us to reference it as an `existing` resource. Note, the implementation of the `split` in the resource reference becomes longer the deeper you go in the child resource hierarchy. @@ -318,11 +323,13 @@ resource <resourceName>_diagnosticSettings 'Microsoft.Insights/diagnosticsetting scope: <resourceName> } ``` +
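A hedged sketch of the diagnostic settings pattern discussed in this section — the symbolic names, API version and properties are illustrative rather than the verbatim CARML implementation:

```bicep
// Illustrative only: a diagnostic settings extension resource scoped to the
// module's main resource (here, a storage account referenced as 'existing').
param diagnosticWorkspaceId string = ''

resource storageAccount 'Microsoft.Storage/storageAccounts@2021-06-01' existing = {
  name: 'mystorageaccount'
}

resource storageAccount_diagnosticSettings 'Microsoft.Insights/diagnosticSettings@2021-05-01-preview' = if (!empty(diagnosticWorkspaceId)) {
  name: 'diag-mystorageaccount'
  properties: {
    workspaceId: diagnosticWorkspaceId
    metrics: [
      {
        category: 'AllMetrics'
        enabled: true
      }
    ]
  }
  scope: storageAccount
}
```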

### Private Endpoints + The Private Endpoint deployment has 2 elements. A module that contains the implementation, and a module reference in the parent resource. The first one loops through the endpoints we want to create, the second one processes them.
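A hedged sketch of these two elements from the parent template's perspective — the module path and the referenced module's parameter names are assumptions for illustration, not the exact CARML interface:

```bicep
// Illustrative only: the parent template (e.g. Key Vault 'deploy.bicep') loops
// over the 'privateEndpoints' input and hands each entry to the cross-referenced
// Private Endpoint module, which contains the actual implementation.
@description('Optional. Configuration details for private endpoints.')
param privateEndpoints array = []

resource keyVault 'Microsoft.KeyVault/vaults@2021-10-01' existing = {
  name: 'myKeyVault'
}

module keyVault_privateEndpoints '../../Microsoft.Network/privateEndpoints/deploy.bicep' = [for (privateEndpoint, index) in privateEndpoints: {
  name: '${uniqueString(deployment().name)}-KeyVault-PrivateEndpoint-${index}'
  params: {
    name: '${keyVault.name}-${privateEndpoint.service}-${index}'
    serviceResourceId: keyVault.id
    subnetResourceId: privateEndpoint.subnetResourceId
    groupIds: [
      privateEndpoint.service
    ]
  }
}]
```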

@@ -390,9 +397,12 @@ Within a bicep file, use the following conventions: - `resource storageAccount 'Microsoft.Storage/storageAccounts@2019-06-01'` - `resource virtualMachine 'Microsoft.Compute/virtualMachines@2020-06-01'` - Parent reference + - If working on a child resource, refrain from string concatenation and instead, use the parent reference via the `existing` keyword. - The way this is implemented differs slightly the lower you go in the hierarchy. Note the following examples: + - 1st level child resource (example _storageAccount/blobService_) + ```bicep resource storageAccount 'Microsoft.Storage/storageAccounts@2021-06-01' existing = { name: storageAccountName @@ -404,7 +414,9 @@ Within a bicep file, use the following conventions: properties: {...} } ``` + - 2nd level child resource (example _storageAccount/blobService/container_) + ```bicep resource storageAccount 'Microsoft.Storage/storageAccounts@2021-06-01' existing = { name: storageAccountName @@ -420,7 +432,9 @@ Within a bicep file, use the following conventions: properties: {...} } ``` + - 3rd level child resource (example _storageAccount/blobService/container/immutabilityPolicies_) + ```bicep resource storageAccount 'Microsoft.Storage/storageAccounts@2021-06-01' existing = { name: storageAccountName @@ -440,11 +454,12 @@ Within a bicep file, use the following conventions: properties: {...} } ``` + ## Modules - - Module symbolic names are in camel_Snake_Case, following the schema `_` e.g., `storageAccount_fileServices`, `virtualMachine_nic`, `resourceGroup_roleAssignments`. - - Modules enable you to reuse code from a Bicep file in other Bicep files. As such, they're normally leveraged for deploying child resources (e.g., file services in a storage account), cross referenced resources (e.g., network interface in a virtual machine) or extension resources (e.g., role assignment in a resource group). - - When a module requires to deploy a resource whose resource type is outside of the main module's provider namespace, the module of this additional resource is referenced locally. For example, when extending the Key Vault module with Private Endpoints, instead of including in the Key Vault module an ad hoc implementation of a Private Endpoint, the Key Vault directly references the Private Endpoint module (i.e., `module privateEndpoint 'https://github.com/Azure/ResourceModules/blob/main/Microsoft.Network/privateEndpoints/deploy.bicep'`). Major benefits of this implementation are less code duplication, more consistency throughout the module library and allowing the consumer to leverage the full interface provided by the referenced module. +- Module symbolic names are in camel_Snake_Case, following the schema `_` e.g., `storageAccount_fileServices`, `virtualMachine_nic`, `resourceGroup_roleAssignments`. +- Modules enable you to reuse code from a Bicep file in other Bicep files. As such, they're normally leveraged for deploying child resources (e.g., file services in a storage account), cross referenced resources (e.g., network interface in a virtual machine) or extension resources (e.g., role assignment in a resource group). +- When a module requires to deploy a resource whose resource type is outside of the main module's provider namespace, the module of this additional resource is referenced locally. 
For example, when extending the Key Vault module with Private Endpoints, instead of including in the Key Vault module an ad hoc implementation of a Private Endpoint, the Key Vault directly references the Private Endpoint module (i.e., `module privateEndpoint 'https://github.com/Azure/ResourceModules/blob/main/Microsoft.Network/privateEndpoints/deploy.bicep'`). Major benefits of this implementation are less code duplication, more consistency throughout the module library and allowing the consumer to leverage the full interface provided by the referenced module. > **Note**: Cross-referencing modules from the local repository creates a dependency for the modules applying this technique on the referenced modules being part of the local repository. Reusing the example from above, the Key Vault module has a dependency on the referenced Private Endpoint module, meaning that the repository from which the Key Vault module is deployed also requires the Private Endpoint module to be present. For this reason, we provide a utility to check for any local module references in a given path. This can be useful to determine which module folders you'd need if you don't want to keep the entire library. For further information on how to use the tool, please refer to the tool-specific [documentation](./Getting%20started%20-%20Get%20module%20cross-references). ### Deployment names @@ -469,7 +484,9 @@ While exceptions might be needed, the following guidance should be followed as m ... }] ``` + > **Example**: for the `roleAssignment` deployment in the Key Vault `secrets` template + > > ``` > module secret_roleAssignments '.bicep/nested_roleAssignments.bicep' = [for (roleAssignment, index) in roleAssignments: { > name: '${deployment().name}-Rbac-${index}' @@ -480,7 +497,9 @@ While exceptions might be needed, the following guidance should be followed as m ``` '${uniqueString(deployment().name, location)}--' ``` + > **Example**: for the `tableServices` deployment inside the `storageAccount` template + > > ``` > name: '${uniqueString(deployment().name, location)}-Storage-TableServices' > ``` @@ -490,7 +509,9 @@ While exceptions might be needed, the following guidance should be followed as m ``` '${deployment().name}-[-${index}]' ``` + > **Example**: for the `tables` deployment in the `tableServices` template + > > ``` > name: '${deployment().name}-Table-${index}' > ``` @@ -512,6 +533,7 @@ While exceptions might be needed, the following guidance should be followed as m Each module must come with a ReadMe Markdown file that outlines what the module contains and 'how' it can be used. Its primary components are in order: + - A title with a reference to the primary resource in Start Case followed by the primary resource namespace e.g., Key Vaults `[Microsoft.KeyVault/vaults]`. - A short description - A **Resource types** section with a table that outlines all resources that can be deployed as part of the module. @@ -521,22 +543,127 @@ Its primary components are in order: - A **Template references** section listing relevant resources [Azure resource reference](https://docs.microsoft.com/en-us/azure/templates). Note the following recommendations: + - Refer to [Generate module Readme](./Contribution%20guide%20-%20Generate%20module%20Readme) for creating from scratch or updating the module ReadMe Markdown file. - It is not recommended to describe how to use child resources in the parent readme file (for example, 'How to define a \[container] entry for the \[storage account]'). 
Instead, it is recommended to reference the child resource's ReadMe (for example, 'container/readme.md').
-# Parameter files
+# Module test files
+
+Module test files in CARML are implemented in
-Parameter files in CARML leverage the common `deploymentParameters.json` schema for ARM deployments. As parameters are usually specific to their corresponding template, we only have a few general recommendations:
-- Parameter filenames should ideally relate to the content they deploy. For example, a parameter file `min.parameters.json` should be chosen for a parameter file that contains only the minimum set of parameters to deploy the module.
-- Likewise, the `name` parameter we have in most modules should give some indication of the file it was deployed with. For example, a `min.parameters.json` parameter file for the virtual network module may have a `name` property with the value `sxx-az-vnet-min-001` where `min` relates to the prefix of the parameter file itself.
-- A module should have as many parameter files as it needs to evaluate all parts of the module's functionality.
-- Sensitive data should not be stored inside the parameter file but rather be injected by the use of tokens, as described in the [Token replacement](./The%20CI%20environment%20-%20Token%20replacement) section, or via a [Key Vault reference](https://docs.microsoft.com/en-us/azure/azure-resource-manager/templates/key-vault-parameter?tabs=azure-cli#reference-secrets-with-static-id).
+- a classic way, leveraging the common `deploymentParameters.json` schema for ARM deployments, or
+- using comprehensive `.bicep` test files that not only test the module's template in a certain scenario, but also deploy any required dependencies for it. All classic test files will be migrated to this format following issue [1583](https://github.com/Azure/ResourceModules/issues/1583).
+
+In either case, the following general guidelines apply:
+
+- A module should have as many module test files as it needs to evaluate all parts of the module's functionality.
+- Sensitive data should not be stored inside the module test file but rather be injected by the use of tokens, as described in the [Token replacement](./The%20CI%20environment%20-%20Token%20replacement) section, or via a [Key Vault reference](https://docs.microsoft.com/en-us/azure/azure-resource-manager/templates/key-vault-parameter?tabs=azure-cli#reference-secrets-with-static-id).
+
+In addition, the following file-type-specific guidelines apply:
+
+- JSON Parameter file specific
+  - Parameter file names should ideally relate to the content they deploy. For example, a parameter file `min.parameters.json` should be chosen for a parameter file that contains only the minimum set of parameters to deploy the module.
+  - Likewise, the `name` parameter we have in most modules should give some indication of the file it was deployed with. For example, a `min.parameters.json` parameter file for the virtual network module may have a `name` property with the value `sxx-az-vnet-min-001` where `min` relates to the prefix of the parameter file itself.
+- Bicep file specific
+  - Each scenario should be set up in its own sub-folder (e.g. `.test/linux`).
+  - Each folder should contain at least a `deploy.test.bicep` file and optionally an additional `dependencies.bicep` file. The `deploy.test.bicep` file should deploy any immediate dependencies (e.g. a resource group, if required) and invoke the module's main template while providing all parameters for a given test scenario.
The `dependencies.bicep` file should optionally be used if any additional dependencies must be deployed into a nested scope (e.g. into a deployed resource group).
+  - Parameters
+    - Each file should define a parameter `serviceShort`. This parameter should be unique to this file (i.e., no two test files should share the same value), as it is injected into all resource deployments, making them unique too and accounting for corresponding requirements. As a reference, you can create an identifier by combining a substring of the resource type and the test scenario (e.g., in case of a Linux Virtual Machine deployment: `vmlin`).
+    - If the module deploys a resource-group-level resource, the template should further have a `resourceGroupName` parameter and a subsequent resource group deployment. As a reference for the default name you can use `ms..-${serviceShort}-test-rg`.
+    - Each file should also provide a `location` parameter that may default to the deployment's default location.
+  - It is recommended to define all major resource names in the `deploy.test.bicep` file, as this makes later maintenance easier. To implement this, make sure to pass all resource names to any referenced module.
+  - References to dependencies should be implemented using resource references in combination with outputs. In other words: you should not hardcode any references into the module template's deployment. Instead, use references such as `resourceGroupResources.outputs.managedIdentityPrincipalId`.
+  - If any diagnostic resources (e.g., a Log Analytics workspace) are required for a test scenario, you can reference the centralized `modules/.shared/dependencyConstructs/diagnostic.dependencies.bicep` template. It will also provide you with all outputs you'd need.
+
+ Example (for a resource group level resource)
+
+ ```bicep
+ targetScope = 'subscription'
+
+ // ========== //
+ // Parameters //
+ // ========== //
+ @description('Optional. The name of the resource group to deploy for testing purposes')
+ @maxLength(90)
+ param resourceGroupName string = 'ms.analysisservices.servers-${serviceShort}-test-rg'
+
+ @description('Optional. The location to deploy resources to')
+ param location string = deployment().location
+
+ @description('Optional. A short identifier for the kind of deployment. Should be kept short to not run into resource-name length-constraints')
+ param serviceShort string = 'asdef'
+
+ // =========== //
+ // Deployments //
+ // =========== //
+
+ // General resources
+ // =================
+ resource resourceGroup 'Microsoft.Resources/resourceGroups@2021-04-01' = {
+   name: resourceGroupName
+   location: location
+ }
+
+ module resourceGroupResources 'dependencies.bicep' = {
+   scope: resourceGroup
+   name: '${uniqueString(deployment().name, location)}-nestedDependencies'
+   params: {
+     managedIdentityName: 'dep-<>-msi-${serviceShort}'
+   }
+ }
+
+ // Diagnostics
+ // ===========
+ module diagnosticDependencies '../../../../.shared/dependencyConstructs/diagnostic.dependencies.bicep' = {
+   scope: resourceGroup
+   name: '${uniqueString(deployment().name, location)}-diagnosticDependencies'
+   params: {
+     storageAccountName: 'dep<>azsa${serviceShort}01'
+     logAnalyticsWorkspaceName: 'dep-<>-law-${serviceShort}'
+     eventHubNamespaceEventHubName: 'dep-<>-evh-${serviceShort}'
+     eventHubNamespaceName: 'dep-<>-evhns-${serviceShort}'
+     location: location
+   }
+ }
+
+ // ============== //
+ // Test Execution //
+ // ============== //
+
+ module testDeployment '../../deploy.bicep' = {
+   scope: resourceGroup
+   name: '${uniqueString(deployment().name)}-test-${serviceShort}'
+   params: {
+     name: '<>az${serviceShort}'
+     lock: 'CanNotDelete'
+     skuName: 'S0'
+     roleAssignments: [
+       {
+         roleDefinitionIdOrName: 'Reader'
+         principalIds: [
+           resourceGroupResources.outputs.managedIdentityPrincipalId
+         ]
+       }
+     ]
+     diagnosticLogsRetentionInDays: 7
+     diagnosticStorageAccountId: diagnosticDependencies.outputs.storageAccountResourceId
+     diagnosticWorkspaceId: diagnosticDependencies.outputs.logAnalyticsWorkspaceResourceId
+     diagnosticEventHubAuthorizationRuleId: diagnosticDependencies.outputs.eventHubAuthorizationRuleId
+     diagnosticEventHubName: diagnosticDependencies.outputs.eventHubNamespaceEventHubName
+   }
+ }
+ ```
+
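+ Such a subscription-scope test file can also be run by hand while authoring a module. A minimal sketch, assuming the Az PowerShell module, an authenticated session, and the module folder as working directory (the `.test/common` path and the location value are placeholders, not prescribed by the repository):
+
+ ```powershell
+ # Deploy a subscription-scope module test file manually.
+ # New-AzSubscriptionDeployment is the subscription-scope counterpart of New-AzResourceGroupDeployment.
+ New-AzSubscriptionDeployment `
+     -Name ('test-{0}' -f (Get-Date -Format 'yyyyMMddHHmm')) `
+     -Location 'WestEurope' `
+     -TemplateFile (Join-Path '.test' 'common' 'deploy.test.bicep') `
+     -Verbose
+ ```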
# Telemetry -Each module in CARML contains a `defaultTelemetry` deployment `'pid--${uniqueString(deployment().name)}'`, resulting in deployments such as `'pid--nx2c3rnlt2wru'`. +Each module in CARML contains a `defaultTelemetry` deployment `'pid--${uniqueString(deployment().name)}'`, resulting in deployments such as `'pid--nx2c3rnlt2wru'`. > **Note:** Though implemented at each level in a module hierarchy (e.g., storage account & blobServices), the deployment will only happen for the top-level module in a deployment, but not for its children. To illustrate this better, see the following examples: +> > - Deployment of the KeyVault module and 2 Secrets: Results in 1 `PID` deployment for the KeyVault (and none for the secrets) > - Deployment of the Secret module: Results in 1 `PID` deployment for the Secret @@ -545,6 +672,7 @@ This resource enables the CARML product team to query the number of deployments When using CARML's CI environment you can enable/disable this deployment by switching the `enableDefaultTelemetry` setting in the `settings.yml` file in the repository's root. This value is automatically injected into each individual deployment that is performed as part of the environment's pipeline. When consuming the modules outside of CARML's pipelines you can either + - Set the parameter to a default value of `'false'` - Set the parameter to false when deploying a module diff --git a/modules/.shared/dependencyConstructs/diagnostic.dependencies.bicep b/modules/.shared/dependencyConstructs/diagnostic.dependencies.bicep new file mode 100644 index 0000000000..db688cf95a --- /dev/null +++ b/modules/.shared/dependencyConstructs/diagnostic.dependencies.bicep @@ -0,0 +1,67 @@ +// ========== // +// Parameters // +// ========== // + +@description('Required. The name of the storage account to create.') +param storageAccountName string + +@description('Required. The name of the log analytics workspace to create.') +param logAnalyticsWorkspaceName string + +@description('Required. The name of the event hub namespace to create.') +param eventHubNamespaceName string + +@description('Required. The name of the event hub to create inside the event hub namespace.') +param eventHubNamespaceEventHubName string + +@description('Optional. 
The location to deploy resources to.') +param location string = resourceGroup().location + +// =========== // +// Deployments // +// =========== // +resource storageAccount 'Microsoft.Storage/storageAccounts@2021-08-01' = { + name: storageAccountName + location: location + kind: 'StorageV2' + sku: { + name: 'Standard_LRS' + } + properties: { + allowBlobPublicAccess: false + } +} + +resource logAnalyticsWorkspace 'Microsoft.OperationalInsights/workspaces@2021-12-01-preview' = { + name: logAnalyticsWorkspaceName + location: location +} + +resource eventHubNamespace 'Microsoft.EventHub/namespaces@2021-11-01' = { + name: eventHubNamespaceName + location: location + + resource eventHub 'eventhubs@2021-11-01' = { + name: eventHubNamespaceEventHubName + } + + resource authorizationRule 'authorizationRules@2021-06-01-preview' = { + name: 'RootManageSharedAccessKey' + properties: { + rights: [ + 'Listen' + 'Manage' + 'Send' + ] + } + } +} + +// ======= // +// Outputs // +// ======= // +output storageAccountResourceId string = storageAccount.id +output logAnalyticsWorkspaceResourceId string = logAnalyticsWorkspace.id +output eventHubNamespaceResourceId string = eventHubNamespace.id +output eventHubAuthorizationRuleId string = eventHubNamespace::authorizationRule.id +output eventHubNamespaceEventHubName string = eventHubNamespace::eventHub.name diff --git a/utilities/pipelines/resourceRemoval/Initialize-DeploymentRemoval.ps1 b/utilities/pipelines/resourceRemoval/Initialize-DeploymentRemoval.ps1 index 89c178e42f..ff6e202dc7 100644 --- a/utilities/pipelines/resourceRemoval/Initialize-DeploymentRemoval.ps1 +++ b/utilities/pipelines/resourceRemoval/Initialize-DeploymentRemoval.ps1 @@ -56,7 +56,12 @@ function Initialize-DeploymentRemoval { $null = Set-AzContext -Subscription $subscriptionId } - $moduleName = Split-Path (Split-Path $templateFilePath -Parent) -LeafBase + if (-not (Split-Path (Split-Path $templateFilePath -Parent) -LeafBase)) { + # In case of new dependency approach (template is in subfolder) + $moduleName = Split-Path (Split-Path (Split-Path $templateFilePath -Parent) -Parent) -LeafBase + } else { + $moduleName = Split-Path (Split-Path $templateFilePath -Parent) -LeafBase + } # The initial sequence is a general order-recommendation $removalSequence = @( @@ -67,6 +72,7 @@ function Initialize-DeploymentRemoval { 'Microsoft.OperationsManagement/solutions', 'Microsoft.OperationalInsights/workspaces/linkedServices', 'Microsoft.OperationalInsights/workspaces', + 'Microsoft.KeyVault/vaults', 'Microsoft.Resources/resourceGroups', 'Microsoft.Compute/virtualMachines' ) diff --git a/utilities/pipelines/resourceRemoval/helper/Invoke-ResourcePostRemoval.ps1 b/utilities/pipelines/resourceRemoval/helper/Invoke-ResourcePostRemoval.ps1 index 5bb4eddc0d..54a48513f7 100644 --- a/utilities/pipelines/resourceRemoval/helper/Invoke-ResourcePostRemoval.ps1 +++ b/utilities/pipelines/resourceRemoval/helper/Invoke-ResourcePostRemoval.ps1 @@ -63,7 +63,15 @@ function Invoke-ResourcePostRemoval { if ($matchingKeyVault -and -not $matchingKeyVault.EnablePurgeProtection) { Write-Verbose ("Purging key vault [$resourceName]") -Verbose if ($PSCmdlet.ShouldProcess(('Key Vault with ID [{0}]' -f $matchingKeyVault.Id), 'Purge')) { - $null = Remove-AzKeyVault -ResourceId $matchingKeyVault.Id -InRemovedState -Force -Location $matchingKeyVault.Location + try { + $null = Remove-AzKeyVault -ResourceId $matchingKeyVault.Id -InRemovedState -Force -Location $matchingKeyVault.Location -ErrorAction 'Stop' + } catch { + if 
($_.Exception.Message -like '*DeletedVaultPurge*') { + Write-Warning ('Purge protection for key vault [{0}] enabled. Skipping. Scheduled purge date is [{1}]' -f $resourceName, $matchingKeyVault.ScheduledPurgeDate) + } else { + throw $_ + } + } } } break diff --git a/utilities/pipelines/resourceRemoval/helper/Invoke-ResourceRemoval.ps1 b/utilities/pipelines/resourceRemoval/helper/Invoke-ResourceRemoval.ps1 index 53a504e8a8..75a9f9e179 100644 --- a/utilities/pipelines/resourceRemoval/helper/Invoke-ResourceRemoval.ps1 +++ b/utilities/pipelines/resourceRemoval/helper/Invoke-ResourceRemoval.ps1 @@ -27,8 +27,7 @@ function Invoke-ResourceRemoval { [string] $Type ) - Write-Verbose ('Resource ID [{0}]' -f $resourceId) -Verbose - Write-Verbose ('Resource Type [{0}]' -f $type) -Verbose + Write-Verbose ('Removing resource [{0}]' -f $resourceId) -Verbose switch ($type) { 'Microsoft.Insights/diagnosticSettings' { diff --git a/utilities/pipelines/staticValidation/module.tests.ps1 b/utilities/pipelines/staticValidation/module.tests.ps1 index de04af02c2..bb5672d58c 100644 --- a/utilities/pipelines/staticValidation/module.tests.ps1 +++ b/utilities/pipelines/staticValidation/module.tests.ps1 @@ -532,6 +532,148 @@ Describe 'Readme tests' -Tag Readme { } } +Describe 'Parameter file tests' -Tag 'Parameter' { + + Context 'Deployment test file tests' { + + $deploymentTestFileTestCases = @() + + foreach ($moduleFolderPath in $moduleFolderPaths) { + if (Test-Path (Join-Path $moduleFolderPath '.test')) { + $testFilePaths = Get-ModuleTestFileList -ModulePath $moduleFolderPath | ForEach-Object { Join-Path $moduleFolderPath $_ } + foreach ($testFilePath in $testFilePaths) { + $testFileContent = Get-Content $testFilePath + + if ((Split-Path $testFilePath -Extension) -eq '.json') { + # Skip any classic parameter files + $contentHashtable = $testFileContent | ConvertFrom-Json -Depth 99 + $isParameterFile = $contentHashtable.'$schema' -like '*deploymentParameters*' + if ($isParameterFile) { + continue + } + } + + $deploymentTestFileTestCases += @{ + testFilePath = $testFilePath + testFileContent = $testFileContent + moduleFolderName = $moduleFolderPath.Replace('\', '/').Split('/modules/')[1] + } + } + } + } + + It "[] Bicep test deployment files should invoke test like [module testDeployment '../.*deploy.bicep' = {]" -TestCases ($deploymentTestFileTestCases | Where-Object { (Split-Path $_.testFilePath -Extension) -eq '.bicep' }) { + + param( + [object[]] $testFileContent + ) + + $testIndex = ($testFileContent | Select-String ("^module testDeployment '..\/.*deploy.bicep' = {$") | ForEach-Object { $_.LineNumber - 1 })[0] + + $testIndex -ne -1 | Should -Be $true -Because 'the module test invocation should be in the expected format to allow identification.' + } + + It '[] Bicep test deployment name should contain [-test-]' -TestCases ($deploymentTestFileTestCases | Where-Object { (Split-Path $_.testFilePath -Extension) -eq '.bicep' }) { + + param( + [object[]] $testFileContent + ) + + $expectedNameFormat = ($testFileContent | Out-String) -match '\s*name:.+-test-.+\s*' + + $expectedNameFormat | Should -Be $true -Because 'the handle ''-test-'' should be part of the module test invocation''s resource name to allow identification.' 
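+            # Illustration (not part of the PR): the pattern above matches e.g.
+            #   name: '${uniqueString(deployment().name)}-test-${serviceShort}'
+            # but not a test invocation without the handle, e.g.
+            #   name: '${uniqueString(deployment().name)}-${serviceShort}'
+            # since '-test-' is what allows identifying the test resource later on.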
+ } + + It '[] Bicep test deployment should have parameter [serviceShort]' -TestCases ($deploymentTestFileTestCases | Where-Object { (Split-Path $_.testFilePath -Extension) -eq '.bicep' }) { + + param( + [object[]] $testFileContent + ) + + $hasExpectedParam = ($testFileContent | Out-String) -match '\s*param\s+serviceShort\s+string\s*' + + $hasExpectedParam | Should -Be $true + } + + It '[] JSON test deployment name should contain [-test-]' -TestCases ($deploymentTestFileTestCases | Where-Object { (Split-Path $_.testFilePath -Extension) -eq '.json' }) { + + param( + [object[]] $testFileContent + ) + + # Handle case of deployment test file (instead of ARM-JSON parameter file) + $rawContentHashtable = $testFileContent | ConvertFrom-Json -Depth 99 + + # Uses deployment test file (instead of parameter file). Need to extract parameters. + $testResource = $rawContentHashtable.resources | Where-Object { $_.name -like '*-test-*' } + + $testResource | Should -Not -BeNullOrEmpty -Because 'the handle ''-test-'' should be part of the module test invocation''s resource name to allow identification.' + } + + It '[] JSON test deployment should have parameter [namePrefix]' -TestCases ($deploymentTestFileTestCases | Where-Object { (Split-Path $_.testFilePath -Extension) -eq '.json' }) { + + param( + [object[]] $testFileContent + ) + + $rawContentHashtable = $testFileContent | ConvertFrom-Json -Depth 99 -AsHashtable + $rawContentHashtable.parameters.keys | Should -Contain 'namePrefix' + } + + It '[] JSON test deployment should have parameter [serviceShort]' -TestCases ($deploymentTestFileTestCases | Where-Object { (Split-Path $_.testFilePath -Extension) -eq '.json' }) { + + param( + [object[]] $testFileContent + ) + + $rawContentHashtable = $testFileContent | ConvertFrom-Json -Depth 99 -AsHashtable + $rawContentHashtable.parameters.keys | Should -Contain 'serviceShort' + } + } + + Context 'Parameter file token tests' { + + # Parameter file test cases + $parameterFileTokenTestCases = @() + + foreach ($moduleFolderPath in $moduleFolderPaths) { + if (Test-Path (Join-Path $moduleFolderPath '.test')) { + $testFilePaths = Get-ModuleTestFileList -ModulePath $moduleFolderPath | ForEach-Object { Join-Path $moduleFolderPath $_ } + foreach ($testFilePath in $testFilePaths) { + foreach ($token in $enforcedTokenList.Keys) { + $parameterFileTokenTestCases += @{ + testFilePath = $testFilePath + parameterFileName = Split-Path $testFilePath -Leaf + tokenSettings = $Settings.parameterFileTokens + tokenName = $token + tokenValue = $enforcedTokenList[$token] + moduleFolderName = $moduleFolderPath.Replace('\', '/').Split('/modules/')[1] + } + } + } + } + } + + It '[] [Tokens] Parameter file [] should not contain the plain value for token [] guid' -TestCases $parameterFileTokenTestCases { + param ( + [string] $testFilePath, + [string] $parameterFileName, + [hashtable] $tokenSettings, + [string] $tokenName, + [string] $tokenValue, + [string] $moduleFolderName + ) + $ParameterFileTokenName = -join ($tokenSettings.tokenPrefix, $tokenName, $tokenSettings.tokenSuffix) + $ParameterFileContent = Get-Content -Path $testFilePath + + $incorrectReferencesFound = $ParameterFileContent | Select-String -Pattern $tokenValue -AllMatches + if ($incorrectReferencesFound.Matches) { + $incorrectReferencesFound.Matches.Count | Should -Be 0 -Because ('Test file should not contain the value [{0}], instead it should reference the token value [{1}]. 
Please check the {2} lines: [{3}]' -f $tokenName, $ParameterFileTokenName, $incorrectReferencesFound.Matches.Count, ($incorrectReferencesFound.Line.Trim() -join ",`n"))
+            }
+        }
+    }
+}
+
 Describe 'Deployment template tests' -Tag Template {

     Context 'Deployment template tests' {
@@ -581,7 +723,16 @@ Describe 'Deployment template tests' -Tag Template {
             foreach ($moduleTestFilePath in $moduleTestFilePaths) {
                 if ((Split-Path $moduleTestFilePath -Extension) -eq '.json') {
-                    $deploymentTestFile_AllParameterNames = ((Get-Content $moduleTestFilePath) | ConvertFrom-Json -AsHashtable).parameters.Keys | Sort-Object
+
+                    $rawContentHashtable = (Get-Content $moduleTestFilePath) | ConvertFrom-Json -AsHashtable
+
+                    # Skipping any file that is not actually an ARM-JSON parameter file
+                    $isParameterFile = $rawContentHashtable.'$schema' -like '*deploymentParameters*'
+                    if (-not $isParameterFile) {
+                        continue
+                    }
+
+                    $deploymentTestFile_AllParameterNames = $rawContentHashtable.parameters.Keys | Sort-Object
                 } else {
                     $deploymentFileContent = az bicep build --file $moduleTestFilePath --stdout | ConvertFrom-Json -AsHashtable
                     $deploymentTestFile_AllParameterNames = $deploymentFileContent.resources[-1].properties.parameters.keys | Sort-Object # The last resource should be the test
@@ -1011,7 +1162,7 @@ Describe 'Deployment template tests' -Tag Template {
         foreach ($moduleFolderPath in $moduleFolderPaths) {
             if (Test-Path (Join-Path $moduleFolderPath '.test')) {
-                $TestFilePaths = (Get-ChildItem (Join-Path -Path $moduleFolderPath -ChildPath '.test') -Recurse -Force -File).FullName
+                $TestFilePaths = (Get-ChildItem (Join-Path -Path $moduleFolderPath -ChildPath '.test') -Recurse -File -Force).FullName
                 foreach ($TestFilePath in $TestFilePaths) {
                     foreach ($token in $tokenConfiguration.Tokens.Keys) {
                         $parameterFileTokenTestCases += @{
diff --git a/utilities/tools/Clear-ManagementGroupDeployment.ps1 b/utilities/tools/Clear-ManagementGroupDeployment.ps1
index 4c1d1314b7..bcafa5ca6f 100644
--- a/utilities/tools/Clear-ManagementGroupDeployment.ps1
+++ b/utilities/tools/Clear-ManagementGroupDeployment.ps1
@@ -34,6 +34,8 @@ function Clear-ManagementGroupDeployment {
         [string[]] $DeploymentStatusToExclude = @('running', 'failed')
     )

+    [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 # Enable TLS 1.2 for web requests
+
     # Load used functions
     . (Join-Path $PSScriptRoot 'helper' 'Split-Array.ps1')

diff --git a/utilities/tools/Set-ModuleReadMe.ps1 b/utilities/tools/Set-ModuleReadMe.ps1
index f30170f48c..2494c8ed96 100644
--- a/utilities/tools/Set-ModuleReadMe.ps1
+++ b/utilities/tools/Set-ModuleReadMe.ps1
@@ -320,11 +320,10 @@ Mandatory. The readme file content array to update

 Optional. The identifier of the 'outputs' section. Defaults to '## Cross-referenced modules'

 .EXAMPLE
-Set-ReferencesSection -TemplateFileContent @{ resource = @{}; ... } -ReadMeFileContent @('# Title', '', '## Section 1', ...)
-
+Set-CrossReferencesSection -TemplateFileContent @{ resource = @{}; ... } -ReadMeFileContent @('# Title', '', '## Section 1', ...)
Update the given readme file's 'Cross-referenced modules' section based on the given template file content #> -function Set-ReferencesSection { +function Set-CrossReferencesSection { [CmdletBinding(SupportsShouldProcess)] param ( @@ -378,6 +377,445 @@ function Set-ReferencesSection { return $updatedFileContent } +<# +.SYNOPSIS +Add comments to indicate required & non-required parameters to the given Bicep example + +.DESCRIPTION +Add comments to indicate required & non-required parameters to the given Bicep example. +'Required' is only added if the example has at least one required parameter +'Non-Required' is only added if the example has at least one required parameter and at least one non-required parameter + +.PARAMETER BicepParams +Mandatory. The Bicep parameter block to add the comments to (i.e., should contain everything in between the brackets of a 'params: {...} block) + +.PARAMETER AllParametersList +Mandatory. A list of all top-level (i.e. non-nested) parameter names + +.PARAMETER RequiredParametersList +Mandatory. A list of all required top-level (i.e. non-nested) parameter names + +.EXAMPLE +Add-BicepParameterTypeComment -AllParametersList @('name', 'lock') -RequiredParametersList @('name') -BicepParams "name: 'carml'\nlock: 'CanNotDelete'" + +Add type comments to given bicep params string, using one required parameter 'name'. Would return: + +' + // Required parameters + name: 'carml' + // Non-required parameters + lock: 'CanNotDelete' +' +#> +function Add-BicepParameterTypeComment { + + [CmdletBinding()] + param ( + [Parameter(Mandatory = $true)] + [AllowEmptyString()] + [string] $BicepParams, + + [Parameter(Mandatory = $false)] + [AllowEmptyCollection()] + [string[]] $AllParametersList = @(), + + [Parameter(Mandatory = $false)] + [AllowEmptyCollection()] + [string[]] $RequiredParametersList = @() + ) + + if ($RequiredParametersList.Count -ge 1 -and $AllParametersList.Count -ge 2) { + + $BicepParamsArray = $BicepParams -split '\n' + + # [1/4] Check where the 'last' required parameter is located in the example (and what its indent is) + $parameterToSplitAt = $RequiredParametersList[-1] + $requiredParameterIndent = ([regex]::Match($BicepParamsArray[0], '^(\s+).*')).Captures.Groups[1].Value.Length + + # [2/4] Add a comment where the required parameters start + $BicepParamsArray = @('{0}// Required parameters' -f (' ' * $requiredParameterIndent)) + $BicepParamsArray[(0 .. 
($BicepParamsArray.Count))]
+
+        # [3/4] Find the location of the last required parameter
+        $requiredParameterStartIndex = ($BicepParamsArray | Select-String ('^[\s]{0}{1}:.+' -f "{$requiredParameterIndent}", $parameterToSplitAt) | ForEach-Object { $_.LineNumber - 1 })[0]
+
+        # [4/4] If we have more than only required parameters, let's add a corresponding comment
+        if ($AllParametersList.Count -gt $RequiredParametersList.Count) {
+            $nextLineIndent = ([regex]::Match($BicepParamsArray[$requiredParameterStartIndex + 1], '^(\s+).*')).Captures.Groups[1].Value.Length
+            if ($nextLineIndent -gt $requiredParameterIndent) {
+                # Case Param is object/array: Search in rest of array for the next closing bracket with the same indent - and then add the search index (1) & initial index (1) count back in
+                $requiredParameterEndIndex = ($BicepParamsArray[($requiredParameterStartIndex + 1)..($BicepParamsArray.Count)] | Select-String "^[\s]{$requiredParameterIndent}\S+" | ForEach-Object { $_.LineNumber - 1 })[0] + 1 + $requiredParameterStartIndex
+            } else {
+                # Case Param is single line bool/string/int: Add an index (1) for the 'required' comment
+                $requiredParameterEndIndex = $requiredParameterStartIndex
+            }
+
+            # Add a comment where the non-required parameters start
+            $BicepParamsArray = $BicepParamsArray[0..$requiredParameterEndIndex] + ('{0}// Non-required parameters' -f (' ' * $requiredParameterIndent)) + $BicepParamsArray[(($requiredParameterEndIndex + 1) .. ($BicepParamsArray.Count))]
+        }
+
+        return ($BicepParamsArray | Out-String).TrimEnd()
+    }
+
+    return $BicepParams
+}
+
+<#
+.SYNOPSIS
+Sort the given JSON parameters into required & non-required parameters, each sorted alphabetically
+
+.DESCRIPTION
+Sort the given JSON parameters into required & non-required parameters, each sorted alphabetically
+
+.PARAMETER ParametersJSON
+Mandatory. The JSON parameters block to process (ideally already without 'value' property)
+
+.PARAMETER RequiredParametersList
+Mandatory. A list of all required top-level (i.e. non-nested) parameter names
+
+.EXAMPLE
+Get-OrderedParametersJSON -RequiredParametersList @('name') -ParametersJSON '{ "diagnosticLogsRetentionInDays": 7,"lock": "CanNotDelete","name": "carml" }'
+
+Order the given JSON object, required parameters first, each group sorted alphabetically. Would result in:
+
+@{
+    name: 'carml'
+    diagnosticLogsRetentionInDays: 7
+    lock: 'CanNotDelete'
+}
+#>
+function Get-OrderedParametersJSON {
+
+    [CmdletBinding()]
+    param (
+        [Parameter(Mandatory = $true)]
+        [string] $ParametersJSON,
+
+        [Parameter(Mandatory = $false)]
+        [AllowEmptyCollection()]
+        [string[]] $RequiredParametersList = @()
+    )
+
+    # Load used function(s)
+    . (Join-Path $PSScriptRoot 'helper' 'ConvertTo-OrderedHashtable.ps1')
+
+    # [1/3] Get all parameters from the parameter object and order them recursively
+    $orderedContentInJSONFormat = ConvertTo-OrderedHashtable -JSONInputObject $parametersJSON
+
+    # [2/3] Sort 'required' parameters to the front
+    $orderedJSONParameters = [ordered]@{}
+    $orderedTopLevelParameterNames = $orderedContentInJSONFormat.psbase.Keys # We must use PS-Base to handle conflicts of HashTable properties & keys (e.g. for a key 'keys').
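+    # Aside (illustrative, not part of the original script): why '.psbase.Keys'?
+    # A hashtable entry can shadow the type's own members on member access, e.g.:
+    #   $h = @{ keys = @('a', 'b') }
+    #   $h.Keys        # may resolve to the entry 'keys' ('a', 'b') instead of the Keys property
+    #   $h.psbase.Keys # always resolves to the underlying hashtable's Keys property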
+    # [2.1] Add required parameters first
+    $orderedTopLevelParameterNames | Where-Object { $_ -in $RequiredParametersList } | ForEach-Object { $orderedJSONParameters[$_] = $orderedContentInJSONFormat[$_] }
+    # [2.2] Add the rest after
+    $orderedTopLevelParameterNames | Where-Object { $_ -notin $RequiredParametersList } | ForEach-Object { $orderedJSONParameters[$_] = $orderedContentInJSONFormat[$_] }
+
+    # [3/3] Handle empty dictionaries (in case the parameter file was empty)
+    if ($orderedJSONParameters.count -eq 0) {
+        $orderedJSONParameters = ''
+    }
+
+    return $orderedJSONParameters
+}
+
+<#
+.SYNOPSIS
+Sort the given JSON parameters into a new JSON parameter object, all parameters sorted into required & non-required parameters, each sorted alphabetically
+
+.DESCRIPTION
+Sort the given JSON parameters into a new JSON parameter object, all parameters sorted into required & non-required parameters, each sorted alphabetically.
+The location where required & non-required parameters start is highlighted by a corresponding comment
+
+.PARAMETER ParametersJSON
+Mandatory. The parameter JSON object to process
+
+.PARAMETER RequiredParametersList
+Mandatory. A list of all required top-level (i.e. non-nested) parameter names
+
+.EXAMPLE
+Build-OrderedJSONObject -RequiredParametersList @('name') -ParametersJSON '{ "lock": { "value": "CanNotDelete" }, "name": { "value": "carml" }, "diagnosticLogsRetentionInDays": { "value": 7 } }'
+
+Build a formatted Parameter-JSON object with one required parameter. Would result in:
+
+'{
+    "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#",
+    "contentVersion": "1.0.0.0",
+    "parameters": {
+        // Required parameters
+        "name": {
+            "value": "carml"
+        },
+        // Non-required parameters
+        "diagnosticLogsRetentionInDays": {
+            "value": 7
+        },
+        "lock": {
+            "value": "CanNotDelete"
+        }
+    }
+}'
+#>
+function Build-OrderedJSONObject {
+
+    [CmdletBinding()]
+    param (
+        [Parameter(Mandatory = $true)]
+        [string] $ParametersJSON,
+
+        [Parameter(Mandatory = $false)]
+        [AllowEmptyCollection()]
+        [string[]] $RequiredParametersList = @()
+    )
+
+    # [1/8] Sort parameters alphabetically
+    $orderedJSONParameters = Get-OrderedParametersJSON -ParametersJSON $ParametersJSON -RequiredParametersList $RequiredParametersList
+
+    # [2/8] Build the ordered parameter file syntax back up
+    $jsonExample = ([ordered]@{
+            '$schema'      = 'https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#'
+            contentVersion = '1.0.0.0'
+            parameters     = (-not [String]::IsNullOrEmpty($orderedJSONParameters)) ? $orderedJSONParameters : @{}
+        } | ConvertTo-Json -Depth 99)
+
+    # [3/8] If we have at least one required and one other parameter we want to add a comment
+    if ($RequiredParametersList.Count -ge 1 -and $OrderedJSONParameters.Keys.Count -ge 2) {
+
+        $jsonExampleArray = $jsonExample -split '\n'
+
+        # [4/8] Check where the 'last' required parameter is located in the example (and what its indent is)
+        $parameterToSplitAt = $RequiredParametersList[-1]
+        $parameterStartIndex = ($jsonExampleArray | Select-String '.*"parameters": \{.*' | ForEach-Object { $_.LineNumber - 1 })[0]
+        $requiredParameterIndent = ([regex]::Match($jsonExampleArray[($parameterStartIndex + 1)], '^(\s+).*')).Captures.Groups[1].Value.Length
+
+        # [5/8] Add a comment where the required parameters start
+        $jsonExampleArray = $jsonExampleArray[0..$parameterStartIndex] + ('{0}// Required parameters' -f (' ' * $requiredParameterIndent)) + $jsonExampleArray[(($parameterStartIndex + 1) ..
($jsonExampleArray.Count))]
+
+        # [6/8] Find the location of the last required parameter
+        $requiredParameterStartIndex = ($jsonExampleArray | Select-String "^[\s]{$requiredParameterIndent}`"$parameterToSplitAt`": \{.*" | ForEach-Object { $_.LineNumber - 1 })[0]
+
+        # [7/8] If we have more than only required parameters, let's add a corresponding comment
+        if ($orderedJSONParameters.Keys.Count -gt $RequiredParametersList.Count ) {
+            # Search in rest of array for the next closing bracket with the same indent - and then add the search index (1) & initial index (1) count back in
+            $requiredParameterEndIndex = ($jsonExampleArray[($requiredParameterStartIndex + 1)..($jsonExampleArray.Count)] | Select-String "^[\s]{$requiredParameterIndent}\}" | ForEach-Object { $_.LineNumber - 1 })[0] + 1 + $requiredParameterStartIndex
+
+            # Add a comment where the non-required parameters start
+            $jsonExampleArray = $jsonExampleArray[0..$requiredParameterEndIndex] + ('{0}// Non-required parameters' -f (' ' * $requiredParameterIndent)) + $jsonExampleArray[(($requiredParameterEndIndex + 1) .. ($jsonExampleArray.Count))]
+        }
+
+        # [8/8] Convert the processed array back into a string
+        return $jsonExampleArray | Out-String
+    }
+
+    return $jsonExample
+}
+
+<#
+.SYNOPSIS
+Convert the given Bicep parameter block to a JSON parameter block
+
+.DESCRIPTION
+Convert the given Bicep parameter block to a JSON parameter block
+
+.PARAMETER BicepParamBlock
+Mandatory. The Bicep parameter block to process
+
+.EXAMPLE
+ConvertTo-FormattedJSONParameterObject -BicepParamBlock "name: 'carml'\nlock: 'CanNotDelete'"
+
+Convert the Bicep string "name: 'carml'\nlock: 'CanNotDelete'" into a parameter JSON object. Would result in:
+
+@{
+    name = @{
+        value = 'carml'
+    }
+    lock = @{
+        value = 'CanNotDelete'
+    }
+}
+#>
+function ConvertTo-FormattedJSONParameterObject {
+
+    [CmdletBinding()]
+    param (
+        [Parameter()]
+        [string] $BicepParamBlock
+    )
+
+    # [1/4] Detect top level params for later processing
+    $bicepParamBlockArray = $BicepParamBlock -split '\n'
+    $topLevelParamIndent = ([regex]::Match($bicepParamBlockArray[0], '^(\s+).*')).Captures.Groups[1].Value.Length
+    $topLevelParams = $bicepParamBlockArray | Where-Object { $_ -match "^\s{$topLevelParamIndent}[0-9a-zA-Z]+:.*" } | ForEach-Object { ($_ -split ':')[0].Trim() }
+
+    # [2/4] Add JSON-specific syntax to the Bicep param block to enable us to treat it as such
+    # [2.1] Syntax: Outer brackets
+    $paramInJsonFormat = @(
+        '{',
+        $BicepParamBlock
+        '}'
+    ) | Out-String
+
+    # [2.2] Syntax: All single-quotes are double-quotes
+    $paramInJsonFormat = $paramInJsonFormat -replace "'", '"'
+
+    # [2.3] Split the object to format line-by-line (& also remove any empty lines)
+    $paramInJSONFormatArray = $paramInJsonFormat -split '\n' | Where-Object { -not [String]::IsNullOrEmpty($_.Trim()) }
+
+    for ($index = 0; $index -lt $paramInJSONFormatArray.Count; $index++) {
+
+        $line = $paramInJSONFormatArray[$index]
+
+        # [2.4] Syntax:
+        # - Everything left of the leftmost ':' should be wrapped in quotes (as a parameter name is always a string)
+        # - However, we don't want to accidentally catch something like "CriticalAddonsOnly=true:NoSchedule"
+        [regex]$pattern = '^\s*\"{0}([0-9a-zA-Z]+):'
+        $line = $pattern.replace($line, '"$1":', 1)
+
+        # [2.5] Syntax: Replace Bicep resource ID references
+        $mayHaveValue = $line -like '*:*'
+        if ($mayHaveValue) {
+
+            # Individual checks
+            $isLineWithEmptyObjectValue = $line -match '^.+:\s*{\s*}\s*$' # e.g.
test: {} + $isLineWithObjectPropertyReferenceValue = ($line -split ':')[1].Trim() -like '*.*' # e.g. resourceGroupResources.outputs.virtualWWANResourceId` + $isLineWithReferenceInLineKey = ($line -split ':')[0].Trim() -like '*.*' + + $lineValue = ($line -split ':')[1].Trim() + $isLineWithStringValue = $lineValue -match '".+"' # e.g. "value" + $isLineWithFunction = $lineValue -match '[a-zA-Z]+\(.+\)' # e.g. (split(resourceGroupResources.outputs.recoveryServicesVaultResourceId, "/"))[4] + $isLineWithPlainValue = $lineValue -match '^\w+$' # e.g. adminPassword: password + $isLineWithPrimitiveValue = $lineValue -match '^\s*true|false|[0-9]+$' # e.g. isSecure: true + + # Combined checks + # In case of an output reference like '"virtualWanId": resourceGroupResources.outputs.virtualWWANResourceId' we'll only show "" (but NOT e.g. 'reference': {}) + $isLineWithObjectPropertyReference = -not $isLineWithEmptyObjectValue -and -not $isLineWithStringValue -and $isLineWithObjectPropertyReferenceValue + # In case of a parameter/variable reference like 'adminPassword: password' we'll only show "" (but NOT e.g. enableMe: true) + $isLineWithParameterOrVariableReferenceValue = $isLineWithPlainValue -and -not $isLineWithPrimitiveValue + # In case of any contained line like ''${resourceGroupResources.outputs.managedIdentityResourceId}': {}' we'll only show "managedIdentityResourceId: {}" + $isLineWithObjectReferenceKeyAndEmptyObjectValue = $isLineWithEmptyObjectValue -and $isLineWithReferenceInLineKey + # In case of any contained function like '"backupVaultResourceGroup": (split(resourceGroupResources.outputs.recoveryServicesVaultResourceId, "/"))[4]' we'll only show "" + + if ($isLineWithObjectPropertyReference -or $isLineWithFunction -or $isLineWithParameterOrVariableReferenceValue) { + $line = '{0}: "<{1}>"' -f ($line -split ':')[0], ([regex]::Match(($line -split ':')[0], '"(.+)"')).Captures.Groups[1].Value + } elseif ($isLineWithObjectReferenceKeyAndEmptyObjectValue) { + $line = '"<{0}>": {1}' -f (($line -split ':')[0] -split '\.')[-1].TrimEnd('}"'), ($line -split ':')[1].Trim() + } + } else { + if ($line -notlike '*"*"*' -and $line -like '*.*') { + # In case of a array value like '[ \n -> resourceGroupResources.outputs.managedIdentityPrincipalId <- \n ]' we'll only show """ + $line = '"<{0}>"' -f $line.Split('.')[-1].Trim() + } + } + + + $paramInJSONFormatArray[$index] = $line + } + + # [2.6] Syntax: Add comma everywhere unless: + # - the current line has an opening 'object: {' or 'array: [' character + # - the line after the current line has a closing 'object: {' or 'array: [' character + # - it's the last closing bracket + for ($index = 0; $index -lt $paramInJSONFormatArray.Count; $index++) { + if (($paramInJSONFormatArray[$index] -match '[\{|\[]\s*$') -or (($index -lt $paramInJSONFormatArray.Count - 1) -and $paramInJSONFormatArray[$index + 1] -match '^\s*[\]|\}]\s*$') -or ($index -eq $paramInJSONFormatArray.Count - 1)) { + continue + } + $paramInJSONFormatArray[$index] = '{0},' -f $paramInJSONFormatArray[$index].Trim() + } + + # [2.7] Format the final JSON string to an object to enable processing + $paramInJsonFormatObject = $paramInJSONFormatArray | Out-String | ConvertFrom-Json -AsHashtable -Depth 99 + + # [3/4] Inject top-level 'value`' properties + $paramInJsonFormatObjectWithValue = @{} + foreach ($paramKey in $topLevelParams) { + $paramInJsonFormatObjectWithValue[$paramKey] = @{ + value = $paramInJsonFormatObject[$paramKey] + } + } + + # [4/4] Return result + return 
$paramInJsonFormatObjectWithValue
+}
+
+<#
+.SYNOPSIS
+Convert the given parameter JSON object into a formatted Bicep object (i.e., sorted & with required/non-required comments)
+
+.DESCRIPTION
+Convert the given parameter JSON object into a formatted Bicep object (i.e., sorted & with required/non-required comments)
+
+.PARAMETER JSONParameters
+Mandatory. The parameter JSON object to process.
+
+.PARAMETER RequiredParametersList
+Mandatory. A list of all required top-level (i.e. non-nested) parameter names
+
+.EXAMPLE
+ConvertTo-FormattedBicep -RequiredParametersList @('name') -JSONParameters @{ name = @{ value = 'carml' }; diagnosticLogsRetentionInDays = @{ value = 7 }; lock = @{ value = 'CanNotDelete' } }
+
+Convert the given JSONParameters object with one required parameter to a formatted Bicep object. Would result in:
+
+'
+    // Required parameters
+    name: 'carml'
+    // Non-required parameters
+    diagnosticLogsRetentionInDays: 7
+    lock: 'CanNotDelete'
+'
+#>
+function ConvertTo-FormattedBicep {
+
+    [CmdletBinding()]
+    param (
+        [Parameter(Mandatory = $true)]
+        [hashtable] $JSONParameters,
+
+        [Parameter(Mandatory = $false)]
+        [AllowEmptyCollection()]
+        [string[]] $RequiredParametersList = @()
+    )
+
+    # Remove the 'value' parameter property, if any (e.g. when dealing with a classic parameter file)
+    $JSONParametersWithoutValue = @{}
+    foreach ($parameterName in $JSONParameters.psbase.Keys) {
+        $keysOnLevel = $JSONParameters[$parameterName].Keys
+        if ($keysOnLevel.count -eq 1 -and $keysOnLevel -eq 'value') {
+            $JSONParametersWithoutValue[$parameterName] = $JSONParameters[$parameterName].value
+        } else {
+            $JSONParametersWithoutValue[$parameterName] = $JSONParameters[$parameterName]
+        }
+    }
+
+    # [1/4] Order parameters recursively
+    if ($JSONParametersWithoutValue.Keys.Count -gt 0) {
+        $orderedJSONParameters = Get-OrderedParametersJSON -ParametersJSON ($JSONParametersWithoutValue | ConvertTo-Json -Depth 99) -RequiredParametersList $RequiredParametersList
+    } else {
+        $orderedJSONParameters = @{}
+    }
+
+    # [2/4] Remove any JSON-specific formatting
+    $templateParameterObject = $orderedJSONParameters | ConvertTo-Json -Depth 99
+    if ($templateParameterObject -ne '{}') {
+        $contentInBicepFormat = $templateParameterObject -replace '"', "'" # Update any [xyz: "xyz"] to [xyz: 'xyz']
+        $contentInBicepFormat = $contentInBicepFormat -replace ',', '' # Update any [xyz: xyz,] to [xyz: xyz]
+        $contentInBicepFormat = $contentInBicepFormat -replace "'(\w+)':", '$1:' # Update any ['xyz': xyz] to [xyz: xyz]
+        $contentInBicepFormat = $contentInBicepFormat -replace "'(.+.getSecret\('.+'\))'", '$1' # Update any [xyz: 'xyz.GetSecret()'] to [xyz: xyz.GetSecret()]
+
+        $bicepParamsArray = $contentInBicepFormat -split '\n'
+        $bicepParamsArray = $bicepParamsArray[1..($bicepParamsArray.count - 2)]
+    }
+
+    # [3/4] Format params with indent
+    $BicepParams = ($bicepParamsArray | ForEach-Object { " $_" } | Out-String).TrimEnd()
+
+    # [4/4] Add comments where the required & optional parameters start
+    $splitInputObject = @{
+        BicepParams            = $BicepParams
+        RequiredParametersList = $RequiredParametersList
+        AllParametersList      = $JSONParametersWithoutValue.Keys
+    }
+    $commentedBicepParams = Add-BicepParameterTypeComment @splitInputObject
+
+    return $commentedBicepParams
+}
+
 <#
.SYNOPSIS
Generate 'Deployment examples' for the ReadMe out of the parameter files currently used to test the template
@@ -391,11 +829,14 @@ Mandatory. The path to the template file

.PARAMETER TemplateFileContent
Mandatory.
The template file content object to crawl data from +.PARAMETER TemplateFilePath +Mandatory. The path to the template file + .PARAMETER ReadMeFileContent Mandatory. The readme file content array to update .PARAMETER SectionStartIdentifier -Optional. The identifier of the 'outputs' section. Defaults to '## Dependencies +Optional. The identifier of the 'outputs' section. Defaults to '## Deployment examples' .PARAMETER addJson Optional. A switch to control whether or not to add a ARM-JSON-Parameter file example. Defaults to true. @@ -404,7 +845,7 @@ Optional. A switch to control whether or not to add a ARM-JSON-Parameter file ex Optional. A switch to control whether or not to add a Bicep deployment example. Defaults to true. .EXAMPLE -Set-DeploymentExamplesSection -TemplateFilePath 'C:/deploy.bicep' -TemplateFileContent @{ resource = @{}; ... } -ReadMeFileContent @('# Title', '', '## Section 1', ...) +Set-DeploymentExamplesSection -TemplateFileContent @{ resource = @{}; ... } -TemplateFilePath 'C:/deploy.bicep' -ReadMeFileContent @('# Title', '', '## Section 1', ...) Update the given readme file's 'Deployment Examples' section based on the given template file content #> @@ -432,7 +873,7 @@ function Set-DeploymentExamplesSection { ) # Load used function(s) - . (Join-Path $PSScriptRoot 'helper' 'ConvertTo-OrderedHashtable.ps1') + . (Join-Path (Split-Path $PSScriptRoot -Parent) 'pipelines' 'sharedScripts' 'Get-ModuleTestFileList.ps1') # Process content $SectionContent = [System.Collections.ArrayList]@( @@ -445,234 +886,297 @@ function Set-DeploymentExamplesSection { $moduleRoot = Split-Path $TemplateFilePath -Parent $resourceTypeIdentifier = $moduleRoot.Replace('\', '/').Split('/modules/')[1].TrimStart('/') $resourceType = $resourceTypeIdentifier.Split('/')[1] - $parameterFiles = Get-ChildItem (Join-Path $moduleRoot '.test') -Filter '*parameters.json' -Recurse + $testFilePaths = Get-ModuleTestFileList -ModulePath $moduleRoot | ForEach-Object { Join-Path $moduleRoot $_ } - $requiredParameterNames = $TemplateFileContent.parameters.Keys | Where-Object { $TemplateFileContent.parameters[$_].Keys -notcontains 'defaultValue' } | Sort-Object + $RequiredParametersList = $TemplateFileContent.parameters.Keys | Where-Object { $TemplateFileContent.parameters[$_].Keys -notcontains 'defaultValue' } | Sort-Object ############################ ## Process test files ## ############################ $pathIndex = 1 - foreach ($testFilePath in $parameterFiles.FullName) { - $contentInJSONFormat = Get-Content -Path $testFilePath -Encoding 'utf8' | Out-String + foreach ($testFilePath in $testFilePaths) { - $exampleTitle = ((Split-Path $testFilePath -LeafBase) -replace '\.', ' ') -replace ' parameters', '' - $TextInfo = (Get-Culture).TextInfo + # Read content + $rawContentArray = Get-Content -Path $testFilePath + $rawContent = Get-Content -Path $testFilePath -Encoding 'utf8' | Out-String + + # Format example header + if ((Split-Path (Split-Path $testFilePath -Parent) -Leaf) -ne '.test') { + $exampleTitle = Split-Path (Split-Path $testFilePath -Parent) -Leaf + } else { + $exampleTitle = ((Split-Path $testFilePath -LeafBase) -replace '\.', ' ') -replace ' parameters', '' + } + $TextInfo = (Get-Culture -Name 'en-US').TextInfo $exampleTitle = $TextInfo.ToTitleCase($exampleTitle) $SectionContent += @( '
<h3>Example {0}: {1}</h3>
' -f $pathIndex, $exampleTitle ) - if ($addBicep) { - $JSONParametersHashTable = (ConvertFrom-Json $contentInJSONFormat -AsHashtable -Depth 99).parameters - - # Handle KeyVaut references - $keyVaultReferences = $JSONParametersHashTable.Keys | Where-Object { $JSONParametersHashTable[$_].Keys -contains 'reference' } - - if ($keyVaultReferences.Count -gt 0) { - $keyVaultReferenceData = @() - foreach ($reference in $keyVaultReferences) { - $resourceIdElem = $JSONParametersHashTable[$reference].reference.keyVault.id -split '/' - $keyVaultReferenceData += @{ - subscriptionId = $resourceIdElem[2] - resourceGroupName = $resourceIdElem[4] - vaultName = $resourceIdElem[-1] - secretName = $JSONParametersHashTable[$reference].reference.secretName - parameterName = $reference - } - } - } - - $extendedKeyVaultReferences = @() - $counter = 0 - foreach ($reference in ($keyVaultReferenceData | Sort-Object -Property 'vaultName' -Unique)) { - $counter++ - $extendedKeyVaultReferences += @( - "resource kv$counter 'Microsoft.KeyVault/vaults@2019-09-01' existing = {", - (" name: '{0}'" -f $reference.vaultName), - (" scope: resourceGroup('{0}','{1}')" -f $reference.subscriptionId, $reference.resourceGroupName), - '}', - '' - ) + ## ----------------------------------- ## + ## Handle by type (Bicep vs. JSON) ## + ## ----------------------------------- ## + if ((Split-Path $testFilePath -Extension) -eq '.bicep') { - # Add attribute for later correct reference - $keyVaultReferenceData | Where-Object { $_.vaultName -eq $reference.vaultName } | ForEach-Object { - $_['vaultResourceReference'] = "kv$counter" - } - } + # ------------------------- # + # Prepare Bicep to JSON # + # ------------------------- # - # Handle VALUE references (i.e. remove them) - $JSONParameters = (ConvertFrom-Json $contentInJSONFormat -Depth 99 -AsHashtable).parameters - $JSONParametersWithoutValue = @{} - foreach ($parameterName in $JSONParameters.Keys) { - if ($JSONParameters[$parameterName].Keys -eq 'value') { - $JSONParametersWithoutValue[$parameterName] = $JSONParameters[$parameterName]['value'] - } else { - # replace key vault references - $matchingTuple = $keyVaultReferenceData | Where-Object { $_.parameterName -eq $parameterName } - $JSONParametersWithoutValue[$parameterName] = "{0}.getSecret('{1}')" -f $matchingTuple.vaultResourceReference, $matchingTuple.secretName - } - } + # [1/6] Search for the relevant parameter start & end index + $bicepTestStartIndex = ($rawContentArray | Select-String ("^module testDeployment '..\/.*deploy.bicep' = {$") | ForEach-Object { $_.LineNumber - 1 })[0] - # Order parameters recursively - $JSONParametersWithoutValue = ConvertTo-OrderedHashtable -JSONInputObject ($JSONParametersWithoutValue | ConvertTo-Json -Depth 99) + $bicepTestEndIndex = $bicepTestStartIndex + do { + $bicepTestEndIndex++ + } while ($rawContentArray[$bicepTestEndIndex] -ne '}') - # Sort 'required' parameters to the front - $orderedJSONParameters = [ordered]@{} - $orderedTopLevelParameterNames = $JSONParametersWithoutValue.psbase.Keys # We must use PS-Base to handle conflicts of HashTable properties & keys (e.g. for a key 'keys'). 
- # Add required parameters first - $orderedTopLevelParameterNames | Where-Object { $_ -in $requiredParameterNames } | ForEach-Object { $orderedJSONParameters[$_] = $JSONParametersWithoutValue[$_] } - # Add rest after - $orderedTopLevelParameterNames | Where-Object { $_ -notin $requiredParameterNames } | ForEach-Object { $orderedJSONParameters[$_] = $JSONParametersWithoutValue[$_] } + $rawBicepExample = $rawContentArray[$bicepTestStartIndex..$bicepTestEndIndex] - if ($orderedJSONParameters.count -eq 0) { - # Handle empty dictionaries (in case the parmaeter file was empty) - $orderedJSONParameters = @{} - } + # [2/6] Replace placeholders + $serviceShort = ([regex]::Match($rawContent, "(?m)^param serviceShort string = '(.+)'\s*$")).Captures.Groups[1].Value - $templateParameterObject = $orderedJSONParameters | ConvertTo-Json -Depth 99 - if ($templateParameterObject -ne '{}') { - $contentInBicepFormat = $templateParameterObject -replace '"', "'" # Update any [xyz: "xyz"] to [xyz: 'xyz'] - $contentInBicepFormat = $contentInBicepFormat -replace ',', '' # Update any [xyz: xyz,] to [xyz: xyz] - $contentInBicepFormat = $contentInBicepFormat -replace "'(\w+)':", '$1:' # Update any ['xyz': xyz] to [xyz: xyz] - $contentInBicepFormat = $contentInBicepFormat -replace "'(.+.getSecret\('.+'\))'", '$1' # Update any [xyz: 'xyz.GetSecret()'] to [xyz: xyz.GetSecret()] + $rawBicepExampleString = ($rawBicepExample | Out-String) + $rawBicepExampleString = $rawBicepExampleString -replace '\$\{serviceShort\}', $serviceShort + $rawBicepExampleString = $rawBicepExampleString -replace '\$\{namePrefix\}', '' # Replacing with empty to not expose prefix and avoid potential deployment conflicts + $rawBicepExampleString = $rawBicepExampleString -replace '(?m):\s*location\s*$', ': ''''' - $bicepParamsArray = $contentInBicepFormat -split '\n' - $bicepParamsArray = $bicepParamsArray[1..($bicepParamsArray.count - 2)] - } + # [3/6] Format header, remove scope property & any empty line + $rawBicepExample = $rawBicepExampleString -split '\n' + $rawBicepExample[0] = "module $resourceType './$resourceTypeIdentifier/deploy.bicep' = {" + $rawBicepExample = $rawBicepExample | Where-Object { $_ -notmatch 'scope: *' } | Where-Object { -not [String]::IsNullOrEmpty($_) } - # Format params with indent - $bicepExample = $bicepParamsArray | ForEach-Object { " $_" } + # [4/6] Extract param block + $rawBicepExampleArray = $rawBicepExample -split '\n' + $moduleDeploymentPropertyIndent = ([regex]::Match($rawBicepExampleArray[1], '^(\s+).*')).Captures.Groups[1].Value.Length + $paramsStartIndex = ($rawBicepExampleArray | Select-String ("^[\s]{$moduleDeploymentPropertyIndent}params:[\s]*\{") | ForEach-Object { $_.LineNumber - 1 })[0] + 1 + $paramsEndIndex = ($rawBicepExampleArray[($paramsStartIndex + 1)..($rawBicepExampleArray.Count)] | Select-String "^[\s]{$moduleDeploymentPropertyIndent}\}" | ForEach-Object { $_.LineNumber - 1 })[0] + $paramsStartIndex + $paramBlock = ($rawBicepExampleArray[$paramsStartIndex..$paramsEndIndex] | Out-String).TrimEnd() - # Optional: Add comment where required & optional parameters start - # ---------------------------------------------------------------- - if ($requiredParameterNames -is [string]) { - $requiredParameterNames = @($requiredParameterNames) + # [5/6] Convert Bicep parameter block to JSON parameter block to enable processing + $conversionInputObject = @{ + BicepParamBlock = $paramBlock } + $paramsInJSONFormat = ConvertTo-FormattedJSONParameterObject @conversionInputObject - # If we have at least one 
required and one other parameter we want to add a comment - if ($requiredParameterNames.Count -ge 1 -and $orderedJSONParameters.Keys.Count -ge 2) { - - $bicepExampleArray = $bicepExample -split '\n' + # [6/6] Convert JSON parameters back to Bicep and order & format them + $conversionInputObject = @{ + JSONParameters = $paramsInJSONFormat + RequiredParametersList = $RequiredParametersList + } + $bicepExample = ConvertTo-FormattedBicep @conversionInputObject - # Check where the 'last' required parameter is located in the example (and what its indent is) - $parameterToSplitAt = $requiredParameterNames[-1] - $requiredParameterIndent = ([regex]::Match($bicepExampleArray[0], '^(\s+).*')).Captures.Groups[1].Value.Length + # --------------------- # + # Add Bicep example # + # --------------------- # + if ($addBicep) { - # Add a comment where the required parameters start - $bicepExampleArray = @('{0}// Required parameters' -f (' ' * $requiredParameterIndent)) + $bicepExampleArray[(0 .. ($bicepExampleArray.Count))] + $formattedBicepExample = $rawBicepExample[0..($paramsStartIndex - 1)] + ($bicepExample -split '\n') + $rawBicepExample[($paramsEndIndex + 1)..($rawBicepExample.Count)] - # Find the location if the last required parameter - $requiredParameterStartIndex = ($bicepExampleArray | Select-String ('^[\s]{0}{1}:.+' -f "{$requiredParameterIndent}", $parameterToSplitAt) | ForEach-Object { $_.LineNumber - 1 })[0] + $SectionContent += @( + '', + '
- # Check where the 'last' required parameter is located in the example (and what its indent is)
- $parameterToSplitAt = $requiredParameterNames[-1]
- $requiredParameterIndent = ([regex]::Match($bicepExampleArray[0], '^(\s+).*')).Captures.Groups[1].Value.Length
+ # --------------------- #
+ # Add Bicep example #
+ # --------------------- #
+ if ($addBicep) {
- # Add a comment where the required parameters start
- $bicepExampleArray = @('{0}// Required parameters' -f (' ' * $requiredParameterIndent)) + $bicepExampleArray[(0 .. ($bicepExampleArray.Count))]
+ $formattedBicepExample = $rawBicepExample[0..($paramsStartIndex - 1)] + ($bicepExample -split '\n') + $rawBicepExample[($paramsEndIndex + 1)..($rawBicepExample.Count)]
- # Find the location if the last required parameter
- $requiredParameterStartIndex = ($bicepExampleArray | Select-String ('^[\s]{0}{1}:.+' -f "{$requiredParameterIndent}", $parameterToSplitAt) | ForEach-Object { $_.LineNumber - 1 })[0]
+ $SectionContent += @(
+ '',
+ '<details>'
+ ''
+ '<summary>via Bicep module</summary>'
+ ''
+ '```bicep',
+ ($formattedBicepExample | ForEach-Object { "$_" }).TrimEnd(),
+ '```',
+ '',
+ '</details>',
+ '<p>'
+ )
+ }
- # If we have more than only required parameters, let's add a corresponding comment
- if ($orderedJSONParameters.Keys.Count -gt $requiredParameterNames.Count) {
- $nextLineIndent = ([regex]::Match($bicepExampleArray[$requiredParameterStartIndex + 1], '^(\s+).*')).Captures.Groups[1].Value.Length
- if ($nextLineIndent -gt $requiredParameterIndent) {
- # Case Param is object/array: Search in rest of array for the next closing bracket with the same indent - and then add the search index (1) & initial index (1) count back in
- $requiredParameterEndIndex = ($bicepExampleArray[($requiredParameterStartIndex + 1)..($bicepExampleArray.Count)] | Select-String "^[\s]{$requiredParameterIndent}\S+" | ForEach-Object { $_.LineNumber - 1 })[0] + 1 + $requiredParameterStartIndex
- } else {
- # Case Param is single line bool/string/int: Add an index (1) for the 'required' comment
- $requiredParameterEndIndex = $requiredParameterStartIndex
- }
+ # -------------------- #
+ # Add JSON example #
+ # -------------------- #
+ if ($addJson) {
- # Add a comment where the non-required parameters start
- $bicepExampleArray = $bicepExampleArray[0..$requiredParameterEndIndex] + ('{0}// Non-required parameters' -f (' ' * $requiredParameterIndent)) + $bicepExampleArray[(($requiredParameterEndIndex + 1) .. ($bicepExampleArray.Count))]
+ # [1/2] Get all parameters from the parameter object and order them recursively
+ $orderingInputObject = @{
+ ParametersJSON = $paramsInJSONFormat | ConvertTo-Json -Depth 99
+ RequiredParametersList = $RequiredParametersList
+ }
+ $orderedJSONExample = Build-OrderedJSONObject @orderingInputObject
- $bicepExample = $bicepExampleArray | Out-String
+ # [2/2] Create the final content block
+ $SectionContent += @(
+ '',
+ '<details>'
+ ''
+ '<summary>via JSON Parameter file</summary>'
+ ''
+ '```json',
+ $orderedJSONExample.Trim()
+ '```',
+ '',
+ '</details>',
+ '<p>'
+ )
 }
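`Build-OrderedJSONObject` is defined elsewhere in this script. Conceptually it sorts required parameters to the front before rendering, similar to this simplified sketch with invented sample data:

```powershell
$parameters = @{ tags = @{ env = 'test' }; name = 'demo'; location = 'westeurope' }
$RequiredParametersList = @('name', 'location')
$ordered = [ordered]@{}
# Required parameters first, everything else after
$parameters.Keys | Where-Object { $_ -in $RequiredParametersList } | ForEach-Object { $ordered[$_] = $parameters[$_] }
$parameters.Keys | Where-Object { $_ -notin $RequiredParametersList } | ForEach-Object { $ordered[$_] = $parameters[$_] }
$ordered | ConvertTo-Json -Depth 99
```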
+ } else {
+ # ------------------------- #
+ # Prepare JSON to Bicep #
+ # ------------------------- #
- $SectionContent += @(
- '',
- '<details>'
- ''
- '<summary>via Bicep module</summary>'
- ''
- '```bicep',
- $extendedKeyVaultReferences,
- "module $resourceType './$resourceTypeIdentifier/deploy.bicep' = {"
- " name: '`${uniqueString(deployment().name)}-$resourceType'"
- ' params: {'
- $bicepExample.TrimEnd(),
- ' }'
- '}'
- '```',
- '',
- '</details>'
- '<p>'
- )
- }
-
- if ($addJson) {
- $orderedContentInJSONFormat = ConvertTo-OrderedHashtable -JSONInputObject (($contentInJSONFormat | ConvertFrom-Json).parameters | ConvertTo-Json -Depth 99)
-
- # Sort 'required' parameters to the front
- $orderedJSONParameters = [ordered]@{}
- $orderedTopLevelParameterNames = $orderedContentInJSONFormat.psbase.Keys # We must use PS-Base to handle conflicts of HashTable properties & keys (e.g. for a key 'keys').
- # Add required parameters first
- $orderedTopLevelParameterNames | Where-Object { $_ -in $requiredParameterNames } | ForEach-Object { $orderedJSONParameters[$_] = $orderedContentInJSONFormat[$_] }
- # Add rest after
- $orderedTopLevelParameterNames | Where-Object { $_ -notin $requiredParameterNames } | ForEach-Object { $orderedJSONParameters[$_] = $orderedContentInJSONFormat[$_] }
+ $rawContentHashtable = $rawContent | ConvertFrom-Json -Depth 99 -AsHashtable -NoEnumerate
- if ($orderedJSONParameters.count -eq 0) {
- # Handle empty dictionaries (in case the parmaeter file was empty)
- $orderedJSONParameters = ''
- }
+ # First we need to check if we're dealing with a classic JSON-Parameter file or a deployment test file (which contains resource deployments & parameters)
+ $isParameterFile = $rawContentHashtable.'$schema' -like '*deploymentParameters*'
+ if (-not $isParameterFile) {
+ # Case 1: Uses deployment test file (instead of parameter file).
+ # [1/3] Need to extract parameters. The target is to get an object which 1:1 represents a classic JSON-Parameter file (aside from KeyVault references)
+ $testResource = $rawContentHashtable.resources | Where-Object { $_.name -like '*-test-*' }
- $jsonExample = ([ordered]@{
+ # [2/3] Build the full ARM-JSON parameter file
+ $jsonParameterContent = [ordered]@{
 '$schema' = 'https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#'
 contentVersion = '1.0.0.0'
- parameters = (-not [String]::IsNullOrEmpty($orderedJSONParameters)) ? $orderedJSONParameters : @{}
- } | ConvertTo-Json -Depth 99)
+ parameters = $testResource.properties.parameters
+ }
+ $jsonParameterContent = ($jsonParameterContent | ConvertTo-Json -Depth 99).TrimEnd()
+
+ # [3/3] Remove 'externalResourceReferences' that are generated for Bicep's 'existing' resource references. Removing them will make the file more readable
+ $jsonParameterContentArray = $jsonParameterContent -split '\n'
+ foreach ($row in ($jsonParameterContentArray | Where-Object { $_ -like '*reference(extensionResourceId*' })) {
+ $expectedValue = ([regex]::Match($row, '.+\[reference\(extensionResourceId.+\.(.+)\.value\]"')).Captures.Groups[1].Value
+ $toReplaceValue = ([regex]::Match($row, '"(\[reference\(extensionResourceId.+)"')).Captures.Groups[1].Value
- # Optional: Add comment where required & optional parameters start
- # ----------------------------------------------------------------
- if ($requiredParameterNames -is [string]) {
- $requiredParameterNames = @($requiredParameterNames)
+ $jsonParameterContent = $jsonParameterContent.Replace($toReplaceValue, ('<{0}>' -f $expectedValue))
+ }
+ } else {
+ # Case 2: Uses ARM-JSON parameter file
+ $jsonParameterContent = $rawContent.TrimEnd()
 }
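The file-type detection relies on the `$schema` value; note the quoted `.'$schema'` member access, which keeps `$schema` from being parsed as a variable. A stand-alone sketch with invented content:

```powershell
$rawContent = '{"$schema":"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#","resources":[{"name":"kv-test-deploy","properties":{"parameters":{"name":{"value":"demo"}}}}]}'
$rawContentHashtable = $rawContent | ConvertFrom-Json -Depth 99 -AsHashtable -NoEnumerate
$rawContentHashtable.'$schema' -like '*deploymentParameters*' # -> $false, so it is a deployment test file
$testResource = $rawContentHashtable.resources | Where-Object { $_.name -like '*-test-*' }
$testResource.properties.parameters | ConvertTo-Json -Compress # -> {"name":{"value":"demo"}}
```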
- # If we have at least one required and one other parameter we want to add a comment
- if ($requiredParameterNames.Count -ge 1 -and $orderedJSONParameters.Keys.Count -ge 2) {
+ # --------------------- #
+ # Add Bicep example #
+ # --------------------- #
+ if ($addBicep) {
+
+ # [1/5] Get all parameters from the parameter object
+ $JSONParametersHashTable = (ConvertFrom-Json $jsonParameterContent -AsHashtable -Depth 99).parameters
+
+ # [2/5] Handle the special case of Key Vault secret references (that have a 'reference' instead of a 'value' property)
+ # [2.1] Find all references and split them into manageable objects
+ $keyVaultReferences = $JSONParametersHashTable.Keys | Where-Object { $JSONParametersHashTable[$_].Keys -contains 'reference' }
+
+ if ($keyVaultReferences.Count -gt 0) {
+ $keyVaultReferenceData = @()
+ foreach ($reference in $keyVaultReferences) {
+ $resourceIdElem = $JSONParametersHashTable[$reference].reference.keyVault.id -split '/'
+ $keyVaultReferenceData += @{
+ subscriptionId = $resourceIdElem[2]
+ resourceGroupName = $resourceIdElem[4]
+ vaultName = $resourceIdElem[-1]
+ secretName = $JSONParametersHashTable[$reference].reference.secretName
+ parameterName = $reference
+ }
+ }
+ }
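The index positions used when splitting the Key Vault resource ID follow from its fixed structure, e.g. with an invented ID:

```powershell
$id = '/subscriptions/11111111-2222-3333-4444-555555555555/resourceGroups/demo-rg/providers/Microsoft.KeyVault/vaults/demo-kv'
$resourceIdElem = $id -split '/'
$resourceIdElem[2]  # -> 11111111-2222-3333-4444-555555555555 (subscriptionId)
$resourceIdElem[4]  # -> demo-rg (resourceGroupName)
$resourceIdElem[-1] # -> demo-kv (vaultName)
```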
- $jsonExampleArray = $jsonExample -split '\n'
+ # [2.2] Remove any duplicates from the referenced key vaults and build 'existing' Key Vault references in Bicep format from them.
+ # Also, add a link to the corresponding Key Vault 'resource' to each identified Key Vault secret reference
+ $extendedKeyVaultReferences = @()
+ $counter = 0
+ foreach ($reference in ($keyVaultReferenceData | Sort-Object -Property 'vaultName' -Unique)) {
+ $counter++
+ $extendedKeyVaultReferences += @(
+ "resource kv$counter 'Microsoft.KeyVault/vaults@2019-09-01' existing = {",
+ (" name: '{0}'" -f $reference.vaultName),
+ (" scope: resourceGroup('{0}','{1}')" -f $reference.subscriptionId, $reference.resourceGroupName),
+ '}',
+ ''
+ )
- # Check where the 'last' required parameter is located in the example (and what its indent is)
- $parameterToSplitAt = $requiredParameterNames[-1]
- $parameterStartIndex = ($jsonExampleArray | Select-String '.*"parameters": \{.*' | ForEach-Object { $_.LineNumber - 1 })[0]
- $requiredParameterIndent = ([regex]::Match($jsonExampleArray[($parameterStartIndex + 1)], '^(\s+).*')).Captures.Groups[1].Value.Length
+ # Add attribute for later correct reference
+ $keyVaultReferenceData | Where-Object { $_.vaultName -eq $reference.vaultName } | ForEach-Object {
+ $_['vaultResourceReference'] = "kv$counter"
+ }
+ }
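The `Sort-Object ... -Unique` matters because two secrets from the same vault should still yield only one `existing` resource. A minimal sketch with invented data:

```powershell
$keyVaultReferenceData = @(
    @{ vaultName = 'demo-kv'; secretName = 'adminPassword'; parameterName = 'adminPassword' }
    @{ vaultName = 'demo-kv'; secretName = 'cert'; parameterName = 'certSecret' }
)
@($keyVaultReferenceData | Sort-Object -Property 'vaultName' -Unique).Count # -> 1, i.e. a single 'kv1' reference remains
```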
' + '

' + ) + } - # If we have more than only required parameters, let's add a corresponding comment - if ($orderedJSONParameters.Keys.Count -gt $requiredParameterNames.Count ) { - # Search in rest of array for the next closing bracket with the same indent - and then add the search index (1) & initial index (1) count back in - $requiredParameterEndIndex = ($jsonExampleArray[($requiredParameterStartIndex + 1)..($jsonExampleArray.Count)] | Select-String "^[\s]{$requiredParameterIndent}\}" | ForEach-Object { $_.LineNumber - 1 })[0] + 1 + $requiredParameterStartIndex + # -------------------- # + # Add JSON example # + # -------------------- # + if ($addJson) { - # Add a comment where the non-required parameters start - $jsonExampleArray = $jsonExampleArray[0..$requiredParameterEndIndex] + ('{0}// Non-required parameters' -f (' ' * $requiredParameterIndent)) + $jsonExampleArray[(($requiredParameterEndIndex + 1) .. ($jsonExampleArray.Count))] + # [1/2] Get all parameters from the parameter object and order them recursively + $orderingInputObject = @{ + ParametersJSON = (($jsonParameterContent | ConvertFrom-Json).parameters | ConvertTo-Json -Depth 99) + RequiredParametersList = $null -ne $RequiredParametersList ? $RequiredParametersList : @() } - - $jsonExample = $jsonExampleArray | Out-String + $orderedJSONExample = Build-OrderedJSONObject @orderingInputObject + + # [2/2] Create the final content block + $SectionContent += @( + '', + '

', + '', + 'via JSON Parameter file', + '', + '```json', + $orderedJSONExample.TrimEnd(), + '```', + '', + '
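A condensed sketch of the flattening in step [3/5]; the parameters are invented and `kv1` stands in for an `existing` Key Vault reference created in step [2.2]:

```powershell
$JSONParameters = @{
    name          = @{ value = 'demo' }
    adminPassword = @{ reference = @{ secretName = 'adminPassword' } } # no plain 'value' key
}
$JSONParametersWithoutValue = @{}
foreach ($parameterName in $JSONParameters.psbase.Keys) {
    $keysOnLevel = $JSONParameters[$parameterName].Keys
    if ($keysOnLevel.count -eq 1 -and $keysOnLevel -eq 'value') {
        $JSONParametersWithoutValue[$parameterName] = $JSONParameters[$parameterName]['value']
    } else {
        $JSONParametersWithoutValue[$parameterName] = "{0}.getSecret('{1}')" -f 'kv1', $JSONParameters[$parameterName].reference.secretName
    }
}
$JSONParametersWithoutValue # contains name = 'demo' and adminPassword = "kv1.getSecret('adminPassword')"
```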
+
+ # [5/5] Create the final content block: That means
+ # - the 'existing' Key Vault resources
+ # - a 'module' header that mimics a module deployment
+ # - all parameters in Bicep format
+ $SectionContent += @(
+ '',
+ '<details>'
+ ''
+ '<summary>via Bicep module</summary>'
+ ''
+ '```bicep',
+ $extendedKeyVaultReferences,
+ "module $resourceType './$resourceTypeIdentifier/deploy.bicep' = {"
+ " name: '`${uniqueString(deployment().name)}-$resourceType'"
+ ' params: {'
+ $bicepExample.TrimEnd(),
+ ' }'
+ '}'
+ '```',
+ '',
+ '</details>',
+ '<p>'
+ )
+ }
- # If we have more than only required parameters, let's add a corresponding comment
- if ($orderedJSONParameters.Keys.Count -gt $requiredParameterNames.Count ) {
- # Search in rest of array for the next closing bracket with the same indent - and then add the search index (1) & initial index (1) count back in
- $requiredParameterEndIndex = ($jsonExampleArray[($requiredParameterStartIndex + 1)..($jsonExampleArray.Count)] | Select-String "^[\s]{$requiredParameterIndent}\}" | ForEach-Object { $_.LineNumber - 1 })[0] + 1 + $requiredParameterStartIndex
+ # -------------------- #
+ # Add JSON example #
+ # -------------------- #
+ if ($addJson) {
- # Add a comment where the non-required parameters start
- $jsonExampleArray = $jsonExampleArray[0..$requiredParameterEndIndex] + ('{0}// Non-required parameters' -f (' ' * $requiredParameterIndent)) + $jsonExampleArray[(($requiredParameterEndIndex + 1) .. ($jsonExampleArray.Count))]
+ # [1/2] Get all parameters from the parameter object and order them recursively
+ $orderingInputObject = @{
+ ParametersJSON = (($jsonParameterContent | ConvertFrom-Json).parameters | ConvertTo-Json -Depth 99)
+ RequiredParametersList = $null -ne $RequiredParametersList ? $RequiredParametersList : @()
 }
-
- $jsonExample = $jsonExampleArray | Out-String
+ $orderedJSONExample = Build-OrderedJSONObject @orderingInputObject
+
+ # [2/2] Create the final content block
+ $SectionContent += @(
+ '',
+ '<details>',
+ '',
+ '<summary>via JSON Parameter file</summary>',
+ '',
+ '```json',
+ $orderedJSONExample.TrimEnd(),
+ '```',
+ '',
+ '</details>'
+ '<p>'
+ )
 }
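The PowerShell 7 ternary used for both conversion inputs simply guards against an unbound required-parameters list; `$effectiveList` is only a name for this sketch:

```powershell
$RequiredParametersList = $null
$effectiveList = $null -ne $RequiredParametersList ? $RequiredParametersList : @()
$effectiveList.GetType().Name # -> Object[] (an empty array instead of $null)
```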
-
- $SectionContent += @(
- '',
- '<details>',
- '',
- '<summary>via JSON Parameter file</summary>',
- '',
- '```json',
- $jsonExample.TrimEnd(),
- '```',
- '',
- '</details>'
- '<p>'
- )
- }
 }

 $SectionContent += @(
@@ -682,7 +1186,9 @@ function Set-DeploymentExamplesSection {
 $pathIndex++
 }

- # Build result
+ ######################
+ ## Build result ##
+ ######################
 if ($SectionContent) {
 if ($PSCmdlet.ShouldProcess('Original file with new template references content', 'Merge')) {
 return Merge-FileWithNewContent -oldContent $ReadMeFileContent -newContent $SectionContent -SectionStartIdentifier $SectionStartIdentifier
@@ -811,7 +1317,7 @@ function Set-ModuleReadMe {
 [string] $TemplateFilePath,

 [Parameter(Mandatory = $false)]
- [Hashtable] $TemplateFileContent,
+ [hashtable] $TemplateFileContent,

 [Parameter(Mandatory = $false)]
 [string] $ReadMeFilePath = (Join-Path (Split-Path $TemplateFilePath -Parent) 'readme.md'),
@@ -845,7 +1351,7 @@ function Set-ModuleReadMe {
 $TemplateFilePath = Resolve-Path -Path $TemplateFilePath -ErrorAction Stop

 if (-not (Test-Path $TemplateFilePath -PathType 'Leaf')) {
- throw "[$TemplateFilePath] is no valid file path."
+ throw "[$TemplateFilePath] is not a valid file path."
 }

 if (-not $TemplateFileContent) {
@@ -940,27 +1446,25 @@ function Set-ModuleReadMe {
 }

 if ($SectionsToRefresh -contains 'CrossReferences') {
- # Handle [Dependencies] section
+ # Handle [CrossReferences] section
 # ========================
 $inputObject = @{
 ReadMeFileContent = $readMeFileContent
 TemplateFileContent = $templateFileContent
 }
- $readMeFileContent = Set-ReferencesSection @inputObject
- }
-
- if ($SectionsToRefresh -contains 'Deployment examples') {
- $isTopLevelModule = $TemplateFilePath.Replace('\', '/').Split('/modules/')[1].Split('/').Count -eq 3 # <provider>/<resourceType>/deploy.*
- if ($SectionsToRefresh -contains 'Deployment examples' -and $isTopLevelModule) {
- # Handle [Deployment examples] section
- # ===================================
- $inputObject = @{
- ReadMeFileContent = $readMeFileContent
- TemplateFilePath = $TemplateFilePath
- TemplateFileContent = $templateFileContent
- }
- $readMeFileContent = Set-DeploymentExamplesSection @inputObject
+ $readMeFileContent = Set-CrossReferencesSection @inputObject
+ }
+
+ $isTopLevelModule = $TemplateFilePath.Replace('\', '/').Split('/modules/')[1].Split('/').Count -eq 3 # <provider>/<resourceType>/deploy.*
+ if ($SectionsToRefresh -contains 'Deployment examples' -and $isTopLevelModule) {
+ # Handle [Deployment examples] section
+ # ===================================
+ $inputObject = @{
+ ReadMeFileContent = $readMeFileContent
+ TemplateFilePath = $TemplateFilePath
+ TemplateFileContent = $templateFileContent
 }
+ $readMeFileContent = Set-DeploymentExamplesSection @inputObject
 }

 if ($SectionsToRefresh -contains 'Navigation') {
diff --git a/utilities/tools/Test-ModuleLocally.ps1 b/utilities/tools/Test-ModuleLocally.ps1
index be56b7c2f4..eb7c9e3a90 100644
--- a/utilities/tools/Test-ModuleLocally.ps1
+++ b/utilities/tools/Test-ModuleLocally.ps1
@@ -249,7 +249,12 @@ function Test-ModuleLocally {
 # Loop through test files
 foreach ($moduleTestFile in $moduleTestFiles) {
 Write-Verbose ('Validating module [{0}] with test file [{1}]' -f $ModuleName, (Split-Path $moduleTestFile -Leaf)) -Verbose
- Test-TemplateDeployment @functionInput -ParameterFilePath $moduleTestFile
+ if ((Split-Path $moduleTestFile -Extension) -eq '.json') {
+ Test-TemplateDeployment @functionInput -ParameterFilePath $moduleTestFile
+ } else {
+ $functionInput['TemplateFilePath'] = $moduleTestFile
+ Test-TemplateDeployment @functionInput
+ }
 }
 }
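Both loops in this file use the same dispatch rule. In isolation it behaves as follows; the paths are invented:

```powershell
foreach ($moduleTestFile in @('.test/parameters.json', '.test/common/deploy.test.bicep')) {
    if ((Split-Path $moduleTestFile -Extension) -eq '.json') {
        "[{0}] -> passed via -ParameterFilePath, the template stays the module's deploy file" -f $moduleTestFile
    } else {
        "[{0}] -> becomes the -TemplateFilePath itself" -f $moduleTestFile
    }
}
```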
@@ -260,8 +265,15 @@ function Test-ModuleLocally {
 # Loop through test files
 foreach ($moduleTestFile in $moduleTestFiles) {
 Write-Verbose ('Deploy Module [{0}] with test file [{1}]' -f $ModuleName, (Split-Path $moduleTestFile -Leaf)) -Verbose
- if ($PSCmdlet.ShouldProcess(('Module [{0}] with test file [{1}]' -f $ModuleName, (Split-Path $moduleTestFile -Leaf)), 'Deploy')) {
- New-TemplateDeployment @functionInput -ParameterFilePath $moduleTestFile
+ if ((Split-Path $moduleTestFile -Extension) -eq '.json') {
+ if ($PSCmdlet.ShouldProcess(('Module [{0}] with test file [{1}]' -f $ModuleName, (Split-Path $moduleTestFile -Leaf)), 'Deploy')) {
+ New-TemplateDeployment @functionInput -ParameterFilePath $moduleTestFile
+ }
+ } else {
+ $functionInput['TemplateFilePath'] = $moduleTestFile
+ if ($PSCmdlet.ShouldProcess(('Module [{0}] with test file [{1}]' -f $ModuleName, (Split-Path $moduleTestFile -Leaf)), 'Deploy')) {
+ New-TemplateDeployment @functionInput
+ }
 }
 }
 }
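The `ShouldProcess` gating that wraps both deployment calls is what makes `-WhatIf` dry runs possible. A minimal, hypothetical sketch:

```powershell
function Invoke-DemoDeployment {
    [CmdletBinding(SupportsShouldProcess)]
    param ([string] $Name)
    if ($PSCmdlet.ShouldProcess("Module [$Name]", 'Deploy')) {
        Write-Verbose ('Deploying [{0}]' -f $Name) -Verbose
    }
}
Invoke-DemoDeployment -Name 'demo' -WhatIf # prints the would-be action instead of executing it
```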