From 1bd531a1a891b68c199f6ba25ae8a5b2efd13827 Mon Sep 17 00:00:00 2001 From: David Duque Date: Fri, 13 Nov 2020 11:53:01 +0100 Subject: [PATCH 01/23] First functional create datalake store module --- plugins/module_utils/azure_rm_common.py | 16 ++- plugins/modules/azure_rm_datalakestore.py | 147 ++++++++++++++++++++++ requirements-azure.txt | 1 + 3 files changed, 161 insertions(+), 3 deletions(-) create mode 100644 plugins/modules/azure_rm_datalakestore.py diff --git a/plugins/module_utils/azure_rm_common.py b/plugins/module_utils/azure_rm_common.py index 3ba51923e..1dad393fd 100644 --- a/plugins/module_utils/azure_rm_common.py +++ b/plugins/module_utils/azure_rm_common.py @@ -268,7 +268,8 @@ def default_api_version(self): from azure.mgmt.resource.locks import ManagementLockClient from azure.mgmt.recoveryservicesbackup import RecoveryServicesBackupClient import azure.mgmt.recoveryservicesbackup.models as RecoveryServicesBackupModels - + from azure.mgmt.datalake.store import DataLakeStoreAccountManagementClient + import azure.mgmt.datalake.store.models as DataLakeStoreAccountModel except ImportError as exc: Authentication = object HAS_AZURE_EXC = traceback.format_exc() @@ -428,6 +429,7 @@ def __init__(self, derived_arg_spec, bypass_checks=False, no_log=False, self._IoThub_client = None self._lock_client = None self._recovery_services_backup_client = None + self._datalake_store_client = None self.check_mode = self.module.check_mode self.api_profile = self.module.params.get('api_profile') @@ -1259,7 +1261,6 @@ def lock_models(self): @property def recovery_services_backup_client(self): self.log('Getting recovery services backup client') - if not self._recovery_services_backup_client: self._recovery_services_backup_client = self.get_mgmt_svc_client(RecoveryServicesBackupClient, base_url=self._cloud_environment.endpoints.resource_manager) return self._recovery_services_backup_client @@ -1268,8 +1269,17 @@ def recovery_services_backup_client(self): def recovery_services_backup_models(self): return RecoveryServicesBackupModels + def datalake_store_client(self): + self.log('Getting datalake store client...') + self._datalake_store_client = self.get_mgmt_svc_client(DataLakeStoreAccountManagementClient, + base_url=self._cloud_environment.endpoints.resource_manager, + api_version='2016-11-01') + return self._datalake_store_client + + @property + def datalake_store_models(self): + return DataLakeStoreAccountModel -class AzureSASAuthentication(Authentication): """Simple SAS Authentication. 
An implementation of Authentication in https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/authentication.py diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py new file mode 100644 index 000000000..7f6613b34 --- /dev/null +++ b/plugins/modules/azure_rm_datalakestore.py @@ -0,0 +1,147 @@ +#!/usr/bin/python + +from __future__ import absolute_import, division, print_function +import datetime + +__metaclass__ = type + + +ANSIBLE_METADATA = {'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community'} + +DOCUMENTATION = ''' + +''' + +EXAMPLES = ''' +''' + +RETURN = ''' + +''' + +from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase +import uuid + +try: + from msrestazure.azure_exceptions import CloudError + from azure.graphrbac.models import GraphErrorException + from azure.graphrbac.models import PasswordCredential + from azure.graphrbac.models import ApplicationUpdateParameters + from dateutil.relativedelta import relativedelta +except ImportError: + # This is handled in azure_rm_common + pass + + +class AzureRMDatalakeStore(AzureRMModuleBase): + def __init__(self): + + self.module_arg_spec = dict( + resource_group=dict(type='str',required=True), + name=dict(type='str',required=True), + location=dict(type='str'), + state=dict(type='str', default='present', choices=['present', 'absent']), + ) + + self.state = None + self.name = None + self.resource_group = None + self.location = None + self.results = dict(changed=False) + + self.client = None + + super(AzureRMDatalakeStore, self).__init__(derived_arg_spec=self.module_arg_spec, + supports_check_mode=False, + supports_tags=False) + + def exec_module(self, **kwargs): + for key in list(self.module_arg_spec.keys()): + setattr(self, key, kwargs[key]) + + resource_group = self.get_resource_group(self.resource_group) + if not self.location: + self.location = resource_group.location + + if self.state == 'present': + self.create_datalake_store() + # else: + + + return self.results + + def check_name_availability(self): + self.log('Checking name availability for {0}'.format(self.name)) + try: + response = self.datalake_store_client.accounts.check_name_availability(self.location, self.name) + except CloudError as e: + self.log('Error attempting to validate name.') + self.fail("Error checking name availability: {0}".format(str(e))) + if not response.name_available: + self.log('Error name not available.') + self.fail("{0} - {1}".format(response.message, response.reason)) + + def create_datalake_store(self): + self.log("Creating datalake store {0}".format(self.name)) + + if not self.location: + self.fail('Parameter error: location required when creating a datalake store account.') + + self.check_name_availability() + self.results['changed'] = True + + if self.check_mode: + account_dict = dict( + name=self.name, + resource_group=self.resource_group, + location=self.location + ) + return account_dict + + parameters = self.datalake_store_models.CreateDataLakeStoreAccountParameters( + location=self.location + ) + + self.log(str(parameters)) + try: + poller = self.datalake_store_client.accounts.create(self.resource_group, self.name, parameters) + self.get_poller_result(poller) + except CloudError as e: + self.log('Error creating datalake store.') + self.fail("Failed to create datalake store: {0}".format(str(e))) + + return self.get_account() + + def get_account(self): + self.log('Get properties for 
datalake store {0}'.format(self.name)) + datalake_store_obj = None + account_dict = None + + try: + datalake_store_obj = self.datalake_store_client.accounts.get(self.resource_group, self.name) + except CloudError: + pass + + if datalake_store_obj: + account_dict = self.account_obj_to_dict(datalake_store_obj) + + return account_dict + + def account_obj_to_dict(self, datalake_store_obj): + account_dict = dict( + id=datalake_store_obj.id, + name=datalake_store_obj.name, + type=datalake_store_obj.type, + location=datalake_store_obj.location, + tags=datalake_store_obj.tags + ) + return account_dict + +def main(): + AzureRMDatalakeStore() + + +if __name__ == '__main__': + main() diff --git a/requirements-azure.txt b/requirements-azure.txt index 28d74f260..ea79243f7 100644 --- a/requirements-azure.txt +++ b/requirements-azure.txt @@ -11,6 +11,7 @@ azure-mgmt-compute==10.0.0 azure-mgmt-containerinstance==1.4.0 azure-mgmt-containerregistry==2.0.0 azure-mgmt-containerservice==9.1.0 +azure-mgmt-datalake-store==0.5.0 azure-mgmt-dns==2.1.0 azure-mgmt-keyvault==1.1.0 azure-mgmt-marketplaceordering==0.1.0 From 8d214bace8f5e28dc1bcc3285b897554d7307e44 Mon Sep 17 00:00:00 2001 From: David Duque Date: Fri, 13 Nov 2020 13:55:22 +0100 Subject: [PATCH 02/23] Added absent action and added Api profile to latest in order to avoid warning --- plugins/module_utils/azure_rm_common.py | 3 ++- plugins/modules/azure_rm_datalakestore.py | 24 +++++++++++++++++++---- 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/plugins/module_utils/azure_rm_common.py b/plugins/module_utils/azure_rm_common.py index 1dad393fd..ae1548a6d 100644 --- a/plugins/module_utils/azure_rm_common.py +++ b/plugins/module_utils/azure_rm_common.py @@ -103,7 +103,8 @@ def default_api_version(self): 'PostgreSQLManagementClient': '2017-12-01', 'MySQLManagementClient': '2017-12-01', 'MariaDBManagementClient': '2019-03-01', - 'ManagementLockClient': '2016-09-01' + 'ManagementLockClient': '2016-09-01', + 'DataLakeStoreAccountManagementClient': '2016-11-01' }, '2019-03-01-hybrid': { 'StorageManagementClient': '2017-10-01', diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index 7f6613b34..5f4c927eb 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -67,8 +67,8 @@ def exec_module(self, **kwargs): if self.state == 'present': self.create_datalake_store() - # else: - + else: + self.dalete_datalake_store() return self.results @@ -112,9 +112,25 @@ def create_datalake_store(self): self.log('Error creating datalake store.') self.fail("Failed to create datalake store: {0}".format(str(e))) - return self.get_account() + return self.get_datalake_store() + + def dalete_datalake_store(self): + self.log('Delete datalake store {0}'.format(self.name)) + + datalake_store_obj = self.get_datalake_store() + + self.results['changed'] = True if datalake_store_obj is not None else False + if not self.check_mode and datalake_store_obj is not None: + try: + status = self.datalake_store_client.accounts.delete(self.resource_group, self.name) + self.log("delete status: ") + self.log(str(status)) + except CloudError as e: + self.fail("Failed to delete datalake store: {0}".format(str(e))) + + return True - def get_account(self): + def get_datalake_store(self): self.log('Get properties for datalake store {0}'.format(self.name)) datalake_store_obj = None account_dict = None From aa64ae60cd11430fd856d02d1b12a0c0ca1f857f Mon Sep 17 00:00:00 2001 From: David Duque Date: Fri, 
13 Nov 2020 14:18:12 +0100 Subject: [PATCH 03/23] Added more info from datalake to results.state and improved some piece of code --- plugins/modules/azure_rm_datalakestore.py | 53 ++++++++++++++++------- 1 file changed, 37 insertions(+), 16 deletions(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index 5f4c927eb..9856b8824 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -22,14 +22,9 @@ ''' from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase -import uuid try: from msrestazure.azure_exceptions import CloudError - from azure.graphrbac.models import GraphErrorException - from azure.graphrbac.models import PasswordCredential - from azure.graphrbac.models import ApplicationUpdateParameters - from dateutil.relativedelta import relativedelta except ImportError: # This is handled in azure_rm_common pass @@ -50,8 +45,7 @@ def __init__(self): self.resource_group = None self.location = None self.results = dict(changed=False) - - self.client = None + self.account_dict = None super(AzureRMDatalakeStore, self).__init__(derived_arg_spec=self.module_arg_spec, supports_check_mode=False, @@ -65,10 +59,19 @@ def exec_module(self, **kwargs): if not self.location: self.location = resource_group.location + self.account_dict = self.get_datalake_store() + + if self.account_dict is not None: + self.results['state'] = self.account_dict + else: + self.results['state'] = dict() + if self.state == 'present': - self.create_datalake_store() + if not self.account_dict: + self.results['state'] = self.create_datalake_store() else: - self.dalete_datalake_store() + self.delete_datalake_store() + self.results['state'] = dict(state='Deleted') return self.results @@ -114,20 +117,18 @@ def create_datalake_store(self): return self.get_datalake_store() - def dalete_datalake_store(self): + def delete_datalake_store(self): self.log('Delete datalake store {0}'.format(self.name)) - datalake_store_obj = self.get_datalake_store() - - self.results['changed'] = True if datalake_store_obj is not None else False - if not self.check_mode and datalake_store_obj is not None: + self.results['changed'] = True if self.account_dict is not None else False + if not self.check_mode and self.account_dict is not None: try: status = self.datalake_store_client.accounts.delete(self.resource_group, self.name) self.log("delete status: ") self.log(str(status)) except CloudError as e: self.fail("Failed to delete datalake store: {0}".format(str(e))) - + return True def get_datalake_store(self): @@ -151,7 +152,27 @@ def account_obj_to_dict(self, datalake_store_obj): name=datalake_store_obj.name, type=datalake_store_obj.type, location=datalake_store_obj.location, - tags=datalake_store_obj.tags + tags=datalake_store_obj.tags, + identity=datalake_store_obj.identity, + account_id=datalake_store_obj.account_id, + provisioning_state=datalake_store_obj.provisioning_state, + state=datalake_store_obj.state, + creation_time=datalake_store_obj.creation_time, + last_modified_time=datalake_store_obj.last_modified_time, + endpoint=datalake_store_obj.endpoint, + default_group=datalake_store_obj.default_group, + encryption_config=dict(type=datalake_store_obj.encryption_config.type, + key_vault_meta_info=datalake_store_obj.encryption_config.key_vault_meta_info), + encryption_state=datalake_store_obj.encryption_state, + encryption_provisioning_state=datalake_store_obj.encryption_provisioning_state, + 
firewall_rules=datalake_store_obj.firewall_rules, + virtual_network_rules=datalake_store_obj.virtual_network_rules, + firewall_state=datalake_store_obj.firewall_state, + firewall_allow_azure_ips=datalake_store_obj.firewall_allow_azure_ips, + trusted_id_providers=datalake_store_obj.trusted_id_providers, + trusted_id_provider_state=datalake_store_obj.trusted_id_provider_state, + new_tier=datalake_store_obj.new_tier, + current_tier=datalake_store_obj.current_tier ) return account_dict From 037c66b90f3075cc1d7c769245c956f63a36c3c8 Mon Sep 17 00:00:00 2001 From: David Duque Date: Fri, 13 Nov 2020 18:35:15 +0100 Subject: [PATCH 04/23] Added tags and new tier capability. Update method added --- plugins/modules/azure_rm_datalakestore.py | 42 ++++++++++++++++++++--- 1 file changed, 38 insertions(+), 4 deletions(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index 9856b8824..e8fe04f3d 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -34,16 +34,21 @@ class AzureRMDatalakeStore(AzureRMModuleBase): def __init__(self): self.module_arg_spec = dict( - resource_group=dict(type='str',required=True), - name=dict(type='str',required=True), location=dict(type='str'), + name=dict(type='str',required=True), + new_tier=dict(type='str', choices=['Consumption', 'Commitment_1TB', 'Commitment_10TB', 'Commitment_100TB', 'Commitment_500TB', 'Commitment_1PB', 'Commitment_5PB']), + resource_group=dict(type='str',required=True), state=dict(type='str', default='present', choices=['present', 'absent']), + tags=dict(type='dict'), ) self.state = None self.name = None self.resource_group = None self.location = None + self.tags = None + self.new_tier = None + self.results = dict(changed=False) self.account_dict = None @@ -52,7 +57,7 @@ def __init__(self): supports_tags=False) def exec_module(self, **kwargs): - for key in list(self.module_arg_spec.keys()): + for key in list(self.module_arg_spec.keys()) + ['tags']: setattr(self, key, kwargs[key]) resource_group = self.get_resource_group(self.resource_group) @@ -69,6 +74,8 @@ def exec_module(self, **kwargs): if self.state == 'present': if not self.account_dict: self.results['state'] = self.create_datalake_store() + else: + self.results['state'] = self.update_datalake_store() else: self.delete_datalake_store() self.results['state'] = dict(state='Deleted') @@ -104,7 +111,8 @@ def create_datalake_store(self): return account_dict parameters = self.datalake_store_models.CreateDataLakeStoreAccountParameters( - location=self.location + location=self.location, + new_tier=self.new_tier ) self.log(str(parameters)) @@ -117,6 +125,31 @@ def create_datalake_store(self): return self.get_datalake_store() + def update_datalake_store(self): + self.log("Updating datalake store {0}".format(self.name)) + + parameters = self.datalake_store_models.UpdateDataLakeStoreAccountParameters() + + if self.tags: + update_tags, self.account_dict['tags'] = self.update_tags(self.account_dict['tags']) + if update_tags: + self.results['changed'] = True + parameters.tags=self.account_dict['tags'] + + if self.new_tier: + self.results['changed'] = True + parameters.new_tier=self.new_tier + + self.log(str(parameters)) + try: + poller = self.datalake_store_client.accounts.update(self.resource_group, self.name, parameters) + self.get_poller_result(poller) + except CloudError as e: + self.log('Error creating datalake store.') + self.fail("Failed to create datalake store: {0}".format(str(e))) + + return 
self.get_datalake_store() + def delete_datalake_store(self): self.log('Delete datalake store {0}'.format(self.name)) @@ -174,6 +207,7 @@ def account_obj_to_dict(self, datalake_store_obj): new_tier=datalake_store_obj.new_tier, current_tier=datalake_store_obj.current_tier ) + return account_dict def main(): From 162df4b0c28f995e02806b5fc2332f9b03b937b2 Mon Sep 17 00:00:00 2001 From: David Duque Date: Fri, 13 Nov 2020 19:52:32 +0100 Subject: [PATCH 05/23] Added encryption state and config to variables --- plugins/modules/azure_rm_datalakestore.py | 41 ++++++++++++++++++++++- 1 file changed, 40 insertions(+), 1 deletion(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index e8fe04f3d..64443821d 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -34,6 +34,22 @@ class AzureRMDatalakeStore(AzureRMModuleBase): def __init__(self): self.module_arg_spec = dict( + default_group=dict(type='str'), + encryption_config=dict( + type='dict', + options=dict( + type=dict(type='str', choices=['UserManaged', 'ServiceManaged']), + key_vault_meta_info=dict( + type='dict', + options=dict( + key_vault_resource_id=dict(type='str',required=True), + encryption_key_name=dict(type='str',required=True), + encryption_key_version=dict(type='str') + ) + ), + ) + ), + encryption_state=dict(type='str', choices=['Enabled', 'Disabled']), location=dict(type='str'), name=dict(type='str',required=True), new_tier=dict(type='str', choices=['Consumption', 'Commitment_1TB', 'Commitment_10TB', 'Commitment_100TB', 'Commitment_500TB', 'Commitment_1PB', 'Commitment_5PB']), @@ -48,6 +64,10 @@ def __init__(self): self.location = None self.tags = None self.new_tier = None + self.default_group = None + self.encryption_config = dict() + self.encryption_config_model = None + self.encryption_state = None self.results = dict(changed=False) self.account_dict = None @@ -60,6 +80,11 @@ def exec_module(self, **kwargs): for key in list(self.module_arg_spec.keys()) + ['tags']: setattr(self, key, kwargs[key]) + if self.encryption_config: + # TODO: Revisar todo lo referente a Key Vault Meta Info y hacer pruebas + self.encryption_config_model=self.datalake_store_models.EncryptionConfig(type=self.encryption_config.get('type'), + key_vault_meta_info=self.encryption_config.get('key_vault_meta_info')) + resource_group = self.get_resource_group(self.resource_group) if not self.location: self.location = resource_group.location @@ -111,8 +136,12 @@ def create_datalake_store(self): return account_dict parameters = self.datalake_store_models.CreateDataLakeStoreAccountParameters( + default_group=self.default_group, + encryption_config=self.encryption_config_model, + encryption_state=self.encryption_state, location=self.location, - new_tier=self.new_tier + new_tier=self.new_tier, + tags=self.tags ) self.log(str(parameters)) @@ -140,6 +169,16 @@ def update_datalake_store(self): self.results['changed'] = True parameters.new_tier=self.new_tier + if self.default_group: + self.results['changed'] = True + parameters.default_group=self.default_group + + if self.encryption_state and self.account_dict.get('encryption_state') != self.encryption_state: + self.fail("Encryption type cannot be updated.") + + if self.encryption_config and self.account_dict.get('encryption_config').get('type') != self.encryption_config.get('type'): + self.fail("Encryption type cannot be updated.") + self.log(str(parameters)) try: poller = 
self.datalake_store_client.accounts.update(self.resource_group, self.name, parameters) From 54efde11224765540e7a3032030ff988d90199b4 Mon Sep 17 00:00:00 2001 From: David Duque Date: Fri, 13 Nov 2020 21:42:07 +0100 Subject: [PATCH 06/23] Added firewall to parameters --- plugins/modules/azure_rm_datalakestore.py | 59 +++++++++++++++++++++-- 1 file changed, 56 insertions(+), 3 deletions(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index 64443821d..511777649 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -50,6 +50,16 @@ def __init__(self): ) ), encryption_state=dict(type='str', choices=['Enabled', 'Disabled']), + firewall_allow_azure_ips=dict(type='str', choices=['Enabled', 'Disabled']), + firewall_rules=dict( + type='list', + options=dict( + name=dict(type='str',required=True), + start_ip_address=dict(type='str',required=True), + end_ip_address=dict(type='str',required=True) + ) + ), + firewall_state=dict(type='str', choices=['Enabled', 'Disabled']), location=dict(type='str'), name=dict(type='str',required=True), new_tier=dict(type='str', choices=['Consumption', 'Commitment_1TB', 'Commitment_10TB', 'Commitment_100TB', 'Commitment_500TB', 'Commitment_1PB', 'Commitment_5PB']), @@ -68,6 +78,10 @@ def __init__(self): self.encryption_config = dict() self.encryption_config_model = None self.encryption_state = None + self.firewall_state = None + self.firewall_allow_azure_ips = None + self.firewall_rules = None + self.firewall_rules_model = None self.results = dict(changed=False) self.account_dict = None @@ -134,11 +148,23 @@ def create_datalake_store(self): location=self.location ) return account_dict + + if self.firewall_rules: + self.firewall_rules_model = list() + for rule in self.firewall_rules: + rule_model = self.datalake_store_models.CreateFirewallRuleWithAccountParameters( + name=rule.get('name'), + start_ip_address=rule.get('start_ip_address'), + end_ip_address=rule.get('end_ip_address')) + self.firewall_rules_model.append(rule_model) parameters = self.datalake_store_models.CreateDataLakeStoreAccountParameters( default_group=self.default_group, encryption_config=self.encryption_config_model, encryption_state=self.encryption_state, + firewall_allow_azure_ips=self.firewall_allow_azure_ips, + firewall_rules=self.firewall_rules_model, + firewall_state=self.firewall_state, location=self.location, new_tier=self.new_tier, tags=self.tags @@ -165,11 +191,11 @@ def update_datalake_store(self): self.results['changed'] = True parameters.tags=self.account_dict['tags'] - if self.new_tier: + if self.new_tier and self.account_dict.get('new_tier') != self.new_tier: self.results['changed'] = True parameters.new_tier=self.new_tier - if self.default_group: + if self.default_group and self.account_dict.get('default_group') != self.default_group: self.results['changed'] = True parameters.default_group=self.default_group @@ -179,6 +205,25 @@ def update_datalake_store(self): if self.encryption_config and self.account_dict.get('encryption_config').get('type') != self.encryption_config.get('type'): self.fail("Encryption type cannot be updated.") + if self.firewall_state and self.account_dict.get('firewall_state') != self.firewall_state: + self.results['changed'] = True + parameters.firewall_state=self.firewall_state + + if self.firewall_allow_azure_ips and self.account_dict.get('firewall_allow_azure_ips') != self.firewall_allow_azure_ips: + self.results['changed'] = True + 
parameters.firewall_allow_azure_ips=self.firewall_allow_azure_ips + + if self.firewall_rules is not None: + self.firewall_rules_model = list() + for rule in self.firewall_rules: + rule_model = self.datalake_store_models.CreateFirewallRuleWithAccountParameters( + name=rule.get('name'), + start_ip_address=rule.get('start_ip_address'), + end_ip_address=rule.get('end_ip_address')) + self.firewall_rules_model.append(rule_model) + self.results['changed'] = True + parameters.firewall_rules=self.firewall_rules_model + self.log(str(parameters)) try: poller = self.datalake_store_client.accounts.update(self.resource_group, self.name, parameters) @@ -237,7 +282,6 @@ def account_obj_to_dict(self, datalake_store_obj): key_vault_meta_info=datalake_store_obj.encryption_config.key_vault_meta_info), encryption_state=datalake_store_obj.encryption_state, encryption_provisioning_state=datalake_store_obj.encryption_provisioning_state, - firewall_rules=datalake_store_obj.firewall_rules, virtual_network_rules=datalake_store_obj.virtual_network_rules, firewall_state=datalake_store_obj.firewall_state, firewall_allow_azure_ips=datalake_store_obj.firewall_allow_azure_ips, @@ -246,6 +290,15 @@ def account_obj_to_dict(self, datalake_store_obj): new_tier=datalake_store_obj.new_tier, current_tier=datalake_store_obj.current_tier ) + + account_dict['firewall_rules']=list() + for rule in datalake_store_obj.firewall_rules: + rule_item = dict( + name=rule.name, + start_ip_address=rule.start_ip_address, + end_ip_address=rule.end_ip_address + ) + account_dict['firewall_rules'].append(rule_item) return account_dict From 7e5d8ba8230c3bb9b5670f2cb60caec2fc99bcee Mon Sep 17 00:00:00 2001 From: David Duque Date: Sat, 14 Nov 2020 12:45:07 +0100 Subject: [PATCH 07/23] Added virtual network rules to parameters --- plugins/modules/azure_rm_datalakestore.py | 43 ++++++++++++++++++++--- 1 file changed, 39 insertions(+), 4 deletions(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index 511777649..0066fe46d 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -66,6 +66,13 @@ def __init__(self): resource_group=dict(type='str',required=True), state=dict(type='str', default='present', choices=['present', 'absent']), tags=dict(type='dict'), + virtual_network_rules=dict( + type='list', + options=dict( + name=dict(type='str',required=True), + subnet_id=dict(type='str',required=True) + ) + ), ) self.state = None @@ -82,6 +89,8 @@ def __init__(self): self.firewall_allow_azure_ips = None self.firewall_rules = None self.firewall_rules_model = None + self.virtual_network_rules = None + self.virtual_network_rules_model = None self.results = dict(changed=False) self.account_dict = None @@ -149,7 +158,7 @@ def create_datalake_store(self): ) return account_dict - if self.firewall_rules: + if self.firewall_rules is not None: self.firewall_rules_model = list() for rule in self.firewall_rules: rule_model = self.datalake_store_models.CreateFirewallRuleWithAccountParameters( @@ -157,6 +166,14 @@ def create_datalake_store(self): start_ip_address=rule.get('start_ip_address'), end_ip_address=rule.get('end_ip_address')) self.firewall_rules_model.append(rule_model) + + if self.virtual_network_rules is not None: + self.virtual_network_rules_model = list() + for vnet_rule in self.virtual_network_rules: + vnet_rule_model = self.datalake_store_models.CreateVirtualNetworkRuleWithAccountParameters( + name=vnet_rule.get('name'), + 
subnet_id=vnet_rule.get('subnet_id')) + self.virtual_network_rules_model.append(vnet_rule_model) parameters = self.datalake_store_models.CreateDataLakeStoreAccountParameters( default_group=self.default_group, @@ -167,7 +184,8 @@ def create_datalake_store(self): firewall_state=self.firewall_state, location=self.location, new_tier=self.new_tier, - tags=self.tags + tags=self.tags, + virtual_network_rules=self.virtual_network_rules_model ) self.log(str(parameters)) @@ -216,13 +234,23 @@ def update_datalake_store(self): if self.firewall_rules is not None: self.firewall_rules_model = list() for rule in self.firewall_rules: - rule_model = self.datalake_store_models.CreateFirewallRuleWithAccountParameters( + rule_model = self.datalake_store_models.UpdateFirewallRuleWithAccountParameters( name=rule.get('name'), start_ip_address=rule.get('start_ip_address'), end_ip_address=rule.get('end_ip_address')) self.firewall_rules_model.append(rule_model) self.results['changed'] = True parameters.firewall_rules=self.firewall_rules_model + + if self.virtual_network_rules is not None: + self.virtual_network_rules_model = list() + for vnet_rule in self.virtual_network_rules: + vnet_rule_model = self.datalake_store_models.UpdateVirtualNetworkRuleWithAccountParameters( + name=vnet_rule.get('name'), + subnet_id=vnet_rule.get('subnet_id')) + self.virtual_network_rules_model.append(vnet_rule_model) + self.results['changed'] = True + parameters.virtual_network_rules=self.virtual_network_rules_model self.log(str(parameters)) try: @@ -282,7 +310,6 @@ def account_obj_to_dict(self, datalake_store_obj): key_vault_meta_info=datalake_store_obj.encryption_config.key_vault_meta_info), encryption_state=datalake_store_obj.encryption_state, encryption_provisioning_state=datalake_store_obj.encryption_provisioning_state, - virtual_network_rules=datalake_store_obj.virtual_network_rules, firewall_state=datalake_store_obj.firewall_state, firewall_allow_azure_ips=datalake_store_obj.firewall_allow_azure_ips, trusted_id_providers=datalake_store_obj.trusted_id_providers, @@ -299,6 +326,14 @@ def account_obj_to_dict(self, datalake_store_obj): end_ip_address=rule.end_ip_address ) account_dict['firewall_rules'].append(rule_item) + + account_dict['virtual_network_rules']=list() + for vnet_rule in datalake_store_obj.virtual_network_rules: + vnet_rule_item = dict( + name=vnet_rule.name, + subnet_id=vnet_rule.subnet_id + ) + account_dict['virtual_network_rules'].append(vnet_rule_item) return account_dict From ec65091117bfd31200fc28b38239106af190ebf2 Mon Sep 17 00:00:00 2001 From: David Duque Date: Sat, 14 Nov 2020 19:07:40 +0100 Subject: [PATCH 08/23] Fixed problems with encryption_config -> key_vault_meta_info parameter --- plugins/modules/azure_rm_datalakestore.py | 94 +++++++++++++++++------ 1 file changed, 70 insertions(+), 24 deletions(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index 0066fe46d..ae455794e 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -44,7 +44,7 @@ def __init__(self): options=dict( key_vault_resource_id=dict(type='str',required=True), encryption_key_name=dict(type='str',required=True), - encryption_key_version=dict(type='str') + encryption_key_version=dict(type='str',required=True) ) ), ) @@ -60,6 +60,12 @@ def __init__(self): ) ), firewall_state=dict(type='str', choices=['Enabled', 'Disabled']), + identity=dict( + type='dict', + options=dict( + type=dict(type='str', choices=['SystemAssigned'],required=True) 
+ ) + ), location=dict(type='str'), name=dict(type='str',required=True), new_tier=dict(type='str', choices=['Consumption', 'Commitment_1TB', 'Commitment_10TB', 'Commitment_100TB', 'Commitment_500TB', 'Commitment_1PB', 'Commitment_5PB']), @@ -91,6 +97,8 @@ def __init__(self): self.firewall_rules_model = None self.virtual_network_rules = None self.virtual_network_rules_model = None + self.identity = None + self.identity_model = None self.results = dict(changed=False) self.account_dict = None @@ -104,9 +112,20 @@ def exec_module(self, **kwargs): setattr(self, key, kwargs[key]) if self.encryption_config: - # TODO: Revisar todo lo referente a Key Vault Meta Info y hacer pruebas + key_vault_meta_info_model = None + if self.encryption_config.get('key_vault_meta_info'): + key_vault_meta_info_model = self.datalake_store_models.KeyVaultMetaInfo( + key_vault_resource_id=self.encryption_config.get('key_vault_meta_info').get('key_vault_resource_id'), + encryption_key_name=self.encryption_config.get('key_vault_meta_info').get('encryption_key_name'), + encryption_key_version=self.encryption_config.get('key_vault_meta_info').get('encryption_key_version') + ) self.encryption_config_model=self.datalake_store_models.EncryptionConfig(type=self.encryption_config.get('type'), - key_vault_meta_info=self.encryption_config.get('key_vault_meta_info')) + key_vault_meta_info=key_vault_meta_info_model) + + if self.identity is not None: + self.identity_model = self.datalake_store_models.EncryptionIdentity( + type=self.identity.get('type') + ) resource_group = self.get_resource_group(self.resource_group) if not self.location: @@ -182,6 +201,7 @@ def create_datalake_store(self): firewall_allow_azure_ips=self.firewall_allow_azure_ips, firewall_rules=self.firewall_rules_model, firewall_state=self.firewall_state, + identity=self.identity_model, location=self.location, new_tier=self.new_tier, tags=self.tags, @@ -190,8 +210,7 @@ def create_datalake_store(self): self.log(str(parameters)) try: - poller = self.datalake_store_client.accounts.create(self.resource_group, self.name, parameters) - self.get_poller_result(poller) + self.datalake_store_client.accounts.create(self.resource_group, self.name, parameters) except CloudError as e: self.log('Error creating datalake store.') self.fail("Failed to create datalake store: {0}".format(str(e))) @@ -220,8 +239,14 @@ def update_datalake_store(self): if self.encryption_state and self.account_dict.get('encryption_state') != self.encryption_state: self.fail("Encryption type cannot be updated.") - if self.encryption_config and self.account_dict.get('encryption_config').get('type') != self.encryption_config.get('type'): - self.fail("Encryption type cannot be updated.") + if self.encryption_config: + if self.encryption_config.get('type') == 'UserManaged' and self.encryption_config.get('key_vault_meta_info') != self.account_dict.get('encryption_config').get('key_vault_meta_info'): + self.results['changed'] = True + key_vault_meta_info_model = self.datalake_store_models.UpdateKeyVaultMetaInfo( + encryption_key_version=self.encryption_config.get('key_vault_meta_info').get('encryption_key_version') + ) + encryption_config_model = self.datalake_store_models.UpdateEncryptionConfig = key_vault_meta_info_model + parameters.encryption_config = encryption_config_model if self.firewall_state and self.account_dict.get('firewall_state') != self.firewall_state: self.results['changed'] = True @@ -252,10 +277,13 @@ def update_datalake_store(self): self.results['changed'] = True 
parameters.virtual_network_rules=self.virtual_network_rules_model + if self.identity_model is not None: + self.results['changed'] = True + parameters.identity=self.identity_model + self.log(str(parameters)) try: - poller = self.datalake_store_client.accounts.update(self.resource_group, self.name, parameters) - self.get_poller_result(poller) + self.datalake_store_client.accounts.update(self.resource_group, self.name, parameters) except CloudError as e: self.log('Error creating datalake store.') self.fail("Failed to create datalake store: {0}".format(str(e))) @@ -293,29 +321,31 @@ def get_datalake_store(self): def account_obj_to_dict(self, datalake_store_obj): account_dict = dict( - id=datalake_store_obj.id, - name=datalake_store_obj.name, - type=datalake_store_obj.type, - location=datalake_store_obj.location, - tags=datalake_store_obj.tags, - identity=datalake_store_obj.identity, account_id=datalake_store_obj.account_id, - provisioning_state=datalake_store_obj.provisioning_state, - state=datalake_store_obj.state, creation_time=datalake_store_obj.creation_time, - last_modified_time=datalake_store_obj.last_modified_time, - endpoint=datalake_store_obj.endpoint, + current_tier=datalake_store_obj.current_tier, default_group=datalake_store_obj.default_group, encryption_config=dict(type=datalake_store_obj.encryption_config.type, - key_vault_meta_info=datalake_store_obj.encryption_config.key_vault_meta_info), - encryption_state=datalake_store_obj.encryption_state, + key_vault_meta_info=None), encryption_provisioning_state=datalake_store_obj.encryption_provisioning_state, - firewall_state=datalake_store_obj.firewall_state, + encryption_state=datalake_store_obj.encryption_state, + endpoint=datalake_store_obj.endpoint, firewall_allow_azure_ips=datalake_store_obj.firewall_allow_azure_ips, + firewall_rules=None, + firewall_state=datalake_store_obj.firewall_state, + id=datalake_store_obj.id, + identity=None, + last_modified_time=datalake_store_obj.last_modified_time, + location=datalake_store_obj.location, + name=datalake_store_obj.name, + new_tier=datalake_store_obj.new_tier, + provisioning_state=datalake_store_obj.provisioning_state, + state=datalake_store_obj.state, + tags=datalake_store_obj.tags, trusted_id_providers=datalake_store_obj.trusted_id_providers, trusted_id_provider_state=datalake_store_obj.trusted_id_provider_state, - new_tier=datalake_store_obj.new_tier, - current_tier=datalake_store_obj.current_tier + type=datalake_store_obj.type, + virtual_network_rules=None ) account_dict['firewall_rules']=list() @@ -334,6 +364,22 @@ def account_obj_to_dict(self, datalake_store_obj): subnet_id=vnet_rule.subnet_id ) account_dict['virtual_network_rules'].append(vnet_rule_item) + + if datalake_store_obj.identity: + account_dict['identity']=dict( + type=datalake_store_obj.identity.type, + principal_id=datalake_store_obj.identity.principal_id, + tenant_id=datalake_store_obj.identity.tenant_id + ) + + if datalake_store_obj.encryption_config.key_vault_meta_info: + account_dict['encryption_config'] = dict( + key_vault_meta_info = dict( + key_vault_resource_id = datalake_store_obj.encryption_config.key_vault_meta_info.key_vault_resource_id, + encryption_key_name = datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_name, + encryption_key_version = datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_version + ) + ) return account_dict From 3288592906f9802a3089419a476bb4de8ae650a2 Mon Sep 17 00:00:00 2001 From: David Duque Date: Sat, 14 Nov 2020 19:33:49 +0100 Subject: 
[PATCH 09/23] Added info module --- .../modules/azure_rm_datalakestore_info.py | 201 ++++++++++++++++++ 1 file changed, 201 insertions(+) create mode 100644 plugins/modules/azure_rm_datalakestore_info.py diff --git a/plugins/modules/azure_rm_datalakestore_info.py b/plugins/modules/azure_rm_datalakestore_info.py new file mode 100644 index 000000000..039967a54 --- /dev/null +++ b/plugins/modules/azure_rm_datalakestore_info.py @@ -0,0 +1,201 @@ +#!/usr/bin/python + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + + +ANSIBLE_METADATA = {'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community'} + + +DOCUMENTATION = ''' +''' + +EXAMPLES = ''' +''' + +RETURN = ''' +''' + +try: + from msrestazure.azure_exceptions import CloudError +except Exception: + # This is handled in azure_rm_common + pass + +from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase + + +class AzureRMDatalakeStoreInfo(AzureRMModuleBase): + def __init__(self): + + self.module_arg_spec = dict( + name=dict(type='str'), + resource_group=dict(type='str', aliases=['resource_group_name']) + ) + + self.results = dict( + changed=False, + datalake=[] + ) + + self.name = None + self.resource_group = None + + super(AzureRMDatalakeStoreInfo, self).__init__(self.module_arg_spec, + supports_tags=False) + + def exec_module(self, **kwargs): + for key in self.module_arg_spec: + setattr(self, key, kwargs[key]) + + if self.name and not self.resource_group: + self.fail("Parameter error: resource group required when filtering by name.") + + results = [] + if self.name: + results = self.get_datalake_store() + elif self.resource_group: + results = self.list_resource_group() + else: + results = self.list_all() + + self.results['datalake'] = results + return self.results + + def get_datalake_store(self): + self.log('Get properties for datalake store {0}'.format(self.name)) + datalake_store_obj = None + + try: + datalake_store_obj = self.datalake_store_client.accounts.get(self.resource_group, self.name) + except CloudError: + pass + + if datalake_store_obj: + return [self.account_obj_to_dict(datalake_store_obj)] + + return list() + + def list_resource_group(self): + self.log('Get basic properties for datalake store in resource group {0}'.format(self.resource_group)) + datalake_store_obj = None + results = list() + + try: + datalake_store_obj = self.datalake_store_client.accounts.list_by_resource_group(self.resource_group) + except CloudError: + pass + + if datalake_store_obj: + for datalake_item in datalake_store_obj: + results.append(self.account_obj_to_dict_basic(datalake_item)) + return results + + return list() + + def list_all(self): + self.log('Get basic properties for all datalake store') + datalake_store_obj = None + results = list() + + try: + datalake_store_obj = self.datalake_store_client.accounts.list() + except CloudError: + pass + + if datalake_store_obj: + for datalake_item in datalake_store_obj: + results.append(self.account_obj_to_dict_basic(datalake_item)) + return results + + return list() + + def account_obj_to_dict(self, datalake_store_obj): + account_dict = dict( + account_id=datalake_store_obj.account_id, + creation_time=datalake_store_obj.creation_time, + current_tier=datalake_store_obj.current_tier, + default_group=datalake_store_obj.default_group, + encryption_config=dict(type=datalake_store_obj.encryption_config.type, + key_vault_meta_info=None), + 
encryption_provisioning_state=datalake_store_obj.encryption_provisioning_state, + encryption_state=datalake_store_obj.encryption_state, + endpoint=datalake_store_obj.endpoint, + firewall_allow_azure_ips=datalake_store_obj.firewall_allow_azure_ips, + firewall_rules=None, + firewall_state=datalake_store_obj.firewall_state, + id=datalake_store_obj.id, + identity=None, + last_modified_time=datalake_store_obj.last_modified_time, + location=datalake_store_obj.location, + name=datalake_store_obj.name, + new_tier=datalake_store_obj.new_tier, + provisioning_state=datalake_store_obj.provisioning_state, + state=datalake_store_obj.state, + tags=datalake_store_obj.tags, + trusted_id_providers=datalake_store_obj.trusted_id_providers, + trusted_id_provider_state=datalake_store_obj.trusted_id_provider_state, + type=datalake_store_obj.type, + virtual_network_rules=None + ) + + account_dict['firewall_rules']=list() + for rule in datalake_store_obj.firewall_rules: + rule_item = dict( + name=rule.name, + start_ip_address=rule.start_ip_address, + end_ip_address=rule.end_ip_address + ) + account_dict['firewall_rules'].append(rule_item) + + account_dict['virtual_network_rules']=list() + for vnet_rule in datalake_store_obj.virtual_network_rules: + vnet_rule_item = dict( + name=vnet_rule.name, + subnet_id=vnet_rule.subnet_id + ) + account_dict['virtual_network_rules'].append(vnet_rule_item) + + if datalake_store_obj.identity: + account_dict['identity']=dict( + type=datalake_store_obj.identity.type, + principal_id=datalake_store_obj.identity.principal_id, + tenant_id=datalake_store_obj.identity.tenant_id + ) + + if datalake_store_obj.encryption_config.key_vault_meta_info: + account_dict['encryption_config'] = dict( + key_vault_meta_info = dict( + key_vault_resource_id = datalake_store_obj.encryption_config.key_vault_meta_info.key_vault_resource_id, + encryption_key_name = datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_name, + encryption_key_version = datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_version + ) + ) + + return account_dict + + def account_obj_to_dict_basic(self, datalake_store_obj): + account_dict = dict( + account_id=datalake_store_obj.account_id, + creation_time=datalake_store_obj.creation_time, + endpoint=datalake_store_obj.endpoint, + id=datalake_store_obj.id, + last_modified_time=datalake_store_obj.last_modified_time, + location=datalake_store_obj.location, + name=datalake_store_obj.name, + provisioning_state=datalake_store_obj.provisioning_state, + state=datalake_store_obj.state, + tags=datalake_store_obj.tags, + type=datalake_store_obj.type + ) + + return account_dict + +def main(): + AzureRMDatalakeStoreInfo() + + +if __name__ == '__main__': + main() From ca7ac8796af44585263309ae4695c718e94822d7 Mon Sep 17 00:00:00 2001 From: David Duque Date: Sat, 14 Nov 2020 21:03:08 +0100 Subject: [PATCH 10/23] Added documentation to info module --- .../modules/azure_rm_datalakestore_info.py | 256 ++++++++++++++++++ 1 file changed, 256 insertions(+) diff --git a/plugins/modules/azure_rm_datalakestore_info.py b/plugins/modules/azure_rm_datalakestore_info.py index 039967a54..71d4bf752 100644 --- a/plugins/modules/azure_rm_datalakestore_info.py +++ b/plugins/modules/azure_rm_datalakestore_info.py @@ -1,4 +1,8 @@ #!/usr/bin/python +# +# Copyright (c) 2020 David Duque Hernández, (@next-davidduquehernandez) +# +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, 
print_function __metaclass__ = type @@ -10,12 +14,264 @@ DOCUMENTATION = ''' +--- +module: azure_rm_datalakestore_info +version_added: "1.2.0" +short_description: Get Azure Data Lake Store info +description: + - Get Azure Data Lake Store info. + +options: + resource_group: + description: + - The name of the resource group. + type: str + name: + description: + - The name of the Azure Data Lake Store. + type: str + +extends_documentation_fragment: + - azure.azcollection.azure + +author: + - David Duque Hernández (@next-davidduquehernandez) + ''' EXAMPLES = ''' + - name: Get Azure Data Lake Store info + azure_rm_datalakestore_info: + resource_group: myResourceGroup + name: myDataLakeStore ''' RETURN = ''' +datalake: + description: + - A list of dictionaries containing facts for Azure Data Lake Store. + returned: always + type: complex + contains: + account_id: + description: + - The unique identifier associated with this Data Lake Store account. + returned: always + type: str + sample: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + creation_time: + description: + - The account creation time. + returned: always + type: str + sample: 2020-01-01T00:00:00.000000+00:00 + current_tier: + description: + - The commitment tier in use for the current month. + type: str + sample: Consumption + default_group: + description: + - The default owner group for all new folders and files created in the Data Lake Store account. + type: str + sample: null + encryption_config: + description: + - The Key Vault encryption configuration. + type: complex + contains: + type: + description: + - The type of encryption configuration being used. + type: str + returned: always + sample: ServiceManaged + key_vault_meta_info: + description: + - The Key Vault information for connecting to user managed encryption keys. + type: complex + contains: + key_vault_resource_id: + description: + - The resource identifier for the user managed Key Vault being used to encrypt. + type: str + returned: always + sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.KeyVault/vaults/testkv + encryption_key_name: + description: + - The name of the user managed encryption key. + type: str + returned: always + sample: KeyName + encryption_key_version: + description: + - The version of the user managed encryption key. + type: str + returned: always + sample: 86a1e3b7406f45afa0d54e21eff47e39 + encryption_provisioning_state: + description: + - The current state of encryption provisioning for this Data Lake Store account. + type: str + sample: Succeeded + encryption_state: + description: + - The current state of encryption for this Data Lake Store account. + type: str + sample: Enabled + endpoint: + description: + - The full CName endpoint for this account. + returned: always + type: str + sample: testaccount.azuredatalakestore.net + firewall_allow_azure_ips: + description: + - The current state of allowing or disallowing IPs originating within Azure through the firewall. + type: str + sample: Disabled + firewall_rules: + description: + - The list of firewall rules associated with this Data Lake Store account. + type: list + contains: + name: + description: + - The resource name. + type: str + returned: always + sample: Example Name + start_ip_address: + description: + - The start IP address for the firewall rule. This can be either ipv4 or ipv6. Start and End should be in the same protocol. 
+ type: str + returned: always + sample: 192.168.1.1 + end_ip_address: + description: + - The end IP address for the firewall rule. This can be either ipv4 or ipv6. Start and End should be in the same protocol. + type: str + returned: always + sample: 192.168.1.254 + firewall_state: + description: + - The current state of the IP address firewall for this Data Lake Store account. + type: str + sample: Enabled + id: + description: + - The resource identifier. + returned: always + type: str + sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.DataLakeStore/accounts/testaccount + identity: + description: + - The Key Vault encryption identity, if any. + type: complex + contains: + type: + description: + - The type of encryption being used. + type: str + sample: SystemAssigned + name: + description: + - The principal identifier associated with the encryption. + type: str + sample: 00000000-0000-0000-0000-000000000000 + name: + description: + - The tenant identifier associated with the encryption. + type: str + sample: 00000000-0000-0000-0000-000000000000 + last_modified_time: + description: + - The account last modified time. + returned: always + type: str + sample: 2020-01-01T00:00:00.000000+00:00 + location: + description: + - The resource location. + returned: always + type: str + sample: westeurope + name: + description: + - The resource name. + returned: always + type: str + sample: testaccount + new_tier: + description: + - The commitment tier to use for next month. + type: str + sample: Consumption + provisioning_state: + description: + - The provisioning status of the Data Lake Store account. + returned: always + type: str + sample: Succeeded + state: + description: + - The state of the Data Lake Store account. + returned: always + type: str + sample: Active + tags: + description: + - The resource tags. + returned: always + type: dict + sample: { "tag1":"abc" } + trusted_id_providers: + description: + - The current state of the trusted identity provider feature for this Data Lake Store account. + type: list + contains: + id: + description: + - The resource identifier. + type: str + name: + description: + - The resource name. + type: str + type: + description: + - The resource type. + type: str + id_provider: + description: + - The URL of this trusted identity provider. + type: str + trusted_id_provider_state: + description: + - The list of trusted identity providers associated with this Data Lake Store account. + type: str + sample: Enabled + type: + description: + - The resource type. + returned: always + type: str + sample: Microsoft.DataLakeStore/accounts + virtual_network_rules: + description: + - The list of virtual network rules associated with this Data Lake Store account. + type: list + contains: + name: + description: + - The resource name. + type: str + sample: Rule Name + subnet_id: + description: + - The resource identifier for the subnet. 
+ type: str + sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Network/virtualNetworks/vnet/subnets/default ''' try: From 6cb136558c3ff956dd4839848cc0d6d0a3fefd5e Mon Sep 17 00:00:00 2001 From: David Duque Date: Mon, 16 Nov 2020 07:36:36 +0100 Subject: [PATCH 11/23] Restore poller, added compare lists for update and fix some issues with account_obj_to_dict --- plugins/modules/azure_rm_datalakestore.py | 109 +++++++++++++--------- 1 file changed, 64 insertions(+), 45 deletions(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index ae455794e..a9c7f04be 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -1,4 +1,8 @@ #!/usr/bin/python +# +# Copyright (c) 2020 David Duque, (@next-davidduquehernandez) +# +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function import datetime @@ -210,7 +214,8 @@ def create_datalake_store(self): self.log(str(parameters)) try: - self.datalake_store_client.accounts.create(self.resource_group, self.name, parameters) + poller = self.datalake_store_client.accounts.create(self.resource_group, self.name, parameters) + self.get_poller_result(poller) except CloudError as e: self.log('Error creating datalake store.') self.fail("Failed to create datalake store: {0}".format(str(e))) @@ -257,36 +262,40 @@ def update_datalake_store(self): parameters.firewall_allow_azure_ips=self.firewall_allow_azure_ips if self.firewall_rules is not None: - self.firewall_rules_model = list() - for rule in self.firewall_rules: - rule_model = self.datalake_store_models.UpdateFirewallRuleWithAccountParameters( - name=rule.get('name'), - start_ip_address=rule.get('start_ip_address'), - end_ip_address=rule.get('end_ip_address')) - self.firewall_rules_model.append(rule_model) - self.results['changed'] = True - parameters.firewall_rules=self.firewall_rules_model + if not self.compare_lists(self.firewall_rules, self.account_dict.get('firewall_rules')): + self.firewall_rules_model = list() + for rule in self.firewall_rules: + rule_model = self.datalake_store_models.UpdateFirewallRuleWithAccountParameters( + name=rule.get('name'), + start_ip_address=rule.get('start_ip_address'), + end_ip_address=rule.get('end_ip_address')) + self.firewall_rules_model.append(rule_model) + self.results['changed'] = True + parameters.firewall_rules=self.firewall_rules_model if self.virtual_network_rules is not None: - self.virtual_network_rules_model = list() - for vnet_rule in self.virtual_network_rules: - vnet_rule_model = self.datalake_store_models.UpdateVirtualNetworkRuleWithAccountParameters( - name=vnet_rule.get('name'), - subnet_id=vnet_rule.get('subnet_id')) - self.virtual_network_rules_model.append(vnet_rule_model) - self.results['changed'] = True - parameters.virtual_network_rules=self.virtual_network_rules_model + if not self.compare_lists(self.virtual_network_rules, self.account_dict.get('virtual_network_rules')): + self.virtual_network_rules_model = list() + for vnet_rule in self.virtual_network_rules: + vnet_rule_model = self.datalake_store_models.UpdateVirtualNetworkRuleWithAccountParameters( + name=vnet_rule.get('name'), + subnet_id=vnet_rule.get('subnet_id')) + self.virtual_network_rules_model.append(vnet_rule_model) + self.results['changed'] = True + parameters.virtual_network_rules=self.virtual_network_rules_model if self.identity_model 
is not None: self.results['changed'] = True parameters.identity=self.identity_model self.log(str(parameters)) - try: - self.datalake_store_client.accounts.update(self.resource_group, self.name, parameters) - except CloudError as e: - self.log('Error creating datalake store.') - self.fail("Failed to create datalake store: {0}".format(str(e))) + if self.results['changed']: + try: + poller = self.datalake_store_client.accounts.update(self.resource_group, self.name, parameters) + self.get_poller_result(poller) + except CloudError as e: + self.log('Error creating datalake store.') + self.fail("Failed to create datalake store: {0}".format(str(e))) return self.get_datalake_store() @@ -325,8 +334,7 @@ def account_obj_to_dict(self, datalake_store_obj): creation_time=datalake_store_obj.creation_time, current_tier=datalake_store_obj.current_tier, default_group=datalake_store_obj.default_group, - encryption_config=dict(type=datalake_store_obj.encryption_config.type, - key_vault_meta_info=None), + encryption_config=None, encryption_provisioning_state=datalake_store_obj.encryption_provisioning_state, encryption_state=datalake_store_obj.encryption_state, endpoint=datalake_store_obj.endpoint, @@ -349,21 +357,23 @@ def account_obj_to_dict(self, datalake_store_obj): ) account_dict['firewall_rules']=list() - for rule in datalake_store_obj.firewall_rules: - rule_item = dict( - name=rule.name, - start_ip_address=rule.start_ip_address, - end_ip_address=rule.end_ip_address - ) - account_dict['firewall_rules'].append(rule_item) + if datalake_store_obj.firewall_rules: + for rule in datalake_store_obj.firewall_rules: + rule_item = dict( + name=rule.name, + start_ip_address=rule.start_ip_address, + end_ip_address=rule.end_ip_address + ) + account_dict['firewall_rules'].append(rule_item) account_dict['virtual_network_rules']=list() - for vnet_rule in datalake_store_obj.virtual_network_rules: - vnet_rule_item = dict( - name=vnet_rule.name, - subnet_id=vnet_rule.subnet_id - ) - account_dict['virtual_network_rules'].append(vnet_rule_item) + if datalake_store_obj.virtual_network_rules: + for vnet_rule in datalake_store_obj.virtual_network_rules: + vnet_rule_item = dict( + name=vnet_rule.name, + subnet_id=vnet_rule.subnet_id + ) + account_dict['virtual_network_rules'].append(vnet_rule_item) if datalake_store_obj.identity: account_dict['identity']=dict( @@ -372,17 +382,26 @@ def account_obj_to_dict(self, datalake_store_obj): tenant_id=datalake_store_obj.identity.tenant_id ) - if datalake_store_obj.encryption_config.key_vault_meta_info: - account_dict['encryption_config'] = dict( - key_vault_meta_info = dict( - key_vault_resource_id = datalake_store_obj.encryption_config.key_vault_meta_info.key_vault_resource_id, - encryption_key_name = datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_name, - encryption_key_version = datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_version + if datalake_store_obj.encryption_config: + if datalake_store_obj.encryption_config.key_vault_meta_info: + account_dict['encryption_config'] = dict( + key_vault_meta_info = dict( + key_vault_resource_id = datalake_store_obj.encryption_config.key_vault_meta_info.key_vault_resource_id, + encryption_key_name = datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_name, + encryption_key_version = datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_version + ) ) - ) return account_dict + def compare_lists(self, list1, list2): + if len(list1) != len(list2): + return False + for 
element in list1: + if element not in list2: + return False + return True + def main(): AzureRMDatalakeStore() From be4a2c0b47e4ff0115f3eeb8d6778a4c205b05f7 Mon Sep 17 00:00:00 2001 From: David Duque Date: Mon, 16 Nov 2020 09:29:30 +0100 Subject: [PATCH 12/23] Added test --- .../targets/azure_rm_datalakestore/aliases | 3 + .../azure_rm_datalakestore/meta/main.yml | 2 + .../azure_rm_datalakestore/tasks/main.yml | 205 ++++++++++++++++++ 3 files changed, 210 insertions(+) create mode 100644 tests/integration/targets/azure_rm_datalakestore/aliases create mode 100644 tests/integration/targets/azure_rm_datalakestore/meta/main.yml create mode 100644 tests/integration/targets/azure_rm_datalakestore/tasks/main.yml diff --git a/tests/integration/targets/azure_rm_datalakestore/aliases b/tests/integration/targets/azure_rm_datalakestore/aliases new file mode 100644 index 000000000..aa77c071a --- /dev/null +++ b/tests/integration/targets/azure_rm_datalakestore/aliases @@ -0,0 +1,3 @@ +cloud/azure +shippable/azure/group2 +destructive diff --git a/tests/integration/targets/azure_rm_datalakestore/meta/main.yml b/tests/integration/targets/azure_rm_datalakestore/meta/main.yml new file mode 100644 index 000000000..95e1952f9 --- /dev/null +++ b/tests/integration/targets/azure_rm_datalakestore/meta/main.yml @@ -0,0 +1,2 @@ +dependencies: + - setup_azure diff --git a/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml b/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml new file mode 100644 index 000000000..dbc3cca67 --- /dev/null +++ b/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml @@ -0,0 +1,205 @@ +- name: Create data lake store name + set_fact: + adl_name: "adl{{ resource_group | hash('md5') | truncate(21, True, '') }}" + vnet_name: "vnet{{ resource_group | hash('md5') | truncate(20, True, '') }}" + +- name: Create virtual network + azure_rm_virtualnetwork: + name: "{{ vnet_name }}" + resource_group: "{{ resource_group }}" + address_prefixes_cidr: + - 10.1.0.0/16 + register: vnet_output + +- name: Create subnet + azure_rm_subnet: + name: foobar + virtual_network_name: "{{ vnet_name }}" + resource_group: "{{ resource_group }}" + address_prefix_cidr: "10.1.1.0/24" + service_endpoints: + - service: Microsoft.AzureActiveDirectory + register: subnet_output + +- name: Create minimal data lake store + azure_rm_datalakestore: + resource_group: "{{ resource_group }}" + name: "{{ adl_name }}" + register: output + +- name: Assert status succeeded and results + assert: + that: + - output.changed + - output.state.id is defined + - output.state.account_id is defined + - output.state.creation_time is defined + - output.state.current_tier == "Consumption" + - output.state.encryption_state == "Enabled" + - output.state.endpoint == "{{ adl_name }}.azuredatalakestore.net" + - output.state.firewall_allow_azure_ips == "Disabled" + - output.state.firewall_rules | length == 0 + - output.state.firewall_state == "Disabled" + - output.state.last_modified_time is defined + - output.state.new_tier == "Consumption" + - output.state.provisioning_state == "Succeeded" + - output.state.trusted_id_provider_state == "Disabled" + +- name: Create minimal data lake store (Idempotence) + azure_rm_datalakestore: + resource_group: "{{ resource_group }}" + name: "{{ adl_name }}" + register: output + +- name: Assert that status has not changed + assert: + that: + - not output.changed + +- name: Update data lake store to add virtual_network_rules + azure_rm_datalakestore: + resource_group: "{{ resource_group 
}}" + name: "{{ adl_name }}" + virtual_network_rules: + - name: vnet_rule_1 + subnet_id: "{{ subnet_output.state.id }}" + register: output + +- name: Assert status succeeded and results include virtual_network_rules + assert: + that: + - output.changed + - output.state.virtual_network_rules | length == 1 + - output.state.virtual_network_rules[0].name == "vnet_rule_1" + - output.state.virtual_network_rules[0].subnet_id == "{{ subnet_output.state.id }}" + +- name: Update data lake store to change encryption state that must fail + azure_rm_datalakestore: + resource_group: "{{ resource_group }}" + name: "{{ adl_name }}" + encryption_state: Disabled + register: output + ignore_errors: yes + +- name: Assert that encryption state cannot change + assert: + that: + - not output.changed + - output.msg == 'Encryption type cannot be updated.' + +- name: Update data lake store to add new_tier + azure_rm_datalakestore: + resource_group: "{{ resource_group }}" + name: "{{ adl_name }}" + new_tier: Commitment_1TB + register: output + +- name: Assert status succeeded and results include virtual_network_rules + assert: + that: + - output.changed + - output.state.current_tier == "Consumption" + - output.state.new_tier == "Commitment_1TB" + +- name: Delete minimal data lake store + azure_rm_datalakestore: + resource_group: "{{ resource_group }}" + name: "{{ adl_name }}" + state: absent + register: output + +- name: Create new data lake store + azure_rm_datalakestore: + resource_group: "{{ resource_group }}" + name: "{{ adl_name }}" + tags: + BillingIdentifier: PI-3000012219_100% + P1: V1 + P2: V4 + P3: V3 + new_tier: Commitment_1TB + default_group: default_group_test + encryption_state: Enabled + firewall_state: Enabled + firewall_allow_azure_ips: Enabled + firewall_rules: + - + name: test_rule_1 + start_ip_address: 192.168.1.1 + end_ip_address: 192.168.1.254 + - + name: test_rule_2 + start_ip_address: 10.0.0.1 + end_ip_address: 10.1.0.1 + virtual_network_rules: + - name: vnet_rule_1 + subnet_id: "{{ subnet_output.state.id }}" + register: output + +- name: Assert status succeeded and results include an Id value + assert: + that: + - output.changed + - output.state.id is defined + - output.state.account_id is defined + - output.state.creation_time is defined + - output.state.current_tier == "Commitment_1TB" + - output.state.default_group == "default_group_test" + - output.state.encryption_state == "Enabled" + - output.state.endpoint == "{{ adl_name }}.azuredatalakestore.net" + - output.state.firewall_allow_azure_ips == "Enabled" + - output.state.firewall_rules | length == 2 + - output.state.firewall_state == "Enabled" + - output.state.last_modified_time is defined + - output.state.new_tier == "Commitment_1TB" + - output.state.provisioning_state == "Succeeded" + - output.state.tags | length == 4 + - output.state.trusted_id_provider_state == "Disabled" + - output.state.virtual_network_rules | length == 1 + - output.state.virtual_network_rules[0].name == "vnet_rule_1" + - output.state.virtual_network_rules[0].subnet_id == "{{ subnet_output.state.id }}" + +- name: Create new data lake store (Idempotence) + azure_rm_datalakestore: + resource_group: "{{ resource_group }}" + name: "{{ adl_name }}" + tags: + BillingIdentifier: PI-3000012219_100% + P1: V1 + P2: V4 + P3: V3 + new_tier: Commitment_1TB + default_group: default_group_test + encryption_state: Enabled + firewall_state: Enabled + firewall_allow_azure_ips: Enabled + firewall_rules: + - + name: test_rule_1 + start_ip_address: 192.168.1.1 + end_ip_address: 
192.168.1.254 + - + name: test_rule_2 + start_ip_address: 10.0.0.1 + end_ip_address: 10.1.0.1 + virtual_network_rules: + - name: vnet_rule_1 + subnet_id: "{{ subnet_output.state.id }}" + register: output + +- name: Assert that status has not changed + assert: + that: + - not output.changed + +- name: Delete virtual network + azure_rm_virtualnetwork: + name: "{{ vnet_name }}" + resource_group: "{{ resource_group }}" + state: absent + +- name: Delete acccount + azure_rm_datalakestore: + resource_group: "{{ resource_group }}" + name: "{{ adl_name }}" + state: absent From f2a0ffc337782c992915ef9aac2f459339b474e3 Mon Sep 17 00:00:00 2001 From: David Duque Date: Sun, 22 Nov 2020 11:20:59 +0100 Subject: [PATCH 13/23] Completed documentation --- plugins/modules/azure_rm_datalakestore.py | 389 +++++++++++++++++- .../modules/azure_rm_datalakestore_info.py | 21 +- 2 files changed, 403 insertions(+), 7 deletions(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index a9c7f04be..02d51f343 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright (c) 2020 David Duque, (@next-davidduquehernandez) +# Copyright (c) 2020 David Duque Hernández, (@next-davidduquehernandez) # # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -15,13 +15,398 @@ 'supported_by': 'community'} DOCUMENTATION = ''' - +module: azure_rm_datalakestore +version_added: "1.2.0" +short_description: Manage Azure data lake store +description: + - Create, update or delete a data lake store. +options: + default_group: + description: + - The default owner group for all new folders and files created in the Data Lake Store account. + type: str + encryption_config: + description: + - The Key Vault encryption configuration. + type: dict + suboptions: + type: + description: + - The type of encryption configuration being used. + choices: + - UserManaged + - ServiceManaged + required: true + key_vault_meta_info: + description: + - The Key Vault information for connecting to user managed encryption keys. + type: dict + suboptions: + key_vault_resource_id: + description: + - The resource identifier for the user managed Key Vault being used to encrypt. + type: str + required: true + encryption_key_name: + description: + - The name of the user managed encryption key. + type: str + required: true + encryption_key_version: + description: + - The version of the user managed encryption key. + type: str + required: true + encryption_state: + description: + - The current state of encryption for this Data Lake Store account. + choices: + - Enabled + - Disabled + firewall_allow_azure_ips: + description: + - The current state of allowing or disallowing IPs originating within Azure through the firewall. + - If the firewall is disabled, this is not enforced. + choices: + - Enabled + - Disabled + firewall_rules: + description: + - The list of firewall rules associated with this Data Lake Store account. + type: list + suboptions: + name: + description: + - The unique name of the firewall rule to create. + type: str + required: true + start_ip_address: + description: + - The start IP address for the firewall rule. + - This can be either ipv4 or ipv6. + - Start and End should be in the same protocol. + type: str + required: true + end_ip_address: + description: + - The end IP address for the firewall rule. + - This can be either ipv4 or ipv6. 
+ - Start and End should be in the same protocol. + type: str + required: true + firewall_state: + description: + - The current state of the IP address firewall for this Data Lake Store account. + choices: + - Enabled + - Disabled + identity: + description: + - The Key Vault encryption identity, if any. + choices: + - SystemAssigned + location: + description: + - The resource location. + type: str + name: + description: + - The name of the Data Lake Store account. + type: str + required: true + new_tier: + description: + - The commitment tier to use for next month. + choices: + - Consumption + - Commitment_1TB + - Commitment_10TB + - Commitment_100TB + - Commitment_500TB + - Commitment_1PB + - Commitment_5PB + resource_group: + description: + - The name of the Azure resource group to use. + required: true + state: + description: + - State of the data lake store. Use C(present) to create or update a data lake store and use C(absent) to delete it. + default: present + choices: + - absent + - present + virtual_network_rules: + description: + - The list of virtual network rules associated with this Data Lake Store account. + type: list + suboptions: + name: + description: + - The unique name of the virtual network rule to create. + type: str + required: true + subnet_id: + description: + - The resource identifier for the subnet. + type: str + required: true + +extends_documentation_fragment: + - azure.azcollection.azure + - azure.azcollection.azure_tags + +author: + David Duque Hernández (@next-davidduquehernandez) ''' EXAMPLES = ''' + - name: Get Azure Data Lake Store info + azure_rm_datalakestore: + resource_group: myResourceGroup + name: myDataLakeStore ''' RETURN = ''' +state: + description: + - Facts for Azure Data Lake Store created/updated. + returned: always + type: complex + contains: + account_id: + description: + - The unique identifier associated with this Data Lake Store account. + returned: always + type: str + sample: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + creation_time: + description: + - The account creation time. + returned: always + type: str + sample: 2020-01-01T00:00:00.000000+00:00 + current_tier: + description: + - The commitment tier in use for the current month. + type: str + returned: always + sample: Consumption + default_group: + description: + - The default owner group for all new folders and files created in the Data Lake Store account. + type: str + sample: null + encryption_config: + description: + - The Key Vault encryption configuration. + type: complex + contains: + type: + description: + - The type of encryption configuration being used. + type: str + returned: always + sample: ServiceManaged + key_vault_meta_info: + description: + - The Key Vault information for connecting to user managed encryption keys. + type: complex + contains: + key_vault_resource_id: + description: + - The resource identifier for the user managed Key Vault being used to encrypt. + type: str + returned: always + sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.KeyVault/vaults/testkv + encryption_key_name: + description: + - The name of the user managed encryption key. + type: str + returned: always + sample: KeyName + encryption_key_version: + description: + - The version of the user managed encryption key. + type: str + returned: always + sample: 86a1e3b7406f45afa0d54e21eff47e39 + encryption_provisioning_state: + description: + - The current state of encryption provisioning for this Data Lake Store account. 
+ type: str + sample: Succeeded + encryption_state: + description: + - The current state of encryption for this Data Lake Store account. + type: str + returned: always + sample: Enabled + endpoint: + description: + - The full CName endpoint for this account. + returned: always + type: str + sample: testaccount.azuredatalakestore.net + firewall_allow_azure_ips: + description: + - The current state of allowing or disallowing IPs originating within Azure through the firewall. + - If the firewall is disabled, this is not enforced. + type: str + returned: always + sample: Disabled + firewall_rules: + description: + - The list of firewall rules associated with this Data Lake Store account. + type: list + returned: always + contains: + name: + description: + - The resource name. + type: str + returned: always + sample: Example Name + start_ip_address: + description: + - The start IP address for the firewall rule. + - This can be either ipv4 or ipv6. + - Start and End should be in the same protocol. + type: str + returned: always + sample: 192.168.1.1 + end_ip_address: + description: + - The end IP address for the firewall rule. + - This can be either ipv4 or ipv6. + - Start and End should be in the same protocol. + type: str + returned: always + sample: 192.168.1.254 + firewall_state: + description: + - The current state of the IP address firewall for this Data Lake Store account. + type: str + returned: always + sample: Enabled + id: + description: + - The resource identifier. + returned: always + type: str + sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.DataLakeStore/accounts/testaccount + identity: + description: + - The Key Vault encryption identity, if any. + type: complex + contains: + type: + description: + - The type of encryption being used. + type: str + sample: SystemAssigned + name: + description: + - The principal identifier associated with the encryption. + type: str + sample: 00000000-0000-0000-0000-000000000000 + name: + description: + - The tenant identifier associated with the encryption. + type: str + sample: 00000000-0000-0000-0000-000000000000 + last_modified_time: + description: + - The account last modified time. + returned: always + type: str + sample: 2020-01-01T00:00:00.000000+00:00 + location: + description: + - The resource location. + returned: always + type: str + sample: westeurope + name: + description: + - The resource name. + returned: always + type: str + sample: testaccount + new_tier: + description: + - The commitment tier to use for next month. + type: str + returned: always + sample: Consumption + provisioning_state: + description: + - The provisioning status of the Data Lake Store account. + returned: always + type: str + sample: Succeeded + state: + description: + - The state of the Data Lake Store account. + returned: always + type: str + sample: Active + tags: + description: + - The resource tags. + returned: always + type: dict + sample: { "tag1":"abc" } + trusted_id_providers: + description: + - The current state of the trusted identity provider feature for this Data Lake Store account. + type: list + returned: always + contains: + id: + description: + - The resource identifier. + type: str + name: + description: + - The resource name. + type: str + type: + description: + - The resource type. + type: str + id_provider: + description: + - The URL of this trusted identity provider. 
+ type: str + trusted_id_provider_state: + description: + - The list of trusted identity providers associated with this Data Lake Store account. + type: str + returned: always + sample: Enabled + type: + description: + - The resource type. + returned: always + type: str + sample: Microsoft.DataLakeStore/accounts + virtual_network_rules: + description: + - The list of virtual network rules associated with this Data Lake Store account. + type: list + returned: always + contains: + name: + description: + - The resource name. + type: str + sample: Rule Name + subnet_id: + description: + - The resource identifier for the subnet. + type: str + sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Network/virtualNetworks/vnet/subnets/default ''' diff --git a/plugins/modules/azure_rm_datalakestore_info.py b/plugins/modules/azure_rm_datalakestore_info.py index 71d4bf752..49146d26c 100644 --- a/plugins/modules/azure_rm_datalakestore_info.py +++ b/plugins/modules/azure_rm_datalakestore_info.py @@ -24,11 +24,11 @@ options: resource_group: description: - - The name of the resource group. + - The name of the Azure resource group. type: str name: description: - - The name of the Azure Data Lake Store. + - The name of the Data Lake Store account. type: str extends_documentation_fragment: @@ -40,10 +40,17 @@ ''' EXAMPLES = ''' - - name: Get Azure Data Lake Store info + - name: Get Azure Data Lake Store info from resource group 'myResourceGroup' and name 'myDataLakeStore' azure_rm_datalakestore_info: resource_group: myResourceGroup name: myDataLakeStore + + - name: Get Azure Data Lake Store info from resource group 'myResourceGroup' + azure_rm_datalakestore_info: + resource_group: myResourceGroup + + - name: Get Azure Data Lake Store info + azure_rm_datalakestore_info: ''' RETURN = ''' @@ -143,13 +150,17 @@ sample: Example Name start_ip_address: description: - - The start IP address for the firewall rule. This can be either ipv4 or ipv6. Start and End should be in the same protocol. + - The start IP address for the firewall rule. + - This can be either ipv4 or ipv6. + - Start and End should be in the same protocol. type: str returned: always sample: 192.168.1.1 end_ip_address: description: - - The end IP address for the firewall rule. This can be either ipv4 or ipv6. Start and End should be in the same protocol. + - The end IP address for the firewall rule. + - This can be either ipv4 or ipv6. + - Start and End should be in the same protocol. 
type: str returned: always sample: 192.168.1.254 From 772dc7cf0bbe5e497c3976417812f811570e4229 Mon Sep 17 00:00:00 2001 From: David Duque Date: Sat, 5 Dec 2020 10:16:10 +0100 Subject: [PATCH 14/23] Added ignore sanity tests and added to pr-pipelines --- pr-pipelines.yml | 1 + .../integration/targets/azure_rm_datalakestore/tasks/main.yml | 2 -- tests/sanity/ignore-2.10.txt | 4 ++++ tests/sanity/ignore-2.11.txt | 4 ++++ 4 files changed, 9 insertions(+), 2 deletions(-) diff --git a/pr-pipelines.yml b/pr-pipelines.yml index c9269394c..b56cdd551 100644 --- a/pr-pipelines.yml +++ b/pr-pipelines.yml @@ -35,6 +35,7 @@ parameters: - "azure_rm_containerinstance" - "azure_rm_containerregistry" - "azure_rm_cosmosdbaccount" + - "azure_rm_datalakestore" - "azure_rm_deployment" - "azure_rm_dnsrecordset" - "azure_rm_dnszone" diff --git a/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml b/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml index dbc3cca67..c25712139 100644 --- a/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml +++ b/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml @@ -113,7 +113,6 @@ resource_group: "{{ resource_group }}" name: "{{ adl_name }}" tags: - BillingIdentifier: PI-3000012219_100% P1: V1 P2: V4 P3: V3 @@ -164,7 +163,6 @@ resource_group: "{{ resource_group }}" name: "{{ adl_name }}" tags: - BillingIdentifier: PI-3000012219_100% P1: V1 P2: V4 P3: V3 diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index 4d2342264..f7700e729 100644 --- a/tests/sanity/ignore-2.10.txt +++ b/tests/sanity/ignore-2.10.txt @@ -90,6 +90,10 @@ plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:requi plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:required_if-unknown-key plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:return-syntax-error plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:undocumented-parameter +plugins/modules/azure_rm_datalakestore.py validate-modules:required_if-unknown-key +plugins/modules/azure_rm_datalakestore.py validate-modules:required_if-requirements-unknown +plugins/modules/azure_rm_datalakestore_info.py validate-modules:required_if-unknown-key +plugins/modules/azure_rm_datalakestore_info.py validate-modules:required_if-requirements-unknown plugins/modules/azure_rm_deployment.py validate-modules:parameter-type-not-in-doc plugins/modules/azure_rm_deployment.py validate-modules:required_if-requirements-unknown plugins/modules/azure_rm_deployment.py validate-modules:required_if-unknown-key diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index ea2396266..68b29d19c 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -90,6 +90,10 @@ plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:undoc plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:required_if-requirements-unknown plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:required_if-unknown-key plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:return-syntax-error +plugins/modules/azure_rm_datalakestore.py validate-modules:required_if-unknown-key +plugins/modules/azure_rm_datalakestore.py validate-modules:required_if-requirements-unknown +plugins/modules/azure_rm_datalakestore_info.py validate-modules:required_if-unknown-key +plugins/modules/azure_rm_datalakestore_info.py 
validate-modules:required_if-requirements-unknown plugins/modules/azure_rm_deployment.py validate-modules:parameter-type-not-in-doc plugins/modules/azure_rm_deployment.py validate-modules:required_if-requirements-unknown plugins/modules/azure_rm_deployment.py validate-modules:required_if-unknown-key From 0df9eb6ebb0f4268f648b844943be487d099b561 Mon Sep 17 00:00:00 2001 From: David Duque Date: Sat, 5 Dec 2020 11:06:05 +0100 Subject: [PATCH 15/23] Resolved pylint messages --- plugins/modules/azure_rm_datalakestore.py | 95 ++++++++++++----------- 1 file changed, 50 insertions(+), 45 deletions(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index 02d51f343..43536a9fb 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -82,7 +82,7 @@ required: true start_ip_address: description: - - The start IP address for the firewall rule. + - The start IP address for the firewall rule. - This can be either ipv4 or ipv6. - Start and End should be in the same protocol. type: str @@ -218,7 +218,7 @@ - The resource identifier for the user managed Key Vault being used to encrypt. type: str returned: always - sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.KeyVault/vaults/testkv + sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.KeyVault/vaults/tstkv encryption_key_name: description: - The name of the user managed encryption key. @@ -250,7 +250,7 @@ sample: testaccount.azuredatalakestore.net firewall_allow_azure_ips: description: - - The current state of allowing or disallowing IPs originating within Azure through the firewall. + - The current state of allowing or disallowing IPs originating within Azure through the firewall. - If the firewall is disabled, this is not enforced. type: str returned: always @@ -269,7 +269,7 @@ sample: Example Name start_ip_address: description: - - The start IP address for the firewall rule. + - The start IP address for the firewall rule. - This can be either ipv4 or ipv6. - Start and End should be in the same protocol. type: str @@ -406,7 +406,7 @@ description: - The resource identifier for the subnet. 
type: str - sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Network/virtualNetworks/vnet/subnets/default + sample: /subscriptions/{subscriptionId}/resourceGroups/myResourceGroup/providers/Microsoft.Network/virtualNetworks/vnet/subnets/default ''' @@ -429,11 +429,11 @@ def __init__(self): options=dict( type=dict(type='str', choices=['UserManaged', 'ServiceManaged']), key_vault_meta_info=dict( - type='dict', + type='dict', options=dict( - key_vault_resource_id=dict(type='str',required=True), - encryption_key_name=dict(type='str',required=True), - encryption_key_version=dict(type='str',required=True) + key_vault_resource_id=dict(type='str', required=True), + encryption_key_name=dict(type='str', required=True), + encryption_key_version=dict(type='str', required=True) ) ), ) @@ -443,29 +443,30 @@ def __init__(self): firewall_rules=dict( type='list', options=dict( - name=dict(type='str',required=True), - start_ip_address=dict(type='str',required=True), - end_ip_address=dict(type='str',required=True) + name=dict(type='str', required=True), + start_ip_address=dict(type='str', required=True), + end_ip_address=dict(type='str', required=True) ) ), firewall_state=dict(type='str', choices=['Enabled', 'Disabled']), identity=dict( type='dict', options=dict( - type=dict(type='str', choices=['SystemAssigned'],required=True) + type=dict(type='str', choices=['SystemAssigned'], required=True) ) ), location=dict(type='str'), - name=dict(type='str',required=True), - new_tier=dict(type='str', choices=['Consumption', 'Commitment_1TB', 'Commitment_10TB', 'Commitment_100TB', 'Commitment_500TB', 'Commitment_1PB', 'Commitment_5PB']), - resource_group=dict(type='str',required=True), + name=dict(type='str', required=True), + new_tier=dict(type='str', choices=['Consumption', 'Commitment_1TB', 'Commitment_10TB', 'Commitment_100TB', + 'Commitment_500TB', 'Commitment_1PB', 'Commitment_5PB']), + resource_group=dict(type='str', required=True), state=dict(type='str', default='present', choices=['present', 'absent']), tags=dict(type='dict'), virtual_network_rules=dict( type='list', options=dict( - name=dict(type='str',required=True), - subnet_id=dict(type='str',required=True) + name=dict(type='str', required=True), + subnet_id=dict(type='str', required=True) ) ), ) @@ -493,8 +494,8 @@ def __init__(self): self.account_dict = None super(AzureRMDatalakeStore, self).__init__(derived_arg_spec=self.module_arg_spec, - supports_check_mode=False, - supports_tags=False) + supports_check_mode=False, + supports_tags=False) def exec_module(self, **kwargs): for key in list(self.module_arg_spec.keys()) + ['tags']: @@ -508,8 +509,8 @@ def exec_module(self, **kwargs): encryption_key_name=self.encryption_config.get('key_vault_meta_info').get('encryption_key_name'), encryption_key_version=self.encryption_config.get('key_vault_meta_info').get('encryption_key_version') ) - self.encryption_config_model=self.datalake_store_models.EncryptionConfig(type=self.encryption_config.get('type'), - key_vault_meta_info=key_vault_meta_info_model) + self.encryption_config_model = self.datalake_store_models.EncryptionConfig(type=self.encryption_config.get('type'), + key_vault_meta_info=key_vault_meta_info_model) if self.identity is not None: self.identity_model = self.datalake_store_models.EncryptionIdentity( @@ -565,7 +566,7 @@ def create_datalake_store(self): location=self.location ) return account_dict - + if self.firewall_rules is not None: self.firewall_rules_model = list() for rule in 
self.firewall_rules: @@ -574,7 +575,7 @@ def create_datalake_store(self): start_ip_address=rule.get('start_ip_address'), end_ip_address=rule.get('end_ip_address')) self.firewall_rules_model.append(rule_model) - + if self.virtual_network_rules is not None: self.virtual_network_rules_model = list() for vnet_rule in self.virtual_network_rules: @@ -604,7 +605,7 @@ def create_datalake_store(self): except CloudError as e: self.log('Error creating datalake store.') self.fail("Failed to create datalake store: {0}".format(str(e))) - + return self.get_datalake_store() def update_datalake_store(self): @@ -616,21 +617,24 @@ def update_datalake_store(self): update_tags, self.account_dict['tags'] = self.update_tags(self.account_dict['tags']) if update_tags: self.results['changed'] = True - parameters.tags=self.account_dict['tags'] + parameters.tags = self.account_dict['tags'] if self.new_tier and self.account_dict.get('new_tier') != self.new_tier: self.results['changed'] = True - parameters.new_tier=self.new_tier + parameters.new_tier = self.new_tier if self.default_group and self.account_dict.get('default_group') != self.default_group: self.results['changed'] = True - parameters.default_group=self.default_group + parameters.default_group = self.default_group if self.encryption_state and self.account_dict.get('encryption_state') != self.encryption_state: self.fail("Encryption type cannot be updated.") if self.encryption_config: - if self.encryption_config.get('type') == 'UserManaged' and self.encryption_config.get('key_vault_meta_info') != self.account_dict.get('encryption_config').get('key_vault_meta_info'): + if ( + self.encryption_config.get('type') == 'UserManaged' + and self.encryption_config.get('key_vault_meta_info') != self.account_dict.get('encryption_config').get('key_vault_meta_info') + ): self.results['changed'] = True key_vault_meta_info_model = self.datalake_store_models.UpdateKeyVaultMetaInfo( encryption_key_version=self.encryption_config.get('key_vault_meta_info').get('encryption_key_version') @@ -640,12 +644,12 @@ def update_datalake_store(self): if self.firewall_state and self.account_dict.get('firewall_state') != self.firewall_state: self.results['changed'] = True - parameters.firewall_state=self.firewall_state + parameters.firewall_state = self.firewall_state if self.firewall_allow_azure_ips and self.account_dict.get('firewall_allow_azure_ips') != self.firewall_allow_azure_ips: self.results['changed'] = True - parameters.firewall_allow_azure_ips=self.firewall_allow_azure_ips - + parameters.firewall_allow_azure_ips = self.firewall_allow_azure_ips + if self.firewall_rules is not None: if not self.compare_lists(self.firewall_rules, self.account_dict.get('firewall_rules')): self.firewall_rules_model = list() @@ -656,8 +660,8 @@ def update_datalake_store(self): end_ip_address=rule.get('end_ip_address')) self.firewall_rules_model.append(rule_model) self.results['changed'] = True - parameters.firewall_rules=self.firewall_rules_model - + parameters.firewall_rules = self.firewall_rules_model + if self.virtual_network_rules is not None: if not self.compare_lists(self.virtual_network_rules, self.account_dict.get('virtual_network_rules')): self.virtual_network_rules_model = list() @@ -667,11 +671,11 @@ def update_datalake_store(self): subnet_id=vnet_rule.get('subnet_id')) self.virtual_network_rules_model.append(vnet_rule_model) self.results['changed'] = True - parameters.virtual_network_rules=self.virtual_network_rules_model + parameters.virtual_network_rules = self.virtual_network_rules_model 
if self.identity_model is not None: self.results['changed'] = True - parameters.identity=self.identity_model + parameters.identity = self.identity_model self.log(str(parameters)) if self.results['changed']: @@ -681,7 +685,7 @@ def update_datalake_store(self): except CloudError as e: self.log('Error creating datalake store.') self.fail("Failed to create datalake store: {0}".format(str(e))) - + return self.get_datalake_store() def delete_datalake_store(self): @@ -741,7 +745,7 @@ def account_obj_to_dict(self, datalake_store_obj): virtual_network_rules=None ) - account_dict['firewall_rules']=list() + account_dict['firewall_rules'] = list() if datalake_store_obj.firewall_rules: for rule in datalake_store_obj.firewall_rules: rule_item = dict( @@ -751,7 +755,7 @@ def account_obj_to_dict(self, datalake_store_obj): ) account_dict['firewall_rules'].append(rule_item) - account_dict['virtual_network_rules']=list() + account_dict['virtual_network_rules'] = list() if datalake_store_obj.virtual_network_rules: for vnet_rule in datalake_store_obj.virtual_network_rules: vnet_rule_item = dict( @@ -761,7 +765,7 @@ def account_obj_to_dict(self, datalake_store_obj): account_dict['virtual_network_rules'].append(vnet_rule_item) if datalake_store_obj.identity: - account_dict['identity']=dict( + account_dict['identity'] = dict( type=datalake_store_obj.identity.type, principal_id=datalake_store_obj.identity.principal_id, tenant_id=datalake_store_obj.identity.tenant_id @@ -770,13 +774,13 @@ def account_obj_to_dict(self, datalake_store_obj): if datalake_store_obj.encryption_config: if datalake_store_obj.encryption_config.key_vault_meta_info: account_dict['encryption_config'] = dict( - key_vault_meta_info = dict( - key_vault_resource_id = datalake_store_obj.encryption_config.key_vault_meta_info.key_vault_resource_id, - encryption_key_name = datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_name, - encryption_key_version = datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_version + key_vault_meta_info=dict( + key_vault_resource_id=datalake_store_obj.encryption_config.key_vault_meta_info.key_vault_resource_id, + encryption_key_name=datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_name, + encryption_key_version=datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_version ) ) - + return account_dict def compare_lists(self, list1, list2): @@ -787,6 +791,7 @@ def compare_lists(self, list1, list2): return False return True + def main(): AzureRMDatalakeStore() From f2ecf07c70be86d763523ef3bbb2a062960651c2 Mon Sep 17 00:00:00 2001 From: David Duque Date: Sat, 5 Dec 2020 12:02:51 +0100 Subject: [PATCH 16/23] Fixes sanity warnings --- plugins/modules/azure_rm_datalakestore.py | 49 +++++++++++-------- .../azure_rm_datalakestore/tasks/main.yml | 4 +- 2 files changed, 30 insertions(+), 23 deletions(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index 43536a9fb..9aa6a917a 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -5,8 +5,6 @@ # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -import datetime - __metaclass__ = type @@ -37,6 +35,7 @@ - UserManaged - ServiceManaged required: true + type: str key_vault_meta_info: description: - The Key Vault information for connecting to user managed encryption keys. 
@@ -63,6 +62,7 @@ choices: - Enabled - Disabled + type: str firewall_allow_azure_ips: description: - The current state of allowing or disallowing IPs originating within Azure through the firewall. @@ -70,6 +70,7 @@ choices: - Enabled - Disabled + type: str firewall_rules: description: - The list of firewall rules associated with this Data Lake Store account. @@ -100,11 +101,13 @@ choices: - Enabled - Disabled + type: str identity: description: - The Key Vault encryption identity, if any. choices: - SystemAssigned + type: str location: description: - The resource location. @@ -125,10 +128,12 @@ - Commitment_500TB - Commitment_1PB - Commitment_5PB + type: str resource_group: description: - The name of the Azure resource group to use. required: true + type: str state: description: - State of the data lake store. Use C(present) to create or update a data lake store and use C(absent) to delete it. @@ -136,6 +141,7 @@ choices: - absent - present + type: str virtual_network_rules: description: - The list of virtual network rules associated with this Data Lake Store account. @@ -185,7 +191,7 @@ - The account creation time. returned: always type: str - sample: 2020-01-01T00:00:00.000000+00:00 + sample: '2020-01-01T00:00:00.000000+00:00' current_tier: description: - The commitment tier in use for the current month. @@ -320,7 +326,7 @@ - The account last modified time. returned: always type: str - sample: 2020-01-01T00:00:00.000000+00:00 + sample: '2020-01-01T00:00:00.000000+00:00' location: description: - The resource location. @@ -411,6 +417,7 @@ ''' from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase +import datetime try: from msrestazure.azure_exceptions import CloudError @@ -418,6 +425,16 @@ # This is handled in azure_rm_common pass +firewall_rules_item = dict( + name=dict(type='str', required=True), + start_ip_address=dict(type='str', required=True), + end_ip_address=dict(type='str', required=True) +) + +virtual_network_rules_item = dict( + name=dict(type='str', required=True), + subnet_id=dict(type='str', required=True) +) class AzureRMDatalakeStore(AzureRMModuleBase): def __init__(self): @@ -427,7 +444,7 @@ def __init__(self): encryption_config=dict( type='dict', options=dict( - type=dict(type='str', choices=['UserManaged', 'ServiceManaged']), + type=dict(type='str', choices=['UserManaged', 'ServiceManaged'], required=True), key_vault_meta_info=dict( type='dict', options=dict( @@ -442,19 +459,11 @@ def __init__(self): firewall_allow_azure_ips=dict(type='str', choices=['Enabled', 'Disabled']), firewall_rules=dict( type='list', - options=dict( - name=dict(type='str', required=True), - start_ip_address=dict(type='str', required=True), - end_ip_address=dict(type='str', required=True) - ) + elements='dict', + options=firewall_rules_item ), firewall_state=dict(type='str', choices=['Enabled', 'Disabled']), - identity=dict( - type='dict', - options=dict( - type=dict(type='str', choices=['SystemAssigned'], required=True) - ) - ), + identity=dict(type='str', choices=['SystemAssigned']), location=dict(type='str'), name=dict(type='str', required=True), new_tier=dict(type='str', choices=['Consumption', 'Commitment_1TB', 'Commitment_10TB', 'Commitment_100TB', @@ -464,10 +473,8 @@ def __init__(self): tags=dict(type='dict'), virtual_network_rules=dict( type='list', - options=dict( - name=dict(type='str', required=True), - subnet_id=dict(type='str', required=True) - ) + elements='dict', + options=virtual_network_rules_item ), ) @@ -514,7 +521,7 @@ def 
exec_module(self, **kwargs): if self.identity is not None: self.identity_model = self.datalake_store_models.EncryptionIdentity( - type=self.identity.get('type') + type=self.identity ) resource_group = self.get_resource_group(self.resource_group) diff --git a/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml b/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml index c25712139..fa46c9d78 100644 --- a/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml +++ b/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml @@ -152,7 +152,7 @@ - output.state.last_modified_time is defined - output.state.new_tier == "Commitment_1TB" - output.state.provisioning_state == "Succeeded" - - output.state.tags | length == 4 + - output.state.tags | length == 3 - output.state.trusted_id_provider_state == "Disabled" - output.state.virtual_network_rules | length == 1 - output.state.virtual_network_rules[0].name == "vnet_rule_1" @@ -196,7 +196,7 @@ resource_group: "{{ resource_group }}" state: absent -- name: Delete acccount +- name: Delete Data Lake Store azure_rm_datalakestore: resource_group: "{{ resource_group }}" name: "{{ adl_name }}" From c465afdb4b3cd5327a08455aa7d2dfd90d9ab5ff Mon Sep 17 00:00:00 2001 From: David Duque Date: Mon, 21 Dec 2020 12:11:06 +0100 Subject: [PATCH 17/23] Fixed sanity errors --- plugins/module_utils/azure_rm_common.py | 4 +-- plugins/modules/azure_rm_datalakestore.py | 7 ++-- .../modules/azure_rm_datalakestore_info.py | 33 ++++++++++--------- tests/sanity/ignore-2.10.txt | 4 +++ tests/sanity/ignore-2.11.txt | 4 +++ 5 files changed, 31 insertions(+), 21 deletions(-) diff --git a/plugins/module_utils/azure_rm_common.py b/plugins/module_utils/azure_rm_common.py index ae1548a6d..a7cd0eb0e 100644 --- a/plugins/module_utils/azure_rm_common.py +++ b/plugins/module_utils/azure_rm_common.py @@ -1273,8 +1273,8 @@ def recovery_services_backup_models(self): def datalake_store_client(self): self.log('Getting datalake store client...') self._datalake_store_client = self.get_mgmt_svc_client(DataLakeStoreAccountManagementClient, - base_url=self._cloud_environment.endpoints.resource_manager, - api_version='2016-11-01') + base_url=self._cloud_environment.endpoints.resource_manager, + api_version='2016-11-01') return self._datalake_store_client @property diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index 9aa6a917a..d18b00df4 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -224,7 +224,7 @@ - The resource identifier for the user managed Key Vault being used to encrypt. type: str returned: always - sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.KeyVault/vaults/tstkv + sample: /subscriptions/{subscriptionId}/resourceGroups/myResourceGroup/providers/Microsoft.KeyVault/vaults/tstkv encryption_key_name: description: - The name of the user managed encryption key. @@ -300,7 +300,7 @@ - The resource identifier. returned: always type: str - sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.DataLakeStore/accounts/testaccount + sample: /subscriptions/{subscriptionId}/resourceGroups/myResourceGroup/providers/Microsoft.DataLakeStore/accounts/testaccount identity: description: - The Key Vault encryption identity, if any. 
@@ -436,6 +436,7 @@ subnet_id=dict(type='str', required=True) ) + class AzureRMDatalakeStore(AzureRMModuleBase): def __init__(self): @@ -468,7 +469,7 @@ def __init__(self): name=dict(type='str', required=True), new_tier=dict(type='str', choices=['Consumption', 'Commitment_1TB', 'Commitment_10TB', 'Commitment_100TB', 'Commitment_500TB', 'Commitment_1PB', 'Commitment_5PB']), - resource_group=dict(type='str', required=True), + resource_group=dict(type='str', required=True, aliases=['resource_group_name']), state=dict(type='str', default='present', choices=['present', 'absent']), tags=dict(type='dict'), virtual_network_rules=dict( diff --git a/plugins/modules/azure_rm_datalakestore_info.py b/plugins/modules/azure_rm_datalakestore_info.py index 49146d26c..d868f9a9d 100644 --- a/plugins/modules/azure_rm_datalakestore_info.py +++ b/plugins/modules/azure_rm_datalakestore_info.py @@ -48,7 +48,7 @@ - name: Get Azure Data Lake Store info from resource group 'myResourceGroup' azure_rm_datalakestore_info: resource_group: myResourceGroup - + - name: Get Azure Data Lake Store info azure_rm_datalakestore_info: ''' @@ -71,7 +71,7 @@ - The account creation time. returned: always type: str - sample: 2020-01-01T00:00:00.000000+00:00 + sample: '2020-01-01T00:00:00.000000+00:00' current_tier: description: - The commitment tier in use for the current month. @@ -103,7 +103,7 @@ - The resource identifier for the user managed Key Vault being used to encrypt. type: str returned: always - sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.KeyVault/vaults/testkv + sample: /subscriptions/{subscriptionId}/resourceGroups/myRG/providers/Microsoft.KeyVault/vaults/testkv encryption_key_name: description: - The name of the user managed encryption key. @@ -174,7 +174,7 @@ - The resource identifier. returned: always type: str - sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.DataLakeStore/accounts/testaccount + sample: /subscriptions/{subscriptionId}/resourceGroups/myResourceGroup/providers/Microsoft.DataLakeStore/accounts/testaccount identity: description: - The Key Vault encryption identity, if any. @@ -200,7 +200,7 @@ - The account last modified time. returned: always type: str - sample: 2020-01-01T00:00:00.000000+00:00 + sample: '2020-01-01T00:00:00.000000+00:00' location: description: - The resource location. @@ -271,7 +271,7 @@ virtual_network_rules: description: - The list of virtual network rules associated with this Data Lake Store account. - type: list + type: list contains: name: description: @@ -282,7 +282,7 @@ description: - The resource identifier for the subnet. 
type: str - sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Network/virtualNetworks/vnet/subnets/default + sample: /subscriptions/{subscriptionId}/resourceGroups/myRG/providers/Microsoft.Network/virtualNetworks/vnet/subnets/default ''' try: @@ -311,7 +311,7 @@ def __init__(self): self.resource_group = None super(AzureRMDatalakeStoreInfo, self).__init__(self.module_arg_spec, - supports_tags=False) + supports_tags=False) def exec_module(self, **kwargs): for key in self.module_arg_spec: @@ -408,7 +408,7 @@ def account_obj_to_dict(self, datalake_store_obj): virtual_network_rules=None ) - account_dict['firewall_rules']=list() + account_dict['firewall_rules'] = list() for rule in datalake_store_obj.firewall_rules: rule_item = dict( name=rule.name, @@ -417,7 +417,7 @@ def account_obj_to_dict(self, datalake_store_obj): ) account_dict['firewall_rules'].append(rule_item) - account_dict['virtual_network_rules']=list() + account_dict['virtual_network_rules'] = list() for vnet_rule in datalake_store_obj.virtual_network_rules: vnet_rule_item = dict( name=vnet_rule.name, @@ -426,7 +426,7 @@ def account_obj_to_dict(self, datalake_store_obj): account_dict['virtual_network_rules'].append(vnet_rule_item) if datalake_store_obj.identity: - account_dict['identity']=dict( + account_dict['identity'] = dict( type=datalake_store_obj.identity.type, principal_id=datalake_store_obj.identity.principal_id, tenant_id=datalake_store_obj.identity.tenant_id @@ -434,13 +434,13 @@ def account_obj_to_dict(self, datalake_store_obj): if datalake_store_obj.encryption_config.key_vault_meta_info: account_dict['encryption_config'] = dict( - key_vault_meta_info = dict( - key_vault_resource_id = datalake_store_obj.encryption_config.key_vault_meta_info.key_vault_resource_id, - encryption_key_name = datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_name, - encryption_key_version = datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_version + key_vault_meta_info=dict( + key_vault_resource_id=datalake_store_obj.encryption_config.key_vault_meta_info.key_vault_resource_id, + encryption_key_name=datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_name, + encryption_key_version=datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_version ) ) - + return account_dict def account_obj_to_dict_basic(self, datalake_store_obj): @@ -460,6 +460,7 @@ def account_obj_to_dict_basic(self, datalake_store_obj): return account_dict + def main(): AzureRMDatalakeStoreInfo() diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index f7700e729..ebf3435be 100644 --- a/tests/sanity/ignore-2.10.txt +++ b/tests/sanity/ignore-2.10.txt @@ -90,8 +90,12 @@ plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:requi plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:required_if-unknown-key plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:return-syntax-error plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:undocumented-parameter +plugins/modules/azure_rm_datalakestore.py validate-modules:undocumented-parameter plugins/modules/azure_rm_datalakestore.py validate-modules:required_if-unknown-key plugins/modules/azure_rm_datalakestore.py validate-modules:required_if-requirements-unknown +plugins/modules/azure_rm_datalakestore.py validate-modules:doc-elements-mismatch +plugins/modules/azure_rm_datalakestore.py 
validate-modules:nonexistent-parameter-documented +plugins/modules/azure_rm_datalakestore_info.py validate-modules:undocumented-parameter plugins/modules/azure_rm_datalakestore_info.py validate-modules:required_if-unknown-key plugins/modules/azure_rm_datalakestore_info.py validate-modules:required_if-requirements-unknown plugins/modules/azure_rm_deployment.py validate-modules:parameter-type-not-in-doc diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index 68b29d19c..c0fc212da 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -90,8 +90,12 @@ plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:undoc plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:required_if-requirements-unknown plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:required_if-unknown-key plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:return-syntax-error +plugins/modules/azure_rm_datalakestore.py validate-modules:undocumented-parameter plugins/modules/azure_rm_datalakestore.py validate-modules:required_if-unknown-key plugins/modules/azure_rm_datalakestore.py validate-modules:required_if-requirements-unknown +plugins/modules/azure_rm_datalakestore.py validate-modules:doc-elements-mismatch +plugins/modules/azure_rm_datalakestore.py validate-modules:nonexistent-parameter-documented +plugins/modules/azure_rm_datalakestore_info.py validate-modules:undocumented-parameter plugins/modules/azure_rm_datalakestore_info.py validate-modules:required_if-unknown-key plugins/modules/azure_rm_datalakestore_info.py validate-modules:required_if-requirements-unknown plugins/modules/azure_rm_deployment.py validate-modules:parameter-type-not-in-doc From bdf529aae13a84ca4ab27f12d7f952c5e036b423 Mon Sep 17 00:00:00 2001 From: David Duque Date: Tue, 22 Dec 2020 12:30:04 +0100 Subject: [PATCH 18/23] Change author to list --- plugins/modules/azure_rm_datalakestore.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index d18b00df4..9a3461050 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -163,7 +163,7 @@ - azure.azcollection.azure_tags author: - David Duque Hernández (@next-davidduquehernandez) + - David Duque Hernández (@next-davidduquehernandez) ''' EXAMPLES = ''' From cbe4368a74e9688f4ae2a3f179e83c1be52be746 Mon Sep 17 00:00:00 2001 From: David Duque Date: Sat, 13 Feb 2021 10:58:52 +0100 Subject: [PATCH 19/23] Fixed name of returned param and set task name correctly --- plugins/modules/azure_rm_datalakestore.py | 6 +++--- plugins/modules/azure_rm_datalakestore_info.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index 9a3461050..e89962e9f 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -167,7 +167,7 @@ ''' EXAMPLES = ''' - - name: Get Azure Data Lake Store info + - name: Create Azure Data Lake Store azure_rm_datalakestore: resource_group: myResourceGroup name: myDataLakeStore @@ -311,12 +311,12 @@ - The type of encryption being used. type: str sample: SystemAssigned - name: + principal_id: description: - The principal identifier associated with the encryption. 
type: str sample: 00000000-0000-0000-0000-000000000000 - name: + tenant_id: description: - The tenant identifier associated with the encryption. type: str diff --git a/plugins/modules/azure_rm_datalakestore_info.py b/plugins/modules/azure_rm_datalakestore_info.py index d868f9a9d..c54ad077f 100644 --- a/plugins/modules/azure_rm_datalakestore_info.py +++ b/plugins/modules/azure_rm_datalakestore_info.py @@ -185,12 +185,12 @@ - The type of encryption being used. type: str sample: SystemAssigned - name: + principal_id: description: - The principal identifier associated with the encryption. type: str sample: 00000000-0000-0000-0000-000000000000 - name: + tenant_id: description: - The tenant identifier associated with the encryption. type: str From ddef5170a65fcbeb1fa58d8129d0b0f3fda881fa Mon Sep 17 00:00:00 2001 From: David Duque Date: Sun, 14 Mar 2021 11:00:21 +0100 Subject: [PATCH 20/23] Fix errors in merge --- plugins/module_utils/azure_rm_common.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/plugins/module_utils/azure_rm_common.py b/plugins/module_utils/azure_rm_common.py index a7cd0eb0e..02a9542ee 100644 --- a/plugins/module_utils/azure_rm_common.py +++ b/plugins/module_utils/azure_rm_common.py @@ -1262,6 +1262,7 @@ def lock_models(self): @property def recovery_services_backup_client(self): self.log('Getting recovery services backup client') + if not self._recovery_services_backup_client: self._recovery_services_backup_client = self.get_mgmt_svc_client(RecoveryServicesBackupClient, base_url=self._cloud_environment.endpoints.resource_manager) return self._recovery_services_backup_client @@ -1272,6 +1273,7 @@ def recovery_services_backup_models(self): def datalake_store_client(self): self.log('Getting datalake store client...') + if not self._datalake_store_client: self._datalake_store_client = self.get_mgmt_svc_client(DataLakeStoreAccountManagementClient, base_url=self._cloud_environment.endpoints.resource_manager, api_version='2016-11-01') @@ -1281,6 +1283,7 @@ def datalake_store_client(self): def datalake_store_models(self): return DataLakeStoreAccountModel +class AzureSASAuthentication(Authentication): """Simple SAS Authentication. 
An implementation of Authentication in https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/authentication.py From 9239199dc58f6bc3e578ffb8e23860e84d4e6b29 Mon Sep 17 00:00:00 2001 From: David Duque Date: Sun, 14 Mar 2021 12:11:55 +0100 Subject: [PATCH 21/23] Fix merge error --- plugins/module_utils/azure_rm_common.py | 1 + 1 file changed, 1 insertion(+) diff --git a/plugins/module_utils/azure_rm_common.py b/plugins/module_utils/azure_rm_common.py index 02a9542ee..b0b886c72 100644 --- a/plugins/module_utils/azure_rm_common.py +++ b/plugins/module_utils/azure_rm_common.py @@ -1271,6 +1271,7 @@ def recovery_services_backup_client(self): def recovery_services_backup_models(self): return RecoveryServicesBackupModels + @property def datalake_store_client(self): self.log('Getting datalake store client...') if not self._datalake_store_client: From 023d6e787cddbac0ae58286d0ef83063c425f6a1 Mon Sep 17 00:00:00 2001 From: David Duque Date: Tue, 16 Mar 2021 21:09:30 +0100 Subject: [PATCH 22/23] Added missing blank line --- plugins/module_utils/azure_rm_common.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plugins/module_utils/azure_rm_common.py b/plugins/module_utils/azure_rm_common.py index fb9c0959d..0446319c6 100644 --- a/plugins/module_utils/azure_rm_common.py +++ b/plugins/module_utils/azure_rm_common.py @@ -271,6 +271,7 @@ def default_api_version(self): import azure.mgmt.recoveryservicesbackup.models as RecoveryServicesBackupModels from azure.mgmt.datalake.store import DataLakeStoreAccountManagementClient import azure.mgmt.datalake.store.models as DataLakeStoreAccountModel + except ImportError as exc: Authentication = object HAS_AZURE_EXC = traceback.format_exc() @@ -1299,6 +1300,7 @@ def datalake_store_client(self): def datalake_store_models(self): return DataLakeStoreAccountModel + class AzureSASAuthentication(Authentication): """Simple SAS Authentication. An implementation of Authentication in From 1f56a4beb08831d3f7db94a26fd10d9fa77537b0 Mon Sep 17 00:00:00 2001 From: David Duque Date: Sat, 20 Mar 2021 10:35:17 +0100 Subject: [PATCH 23/23] Modified comments on PR --- plugins/modules/azure_rm_datalakestore.py | 8 +++----- plugins/modules/azure_rm_datalakestore_info.py | 9 +++------ tests/sanity/ignore-2.10.txt | 2 -- tests/sanity/ignore-2.9.txt | 1 + 4 files changed, 7 insertions(+), 13 deletions(-) diff --git a/plugins/modules/azure_rm_datalakestore.py b/plugins/modules/azure_rm_datalakestore.py index e89962e9f..e2397b8d4 100644 --- a/plugins/modules/azure_rm_datalakestore.py +++ b/plugins/modules/azure_rm_datalakestore.py @@ -8,13 +8,9 @@ __metaclass__ = type -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'community'} - DOCUMENTATION = ''' module: azure_rm_datalakestore -version_added: "1.2.0" +version_added: "1.4.0" short_description: Manage Azure data lake store description: - Create, update or delete a data lake store. @@ -134,6 +130,8 @@ - The name of the Azure resource group to use. required: true type: str + aliases: + - resource_group_name state: description: - State of the data lake store. Use C(present) to create or update a data lake store and use C(absent) to delete it. 
diff --git a/plugins/modules/azure_rm_datalakestore_info.py b/plugins/modules/azure_rm_datalakestore_info.py index c54ad077f..6613cdca3 100644 --- a/plugins/modules/azure_rm_datalakestore_info.py +++ b/plugins/modules/azure_rm_datalakestore_info.py @@ -8,15 +8,10 @@ __metaclass__ = type -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'community'} - - DOCUMENTATION = ''' --- module: azure_rm_datalakestore_info -version_added: "1.2.0" +version_added: "1.4.0" short_description: Get Azure Data Lake Store info description: - Get Azure Data Lake Store info. @@ -26,6 +21,8 @@ description: - The name of the Azure resource group. type: str + aliases: + - resource_group_name name: description: - The name of the Data Lake Store account. diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index ebf3435be..93726b49c 100644 --- a/tests/sanity/ignore-2.10.txt +++ b/tests/sanity/ignore-2.10.txt @@ -90,12 +90,10 @@ plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:requi plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:required_if-unknown-key plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:return-syntax-error plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:undocumented-parameter -plugins/modules/azure_rm_datalakestore.py validate-modules:undocumented-parameter plugins/modules/azure_rm_datalakestore.py validate-modules:required_if-unknown-key plugins/modules/azure_rm_datalakestore.py validate-modules:required_if-requirements-unknown plugins/modules/azure_rm_datalakestore.py validate-modules:doc-elements-mismatch plugins/modules/azure_rm_datalakestore.py validate-modules:nonexistent-parameter-documented -plugins/modules/azure_rm_datalakestore_info.py validate-modules:undocumented-parameter plugins/modules/azure_rm_datalakestore_info.py validate-modules:required_if-unknown-key plugins/modules/azure_rm_datalakestore_info.py validate-modules:required_if-requirements-unknown plugins/modules/azure_rm_deployment.py validate-modules:parameter-type-not-in-doc diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt index caf5ef311..643b307b7 100644 --- a/tests/sanity/ignore-2.9.txt +++ b/tests/sanity/ignore-2.9.txt @@ -31,6 +31,7 @@ plugins/modules/azure_rm_containerregistrywebhook.py validate-modules:undocument plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:doc-default-does-not-match-spec plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:parameter-type-not-in-doc plugins/modules/azure_rm_containerregistrywebhook_info.py validate-modules:undocumented-parameter +plugins/modules/azure_rm_datalakestore.py validate-modules:nonexistent-parameter-documented plugins/modules/azure_rm_deployment.py validate-modules:parameter-type-not-in-doc plugins/modules/azure_rm_deployment_info.py validate-modules:parameter-type-not-in-doc plugins/modules/azure_rm_dnsrecordset.py validate-modules:doc-missing-type
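
A minimal sketch of using the module introduced by this patch series from a playbook, assuming an existing resource group named myResourceGroup; the account name and firewall rule values are illustrative placeholders modeled on the integration tests above:

- name: Create a Data Lake Store account with a firewall rule
  azure_rm_datalakestore:
    resource_group: myResourceGroup
    name: myadlsaccount
    new_tier: Consumption
    firewall_state: Enabled
    firewall_allow_azure_ips: Enabled
    firewall_rules:
      - name: allow_office_range
        start_ip_address: 192.168.1.1
        end_ip_address: 192.168.1.254
  register: adls_output

- name: Delete the Data Lake Store account when it is no longer needed
  azure_rm_datalakestore:
    resource_group: myResourceGroup
    name: myadlsaccount
    state: absent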